-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathLogistic_regretion.py
More file actions
117 lines (50 loc) · 1.99 KB
/
Logistic_regretion.py
File metadata and controls
117 lines (50 loc) · 1.99 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
import torch
import torch.nn as nn
import numpy as np
from sklearn import datasets
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import train_test_split
# --- 0) Data preparation ---
# Binary classification dataset from sklearn (malignant vs. benign).
bc = datasets.load_breast_cancer()
X, y = bc.data, bc.target
n_samples, n_features = X.shape

# Hold out 20% of the samples for evaluation; fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=1234
)

# Standardize features using statistics computed on the training split only,
# so no information from the test set leaks into preprocessing.
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Convert everything to float32 tensors; targets become (n, 1) column
# vectors so their shape matches the model output for BCELoss.
X_train = torch.from_numpy(X_train.astype(np.float32))
X_test = torch.from_numpy(X_test.astype(np.float32))
y_train = torch.from_numpy(y_train.astype(np.float32)).unsqueeze(1)
y_test = torch.from_numpy(y_test.astype(np.float32)).unsqueeze(1)
#1) prepare model
class LogsisticRegression(nn.Module):
    """Binary classifier: one linear layer followed by a sigmoid.

    Maps an input of shape (batch, n_input_features) to a probability
    tensor of shape (batch, 1) with values in (0, 1).
    """

    def __init__(self, n_input_features):
        super().__init__()
        # Single output unit: the logit of the positive class.
        self.linear = nn.Linear(n_input_features, 1)

    def forward(self, x):
        # Squash the logit into a probability.
        logits = self.linear(x)
        return torch.sigmoid(logits)
# --- 2) Model, loss, and optimizer ---
model = LogsisticRegression(n_features)

learning_rate = 0.01
num_epochs = 1000

# BCELoss expects probabilities in [0, 1], which the model's sigmoid
# output provides; plain SGD updates every parameter of the linear layer.
criterion = nn.BCELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=learning_rate)
# --- 3) Training loop ---
for epoch in range(num_epochs):
    # Forward pass: predicted probabilities for the full training set.
    y_predicted = model(X_train)
    loss = criterion(y_predicted, y_train)

    # Backward pass: clear stale gradients first, then compute fresh ones
    # and take one SGD step. (Clearing before backward is the conventional
    # ordering and is behaviorally identical to clearing afterwards.)
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

    # Periodic progress report. Fixed: the message previously printed
    # "loss==<value>" due to a doubled '=' in the f-string.
    if (epoch + 1) % 50 == 0:
        print(f'epoch:{epoch+1},loss={loss.item():.4f}')
# --- 4) Evaluation: accuracy on the held-out test split ---
# No gradients are needed for inference, so skip autograd bookkeeping.
with torch.no_grad():
    test_probs = model(X_test)
    # Threshold the probabilities at 0.5 to obtain hard class labels.
    predicted_labels = test_probs.round()
    acc = (predicted_labels == y_test).sum() / float(y_test.shape[0])
    print(f'accuracy={acc:.4f}')