-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathlr.py
92 lines (45 loc) · 1.47 KB
/
lr.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
import numpy as np
import pandas as pd
def load_data(path, header):
    """Read a tab-separated file at *path* into a pandas DataFrame.

    *header* is forwarded to ``pd.read_csv`` (e.g. ``None`` when the file
    has no header row).
    """
    return pd.read_csv(path, header=header, delimiter="\t")
def sigmoid(x):
return (1/(1+np.exp(-x)))
def dsigmoid(x):
    """Sigmoid derivative expressed in terms of the sigmoid's OUTPUT.

    If ``s = sigmoid(z)``, then ``d/dz sigmoid(z) = s * (1 - s)``;
    callers pass ``s``, not ``z``.
    """
    return x * (1.0 - x)
# NOTE(review): the training data is loaded at IMPORT time — any module that
# imports this file pays the read cost and requires 'train_dataset.tsv' to
# exist in the working directory. Consider moving these two lines under the
# __main__ guard.
xtrain=load_data('train_dataset.tsv',None)
# With header=None the columns are integer-labeled, so pop(1000) removes the
# column named 1000 from xtrain and uses it as the label vector — presumably
# the dataset has 1001 columns with the label last; TODO confirm against the
# actual file.
ytrain=np.array(xtrain.pop(1000))
class LogisticRegression:
    """Binary classifier trained by gradient descent on a sigmoid output.

    The update scales the raw residual by the sigmoid derivative
    (a squared-error-style gradient, as in the original implementation),
    not the canonical cross-entropy update. ``w[0]`` is the bias term,
    learned via a prepended column of ones.
    """

    @staticmethod
    def _sigmoid(z):
        # Clip before exponentiating so large |z| saturates to 0/1 without
        # an overflow RuntimeWarning from np.exp.
        return 1.0 / (1.0 + np.exp(-np.clip(z, -500, 500)))

    @staticmethod
    def _dsigmoid(s):
        # Derivative of the sigmoid expressed in terms of its OUTPUT s.
        return s * (1.0 - s)

    def fit(self, x, y, lr=0.01, max_iters=5000, tol=0.0005):
        """Train the weight vector by batch gradient descent.

        Parameters
        ----------
        x : array-like or DataFrame, shape (n_samples, n_features)
            Training features WITHOUT a bias column (it is added here).
        y : array-like, shape (n_samples,)
            0/1 labels.
        lr : float
            Learning rate.
        max_iters : int
            Maximum number of update steps.
        tol : float
            Early-stop threshold on the mean absolute residual.
        """
        self.y = np.asarray(y)
        x = np.asarray(x, dtype=float)
        # Random init; one extra weight for the bias.
        w = np.random.randn(x.shape[-1] + 1, 1)
        # Prepend a column of ones so the bias is learned with the weights.
        x = np.hstack((np.ones((x.shape[0], 1)), x))
        n_samples = x.shape[0]
        for i in range(max_iters):
            pred = self._sigmoid(w.T.dot(x.T))            # shape (1, n)
            delta = (self.y - pred) * self._dsigmoid(pred)
            # BUGFIX: the original divided by x.T.shape[0] (the feature
            # count); gradient averaging should be over the sample count.
            w += lr * x.T.dot(delta.T) / n_samples
            # BUGFIX: the original used abs(np.mean(y - pred)), in which
            # positive and negative residuals cancel and can trigger a
            # spurious early stop. Use the mean ABSOLUTE residual.
            loss = np.mean(np.abs(self.y - pred))
            if i % 50 == 0:
                print('iters:%d,loss:%f' % (i, loss))
            if loss < tol:
                break
        self.w = w
        print('训练完毕')
        # Report training accuracy on the (bias-augmented) training matrix.
        self.predict(x)

    def predict(self, x):
        """Return 0/1 predictions, shape (1, n_samples).

        NOTE: ``x`` must already contain the leading bias column (``fit``
        passes the augmented matrix). The printed accuracy compares against
        the labels seen at fit time, so it is only meaningful when ``x`` is
        the training set.
        """
        pred = self._sigmoid(self.w.T.dot(np.asarray(x, dtype=float).T))
        pred = np.where(pred < 0.5, 0, 1)
        acc = np.mean(pred == self.y)
        print('acc:', acc)
        return pred

    def save_parameters(self, path):
        """Write the learned weight column to ``path`` as TSV, no header."""
        weights = pd.DataFrame(self.w)
        weights.to_csv(path, header=None, sep='\t')
        print('保存完毕')
if __name__ == '__main__':
    # Train on the module-level dataset and persist the learned weights.
    classifier = LogisticRegression()
    classifier.fit(xtrain, ytrain)
    classifier.save_parameters('d:/weights.tsv')