Skip to content

Commit 24c583a

Browse files
committed
Logistic Regression
1 parent eca5afd commit 24c583a

4 files changed

Lines changed: 371 additions & 0 deletions

File tree

LogisticRegression/data1.npy

2.42 KB
Binary file not shown.

LogisticRegression/data1.txt

Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
1+
34.62365962451697,78.0246928153624,0
2+
30.28671076822607,43.89499752400101,0
3+
35.84740876993872,72.90219802708364,0
4+
60.18259938620976,86.30855209546826,1
5+
79.0327360507101,75.3443764369103,1
6+
45.08327747668339,56.3163717815305,0
7+
61.10666453684766,96.51142588489624,1
8+
75.02474556738889,46.55401354116538,1
9+
76.09878670226257,87.42056971926803,1
10+
84.43281996120035,43.53339331072109,1
11+
95.86155507093572,38.22527805795094,0
12+
75.01365838958247,30.60326323428011,0
13+
82.30705337399482,76.48196330235604,1
14+
69.36458875970939,97.71869196188608,1
15+
39.53833914367223,76.03681085115882,0
16+
53.9710521485623,89.20735013750205,1
17+
69.07014406283025,52.74046973016765,1
18+
67.94685547711617,46.67857410673128,0
19+
70.66150955499435,92.92713789364831,1
20+
76.97878372747498,47.57596364975532,1
21+
67.37202754570876,42.83843832029179,0
22+
89.67677575072079,65.79936592745237,1
23+
50.534788289883,48.85581152764205,0
24+
34.21206097786789,44.20952859866288,0
25+
77.9240914545704,68.9723599933059,1
26+
62.27101367004632,69.95445795447587,1
27+
80.1901807509566,44.82162893218353,1
28+
93.114388797442,38.80067033713209,0
29+
61.83020602312595,50.25610789244621,0
30+
38.78580379679423,64.99568095539578,0
31+
61.379289447425,72.80788731317097,1
32+
85.40451939411645,57.05198397627122,1
33+
52.10797973193984,63.12762376881715,0
34+
52.04540476831827,69.43286012045222,1
35+
40.23689373545111,71.16774802184875,0
36+
54.63510555424817,52.21388588061123,0
37+
33.91550010906887,98.86943574220611,0
38+
64.17698887494485,80.90806058670817,1
39+
74.78925295941542,41.57341522824434,0
40+
34.1836400264419,75.2377203360134,0
41+
83.90239366249155,56.30804621605327,1
42+
51.54772026906181,46.85629026349976,0
43+
94.44336776917852,65.56892160559052,1
44+
82.36875375713919,40.61825515970618,0
45+
51.04775177128865,45.82270145776001,0
46+
62.22267576120188,52.06099194836679,0
47+
77.19303492601364,70.45820000180959,1
48+
97.77159928000232,86.7278223300282,1
49+
62.07306379667647,96.76882412413983,1
50+
91.56497449807442,88.69629254546599,1
51+
79.94481794066932,74.16311935043758,1
52+
99.2725269292572,60.99903099844988,1
53+
90.54671411399852,43.39060180650027,1
54+
34.52451385320009,60.39634245837173,0
55+
50.2864961189907,49.80453881323059,0
56+
49.58667721632031,59.80895099453265,0
57+
97.64563396007767,68.86157272420604,1
58+
32.57720016809309,95.59854761387875,0
59+
74.24869136721598,69.82457122657193,1
60+
71.79646205863379,78.45356224515052,1
61+
75.3956114656803,85.75993667331619,1
62+
35.28611281526193,47.02051394723416,0
63+
56.25381749711624,39.26147251058019,0
64+
30.05882244669796,49.59297386723685,0
65+
44.66826172480893,66.45008614558913,0
66+
66.56089447242954,41.09209807936973,0
67+
40.45755098375164,97.53518548909936,1
68+
49.07256321908844,51.88321182073966,0
69+
80.27957401466998,92.11606081344084,1
70+
66.74671856944039,60.99139402740988,1
71+
32.72283304060323,43.30717306430063,0
72+
64.0393204150601,78.03168802018232,1
73+
72.34649422579923,96.22759296761404,1
74+
60.45788573918959,73.09499809758037,1
75+
58.84095621726802,75.85844831279042,1
76+
99.82785779692128,72.36925193383885,1
77+
47.26426910848174,88.47586499559782,1
78+
50.45815980285988,75.80985952982456,1
79+
60.45555629271532,42.50840943572217,0
80+
82.22666157785568,42.71987853716458,0
81+
88.9138964166533,69.80378889835472,1
82+
94.83450672430196,45.69430680250754,1
83+
67.31925746917527,66.58935317747915,1
84+
57.23870631569862,59.51428198012956,1
85+
80.36675600171273,90.96014789746954,1
86+
68.46852178591112,85.59430710452014,1
87+
42.0754545384731,78.84478600148043,0
88+
75.47770200533905,90.42453899753964,1
89+
78.63542434898018,96.64742716885644,1
90+
52.34800398794107,60.76950525602592,0
91+
94.09433112516793,77.15910509073893,1
92+
90.44855097096364,87.50879176484702,1
93+
55.48216114069585,35.57070347228866,0
94+
74.49269241843041,84.84513684930135,1
95+
89.84580670720979,45.35828361091658,1
96+
83.48916274498238,48.38028579728175,1
97+
42.2617008099817,87.10385094025457,1
98+
99.31500880510394,68.77540947206617,1
99+
55.34001756003703,64.9319380069486,1
100+
74.77589300092767,89.52981289513276,1

LogisticRegression/data2.txt

Lines changed: 118 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,118 @@
1+
0.051267,0.69956,1
2+
-0.092742,0.68494,1
3+
-0.21371,0.69225,1
4+
-0.375,0.50219,1
5+
-0.51325,0.46564,1
6+
-0.52477,0.2098,1
7+
-0.39804,0.034357,1
8+
-0.30588,-0.19225,1
9+
0.016705,-0.40424,1
10+
0.13191,-0.51389,1
11+
0.38537,-0.56506,1
12+
0.52938,-0.5212,1
13+
0.63882,-0.24342,1
14+
0.73675,-0.18494,1
15+
0.54666,0.48757,1
16+
0.322,0.5826,1
17+
0.16647,0.53874,1
18+
-0.046659,0.81652,1
19+
-0.17339,0.69956,1
20+
-0.47869,0.63377,1
21+
-0.60541,0.59722,1
22+
-0.62846,0.33406,1
23+
-0.59389,0.005117,1
24+
-0.42108,-0.27266,1
25+
-0.11578,-0.39693,1
26+
0.20104,-0.60161,1
27+
0.46601,-0.53582,1
28+
0.67339,-0.53582,1
29+
-0.13882,0.54605,1
30+
-0.29435,0.77997,1
31+
-0.26555,0.96272,1
32+
-0.16187,0.8019,1
33+
-0.17339,0.64839,1
34+
-0.28283,0.47295,1
35+
-0.36348,0.31213,1
36+
-0.30012,0.027047,1
37+
-0.23675,-0.21418,1
38+
-0.06394,-0.18494,1
39+
0.062788,-0.16301,1
40+
0.22984,-0.41155,1
41+
0.2932,-0.2288,1
42+
0.48329,-0.18494,1
43+
0.64459,-0.14108,1
44+
0.46025,0.012427,1
45+
0.6273,0.15863,1
46+
0.57546,0.26827,1
47+
0.72523,0.44371,1
48+
0.22408,0.52412,1
49+
0.44297,0.67032,1
50+
0.322,0.69225,1
51+
0.13767,0.57529,1
52+
-0.0063364,0.39985,1
53+
-0.092742,0.55336,1
54+
-0.20795,0.35599,1
55+
-0.20795,0.17325,1
56+
-0.43836,0.21711,1
57+
-0.21947,-0.016813,1
58+
-0.13882,-0.27266,1
59+
0.18376,0.93348,0
60+
0.22408,0.77997,0
61+
0.29896,0.61915,0
62+
0.50634,0.75804,0
63+
0.61578,0.7288,0
64+
0.60426,0.59722,0
65+
0.76555,0.50219,0
66+
0.92684,0.3633,0
67+
0.82316,0.27558,0
68+
0.96141,0.085526,0
69+
0.93836,0.012427,0
70+
0.86348,-0.082602,0
71+
0.89804,-0.20687,0
72+
0.85196,-0.36769,0
73+
0.82892,-0.5212,0
74+
0.79435,-0.55775,0
75+
0.59274,-0.7405,0
76+
0.51786,-0.5943,0
77+
0.46601,-0.41886,0
78+
0.35081,-0.57968,0
79+
0.28744,-0.76974,0
80+
0.085829,-0.75512,0
81+
0.14919,-0.57968,0
82+
-0.13306,-0.4481,0
83+
-0.40956,-0.41155,0
84+
-0.39228,-0.25804,0
85+
-0.74366,-0.25804,0
86+
-0.69758,0.041667,0
87+
-0.75518,0.2902,0
88+
-0.69758,0.68494,0
89+
-0.4038,0.70687,0
90+
-0.38076,0.91886,0
91+
-0.50749,0.90424,0
92+
-0.54781,0.70687,0
93+
0.10311,0.77997,0
94+
0.057028,0.91886,0
95+
-0.10426,0.99196,0
96+
-0.081221,1.1089,0
97+
0.28744,1.087,0
98+
0.39689,0.82383,0
99+
0.63882,0.88962,0
100+
0.82316,0.66301,0
101+
0.67339,0.64108,0
102+
1.0709,0.10015,0
103+
-0.046659,-0.57968,0
104+
-0.23675,-0.63816,0
105+
-0.15035,-0.36769,0
106+
-0.49021,-0.3019,0
107+
-0.46717,-0.13377,0
108+
-0.28859,-0.060673,0
109+
-0.61118,-0.067982,0
110+
-0.66302,-0.21418,0
111+
-0.59965,-0.41886,0
112+
-0.72638,-0.082602,0
113+
-0.83007,0.31213,0
114+
-0.72062,0.53874,0
115+
-0.59389,0.49488,0
116+
-0.48445,0.99927,0
117+
-0.0063364,0.99927,0
118+
0.63265,-0.030612,0
Lines changed: 153 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,153 @@
1+
#-*- coding: utf-8 -*-
2+
import numpy as np
3+
import matplotlib.pyplot as plt
4+
from scipy import optimize
5+
from matplotlib.font_manager import FontProperties
6+
font = FontProperties(fname=r"c:\windows\fonts\simsun.ttc", size=14) # 解决windows环境下画图汉字乱码问题
7+
8+
9+
def LogisticRegression():
    """Train regularized logistic regression on data2.txt.

    Loads the data, plots it, maps the two input features to degree-3
    polynomial terms, minimizes the regularized cost with BFGS, reports
    the training-set accuracy and draws the decision boundary.
    """
    data = loadtxtAndcsv_data("data2.txt", ",", np.float64)
    X = data[:, 0:-1]
    y = data[:, -1]

    plot_data(X, y)  # scatter plot of the raw two-class data

    X = mapFeature(X[:, 0], X[:, 1])           # map features to polynomial terms
    initial_theta = np.zeros((X.shape[1], 1))  # start from theta = 0
    initial_lambda = 0.1                       # regularization strength; typical values 0.01, 0.1, 1, ...

    J = costFunction(initial_theta, X, y, initial_lambda)  # cost at the initial theta/lambda

    # print as a function call so the script runs on Python 3 as well
    # (identical output on Python 2 for a single argument)
    print(J)  # expected: 0.693147, i.e. log(2)

    # result = optimize.fmin(costFunction, initial_theta, args=(X,y,initial_lambda))  # plain fmin converges poorly here
    '''Use scipy's fmin_bfgs (quasi-Newton Broyden-Fletcher-Goldfarb-Shanno):
    - costFunction computes the cost,
    - initial_theta is the starting point,
    - fprime supplies the gradient of costFunction,
    - args holds the remaining parameters, passed as a tuple;
    the theta that minimizes costFunction is returned.
    '''
    result = optimize.fmin_bfgs(costFunction, initial_theta, fprime=gradient, args=(X, y, initial_lambda))
    p = predict(X, result)  # predictions on the training set
    # p == y yields booleans; cast to float to average into a percentage
    print(u'在训练集上的准确度为%f%%' % np.mean(np.float64(p == y) * 100))

    X = data[:, 0:-1]
    y = data[:, -1]
    plotDecisionBoundary(result, X, y)  # draw the fitted decision boundary
# Load a delimited txt/csv file into a numpy array.
def loadtxtAndcsv_data(fileName, split, dataType):
    """Read *fileName*, splitting columns on *split*, as dtype *dataType*."""
    return np.loadtxt(fileName, dtype=dataType, delimiter=split)
# Load a numpy .npy binary file.
def loadnpy_data(fileName):
    """Return the array stored in the .npy file *fileName*."""
    return np.load(fileName)
# Scatter-plot a two-class 2-D dataset.
def plot_data(X, y):
    """Plot the rows of X as points: red circles for y==1, blue for y==0."""
    positives = np.where(y == 1)  # indices of the y == 1 samples
    negatives = np.where(y == 0)  # indices of the y == 0 samples

    plt.figure(figsize=(15, 12))
    plt.plot(X[positives, 0], X[positives, 1], 'ro')  # class 1 in red
    plt.plot(X[negatives, 0], X[negatives, 1], 'bo')  # class 0 in blue
    plt.title(u"两个类别散点图", fontproperties=font)
    plt.show()
# Map two feature columns to all polynomial terms up to a fixed degree.
def mapFeature(X1, X2):
    """Return the polynomial feature matrix for columns X1 and X2.

    For degree=2 the columns would be 1, x1, x2, x1^2, x1*x2, x2^2;
    here degree is 3, giving 10 columns including the leading ones column.
    """
    degree = 3  # highest total power produced
    out = np.ones((X1.shape[0], 1))  # start with the bias column of ones
    for total in np.arange(1, degree + 1):
        for power2 in range(total + 1):
            # elementwise product X1^(total-power2) * X2^power2
            term = (X1 ** (total - power2)) * (X2 ** power2)
            out = np.hstack((out, term.reshape(-1, 1)))
    return out
# Regularized logistic-regression cost.
def costFunction(initial_theta, X, y, inital_lambda):
    """Return the L2-regularized cross-entropy cost of theta on (X, y).

    The bias weight theta[0] is excluded from the regularization term.
    (The dead `J = 0` pre-initialization was removed; J is assigned
    unconditionally below.)
    """
    m = len(y)

    h = sigmoid(np.dot(X, initial_theta))  # model outputs h(z)
    # Regularization starts at j = 1, not 0: zero the bias in a copy.
    theta1 = initial_theta.copy()
    theta1[0] = 0

    penalty = np.dot(np.transpose(theta1), theta1)  # sum of squared non-bias weights
    # Cross-entropy data term plus lambda/2 * penalty, averaged over m samples.
    J = (-np.dot(np.transpose(y), np.log(h))
         - np.dot(np.transpose(1 - y), np.log(1 - h))
         + penalty * inital_lambda / 2) / m
    return J
# Gradient of the regularized logistic-regression cost.
def gradient(initial_theta, X, y, inital_lambda):
    """Return d(cost)/d(theta) for the L2-regularized cost on (X, y).

    The bias weight theta[0] carries no regularization term.
    (The dead `grad = np.zeros(...)` pre-allocation was removed; the
    gradient is computed in one expression.)
    """
    m = len(y)

    h = sigmoid(np.dot(X, initial_theta))  # model outputs h(z)
    # Zero the bias in a copy so it contributes nothing to the penalty term.
    theta1 = initial_theta.copy()
    theta1[0] = 0

    # Data term X^T (h - y) / m plus the penalty term lambda/m * theta.
    return np.dot(np.transpose(X), h - y) / m + inital_lambda / m * theta1
# Sigmoid (logistic) function.
def sigmoid(z):
    """Return 1 / (1 + exp(-z)), elementwise for array input.

    Accepts scalars as well as ndarrays: the previous dead
    `np.zeros((len(z), 1))` pre-allocation raised TypeError for scalar z
    even though its result was never used.
    """
    return 1.0 / (1 + np.exp(-z))
# Plot the decision boundary over the data.
def plotDecisionBoundary(theta, X, y):
    """Scatter both classes and overlay the decision boundary for theta."""
    pos = np.where(y == 1)  # indices where y == 1
    neg = np.where(y == 0)  # indices where y == 0

    plt.figure(figsize=(15, 12))
    plt.plot(X[pos, 0], X[pos, 1], 'ro')  # class 1 in red
    plt.plot(X[neg, 0], X[neg, 1], 'bo')  # class 0 in blue
    plt.title(u"决策边界", fontproperties=font)

    # u = np.linspace(30,100,100)
    # v = np.linspace(30,100,100)

    u = np.linspace(-1, 1.5, 50)  # grid range; adjust to the data at hand
    v = np.linspace(-1, 1.5, 50)

    z = np.zeros((len(u), len(v)))
    for i in range(len(u)):
        for j in range(len(v)):
            # Each grid point must pass through the same polynomial mapping
            # that was used for training.
            z[i, j] = np.dot(mapFeature(u[i].reshape(1, -1), v[j].reshape(1, -1)), theta)

    z = np.transpose(z)
    # Contour levels in the narrow band [0, 0.01] approximate theta.x = 0.
    # Fixed keyword: contour takes `linewidths` (plural); `linewidth` is not
    # a valid contour argument.
    plt.contour(u, v, z, [0, 0.01], linewidths=2.0)
    # plt.legend()
    plt.show()
# Predict class labels from a trained theta.
def predict(X, theta):
    """Return a 0/1 label for each row of X (threshold 0.5 on the sigmoid)."""
    p = sigmoid(np.dot(X, theta))  # predicted probabilities
    # Vectorized threshold replaces the original per-element loop:
    # probability > 0.5 -> 1.0, otherwise 0.0 (same float dtype as before).
    return np.where(p > 0.5, 1.0, 0.0)
# Smoke-test entry point for the logistic regression pipeline.
def testLogisticRegression():
    """Run the full LogisticRegression workflow once."""
    LogisticRegression()


if __name__ == "__main__":
    testLogisticRegression()

0 commit comments

Comments
 (0)