Thanks for visiting codestin.com
Credit goes to github.com

Skip to content

Commit a2207b3

Browse files
author
sherlock-coding
committed
update
1 parent 09f603b commit a2207b3

File tree

1 file changed

+29
-10
lines changed

1 file changed

+29
-10
lines changed

Homework/HousingPricePrediction.py

Lines changed: 29 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -1,12 +1,14 @@
11
#LMS算法
22
import math
3+
import matplotlib.pyplot as plt
34

45
def GetHThetaX(x,theta):
    """Return the linear hypothesis h(x, theta).

    h = theta[0] + theta[1]*x[0] + theta[2]*x[1] + ...
    (theta[0] is the intercept; x carries no leading 1.)

    x     -- feature vector for one sample
    theta -- parameter vector, one entry longer than x is used
    """
    weights = theta[1:]
    prediction = theta[0]
    for feature, weight in zip(x, weights):
        prediction += feature * weight
    return prediction
910

11+
def GetJ(X,Y,theta):
    """Squared-error cost J(theta) = (1/2) * sum_i (h(x_i, theta) - y_i)^2.

    X     -- list of feature vectors (one per sample)
    Y     -- list of target values, aligned with X
    theta -- current parameter vector
    """
    total = 0.0
    for sample, target in zip(X, Y):
        residual = GetHThetaX(sample, theta) - target
        total += residual * residual
    return total / 2
@@ -27,9 +29,14 @@ def GetGradient(X,Y,theta,j):
2729

2830

2931
#n表示n维的特征向量
30-
def LMS_Prediction(X,Y,n,year):
32+
def LMS_Prediction(X,X1,Y,n,year):
3133
#theta全都初始化为0
32-
theta = [i for i in range(0,n)]
34+
#theta = [i for i in range(0,n)]
35+
theta = [-1500,1]
36+
37+
plt.plot(X1,Y,'ro')
38+
plt.axis([2000,2015,1,15])
39+
plt.plot(X1,[theta[0]+theta[1]*x for x in X1],'y-')
3340

3441
#learning rate设置
3542
alpha = 0.00000001
@@ -38,6 +45,8 @@ def LMS_Prediction(X,Y,n,year):
3845

3946
curJ = GetJ(X,Y,theta)
4047
preJ = curJ
48+
49+
4150

4251
while True:
4352
temp = []
@@ -53,13 +62,23 @@ def LMS_Prediction(X,Y,n,year):
5362
curJ = GetJ(X,Y,theta)
5463
if curJ>preJ:
5564
break
65+
plt.plot(X1,[theta[0]+theta[1]*x for x in X1],'b-')
66+
plt.plot(X1,[theta[0]+theta[1]*x for x in X1],'r-')
67+
plt.show()
5668

5769

5870

5971
if __name__ == '__main__':
    # Yearly data points, 2000-2013.  X wraps each year in a one-element
    # feature vector for the LMS routine; X1 keeps the raw years for plotting.
    X1 = list(range(2000, 2014))
    X = [[year] for year in X1]
    # Observed housing prices per year -- units not stated in this file;
    # presumably a price index, TODO confirm against the data source.
    Y = [2.000,2.500,2.900,3.147,4.515,4.903,5.365,5.704,6.853,7.971,8.561,10.000,11.280,12.900]
    # Fit a 2-parameter (intercept + slope) model and predict for 2014.
    LMS_Prediction(X,X1,Y,2,2014)

0 commit comments

Comments
 (0)