# LMS algorithm
import math
+ import matplotlib.pyplot as plt
# compute h(x, theta)
def GetHThetaX(x, theta):
-     theta_1 = theta[1:]
-     h = theta[0] + sum([xi * thetai for xi, thetai in zip(x, theta_1)])
-     return h
+     theta_1 = theta[1:]
+     h = theta[0] + sum([xi * thetai for xi, thetai in zip(x, theta_1)])
+     return h
+ # compute J(theta)
def GetJ(X, Y, theta):
    J = sum(pow(GetHThetaX(x, theta) - y, 2) for x, y in zip(X, Y)) / 2
    return J
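The next hunk references GetGradient(X, Y, theta, j), whose body is not shown in this diff. For orientation only, here is a minimal sketch of what such a helper typically computes for this cost function: the partial derivative of J(theta) with respect to theta[j], with an implicit leading 1 as the intercept feature, matching GetHThetaX above. The body is an assumption, not code from this commit.

# Hypothetical sketch, not code from this commit: partial derivative of
# J(theta) with respect to theta[j]; x is treated as having a leading 1
# for the intercept term, consistent with GetHThetaX above.
def GetGradient(X, Y, theta, j):
    gradient = 0
    for x, y in zip(X, Y):
        xj = 1 if j == 0 else x[j - 1]
        gradient += (GetHThetaX(x, theta) - y) * xj
    return gradient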
@@ -27,9 +29,14 @@ def GetGradient(X,Y,theta,j):
# n denotes an n-dimensional feature vector
- def LMS_Prediction(X, Y, n, year):
+ def LMS_Prediction(X, X1, Y, n, year):
    # initialize all of theta to 0
-     theta = [i for i in range(0, n)]
+     #theta = [i for i in range(0, n)]
+     theta = [-1500, 1]
+
+     plt.plot(X1, Y, 'ro')
+     plt.axis([2000, 2015, 1, 15])
+     plt.plot(X1, [theta[0] + theta[1] * x for x in X1], 'y-')

    # set the learning rate
    alpha = 0.00000001
@@ -38,6 +45,8 @@ def LMS_Prediction(X,Y,n,year):
    curJ = GetJ(X, Y, theta)
    preJ = curJ
+
+
    while True:
        temp = []
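The body of the update loop is elided between this hunk and the next. A minimal sketch of the batch gradient-descent step it presumably performs is shown below; the names temp, alpha, curJ, preJ, GetGradient, and GetJ come from the diff, while the exact update logic is an assumption.

# Hypothetical sketch of the elided update loop, not code from this commit:
# simultaneous batch gradient-descent updates of theta, stopping once J
# stops decreasing (theta[j] := theta[j] - alpha * dJ/dtheta[j]).
while True:
    temp = []
    for j in range(len(theta)):
        temp.append(theta[j] - alpha * GetGradient(X, Y, theta, j))
    theta = temp
    preJ = curJ
    curJ = GetJ(X, Y, theta)
    if curJ > preJ:
        break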
@@ -53,13 +62,23 @@ def LMS_Prediction(X,Y,n,year):
        curJ = GetJ(X, Y, theta)
        if curJ > preJ:
            break
+     plt.plot(X1, [theta[0] + theta[1] * x for x in X1], 'b-')
+     plt.plot(X1, [theta[0] + theta[1] * x for x in X1], 'r-')
+     plt.show()
if __name__ == '__main__':
-     #X = [[1], [2], [5], [7], [10], [15]]
-     #Y = [2, 6, 7, 9, 14, 19]
-     X = [[2000], [2001], [2002], [2003], [2004], [2005], [2006], [2007], [2008], [2009], [2010], [2011], [2012], [2013]]
-     Y = [2.000, 2.500, 2.900, 3.147, 4.515, 4.903, 5.365, 5.704, 6.853, 7.971, 8.561, 10.000, 11.280, 12.900]
-     LMS_Prediction(X, Y, 2, 2014)
+     X = [[2000], [2001], [2002], [2003], [2004], [2005], [2006], [2007], [2008], [2009], [2010], [2011], [2012], [2013]]
+     Y = [2.000, 2.500, 2.900, 3.147, 4.515, 4.903, 5.365, 5.704, 6.853, 7.971, 8.561, 10.000, 11.280, 12.900]
+     X1 = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013]
+     LMS_Prediction(X, X1, Y, 2, 2014)
+     #X = [[1], [2], [5], [7], [10], [15]]
+     #Y = [2, 6, 7, 9, 14, 19]
+     #X = [[2000], [2001], [2002], [2003], [2004], [2005], [2006], [2007], [2008], [2009], [2010], [2011], [2012], [2013]]
+     #Y = [2.000, 2.500, 2.900, 3.147, 4.515, 4.903, 5.365, 5.704, 6.853, 7.971, 8.561, 10.000, 11.280, 12.900]
+     #X1 = [2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013]
+     #pylab.scatter(X1, Y)
+     #pylab.show()
+
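The year argument (2014) is passed to LMS_Prediction, but no prediction step appears in the hunks shown here. Assuming the fitted theta ends up holding [intercept, slope] as in the plotting calls above, the value for that year could be read off along these lines (a sketch, not code from this commit):

# Hypothetical use of the fitted parameters, consistent with GetHThetaX:
prediction = GetHThetaX([year], theta)  # theta[0] + theta[1] * year
print(prediction)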