# LMS (least mean squares) algorithm: fit a linear model by batch gradient descent

# Hypothesis: h(x, theta) = theta[0] + theta[1]*x[0] + ... + theta[n-1]*x[n-2]
def GetHThetaX(x, theta):
    theta_1 = theta[1:]  # weights paired with the feature values
    h = theta[0] + sum([xi * thetai for xi, thetai in zip(x, theta_1)])
    return h
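# For example, GetHThetaX([3], [1, 2]) returns 1 + 2*3 = 7.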

def GetJ(X, Y, theta):
    J = sum(pow(GetHThetaX(x, theta) - y, 2) for x, y in zip(X, Y)) / 2
    return J
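# i.e. J(theta) = (1/2) * sum_i (h(x_i, theta) - y_i)^2; the 1/2 cancels
# the factor of 2 produced by differentiating the square.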


# Partial derivative of J with respect to theta_j (j = 0 is the intercept)
def GetGradient(X, Y, theta, j):
    h = []
    for x in X:
        h.append(GetHThetaX(x, theta))
    g = 0
    if j == 0:
        g = sum([hi - yi for hi, yi in zip(h, Y)])
    else:
        g = sum([(hi - yi) * x[j - 1] for hi, yi, x in zip(h, Y, X)])
    return g
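# These are the batch least-squares partials:
#   dJ/dtheta_0 = sum_i (h_i - y_i)
#   dJ/dtheta_j = sum_i (h_i - y_i) * x_i[j-1]   for j >= 1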


# Fit n parameters (an intercept plus n-1 feature weights) by gradient
# descent, then predict the value for the given year.
def LMS_Prediction(X, Y, n, year):
    # Initialize every theta to 0
    theta = [0 for i in range(0, n)]

    # Learning rate; kept tiny because the raw inputs (years near 2000) make
    # the gradients large. Rescaling the features would permit a larger alpha.
    alpha = 0.00000001

    steps = 0

    curJ = GetJ(X, Y, theta)
    preJ = curJ

    while True:
        # Compute all updates from the current theta, then apply them together
        temp = []
        for i in range(0, n):
            g = GetGradient(X, Y, theta, i)
            temp.append(theta[i] - alpha * g)
        theta = temp
        print(steps, ":", temp)
        steps += 1
        preJ = curJ
        curJ = GetJ(X, Y, theta)
        # Stop once the cost no longer decreases (>= also catches a stall)
        if curJ >= preJ:
            break

    # Predict for the requested year (treats the year as the only feature)
    return GetHThetaX([year], theta)
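
# Optional numerical check of GetGradient via central differences
# (a sketch; the CheckGradient helper and its eps step are illustrative choices)
def CheckGradient(X, Y, theta, j, eps=1e-6):
    tp = list(theta)
    tp[j] += eps
    tm = list(theta)
    tm[j] -= eps
    return (GetJ(X, Y, tp) - GetJ(X, Y, tm)) / (2 * eps)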


if __name__ == '__main__':
    # A small toy data set for quick experiments:
    # X = [[1], [2], [5], [7], [10], [15]]
    # Y = [2, 6, 7, 9, 14, 19]
    X = [[2000], [2001], [2002], [2003], [2004], [2005], [2006], [2007], [2008], [2009], [2010], [2011], [2012], [2013]]
    Y = [2.000, 2.500, 2.900, 3.147, 4.515, 4.903, 5.365, 5.704, 6.853, 7.971, 8.561, 10.000, 11.280, 12.900]
    prediction = LMS_Prediction(X, Y, 2, 2014)
    print("prediction for 2014:", prediction)
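    # Illustrative sanity check: for a single feature the least-squares fit
    # has a closed form, b = sum((x - mean_x)(y - mean_y)) / sum((x - mean_x)^2)
    # and a = mean_y - b * mean_x, so the gradient-descent result can be
    # compared against it.
    xs = [x[0] for x in X]
    mean_x = sum(xs) / len(xs)
    mean_y = sum(Y) / len(Y)
    b = sum((xi - mean_x) * (yi - mean_y) for xi, yi in zip(xs, Y)) / sum((xi - mean_x) ** 2 for xi in xs)
    a = mean_y - b * mean_x
    print("closed-form fit for 2014:", a + b * 2014)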