其他分享
首页 > 其他分享 > pytorch----gradient descent

pytorch----gradient descent

作者:互联网

import numpy as np
import matplotlib.pyplot as plt
# Training samples: the targets follow y = 2 * x exactly, so gradient
# descent should drive the weight w toward 2.
x_data = [1, 2, 3]
y_data = [2, 4, 6]

# Initial weight guess; the training loop below updates this module-level value.
w = 1


def forword(x):
    """Linear model prediction: multiply the input by the current weight w.

    NOTE(review): the name is a typo for "forward" but is kept unchanged
    because other code in this file calls it by this name.
    """
    return w * x
def cost(xn, yn):
    """Return the mean squared error of the model over the dataset.

    Args:
        xn: sequence of input values.
        yn: sequence of target values, paired element-wise with ``xn``.

    Returns:
        The average of ``(forword(x) - y) ** 2`` over the paired samples.

    Raises:
        ZeroDivisionError: if ``xn`` is empty (unchanged from the original).
    """
    # sum() over a generator replaces the original manual accumulator,
    # which also shadowed the function's own name with a local variable
    # (`cost = 0`) — a readability hazard that would break any recursive use.
    return sum((forword(x) - y) ** 2 for x, y in zip(xn, yn)) / len(xn)
def gradient(xn, yn):
    """Analytic gradient of the MSE cost with respect to the weight w.

    Uses d/dw mean((x*w - y)^2) = mean(2 * x * (x*w - y)), reading the
    module-level weight ``w``.
    """
    samples = list(zip(xn, yn))
    return sum(2 * xi * (xi * w - yi) for xi, yi in samples) / len(xn)

print("训练之前",4,forword(4))
epoch_list=[]
cost_list=[]
for epoch in range(100):
cost_val=cost(x_data,y_data)
grad_val=gradient(x_data,y_data)
w-=0.01*grad_val #用学习率为0.01来更新
epoch_list.append(epoch)
cost_list.append(cost_val)
print('epoch:',epoch,'w=',w,'loss=',cost_val)
print("训练之后",4,forword(4))
plt.plot(epoch_list, cost_list)
plt.ylabel('数据集的平均损失')
plt.xlabel('次数')
plt.show()

标签:xn,val,gradient,list,epoch,----,pytorch,cost,data
来源: https://www.cnblogs.com/xinrui-wang/p/15767637.html