PyTorch深度学习实践(二)---线性模型
#课堂代码-线性模型
import numpy as np
import matplotlib.pyplot as plt

# Training data for the target linear model y = 2x.
x_data = [1.0, 2.0, 3.0]
y_data = [2.0, 4.0, 6.0]


def forward(x):
    """Model prediction y_hat = w * x (w is the global weight being swept)."""
    return x * w


def loss(x, y):
    """Squared error between the prediction for x and the target y."""
    y_pred = forward(x)
    return (y_pred - y) ** 2


w_list = []
mse_list = []

# Exhaustively sweep w over [0.0, 4.0] in steps of 0.1 and record the
# mean squared error of the model at each candidate weight.
for w in np.arange(0.0, 4.1, 0.1):
    print("w=", w)
    loss_sum = 0
    for x_val, y_val in zip(x_data, y_data):
        y_pred_val = forward(x_val)
        loss_val = loss(x_val, y_val)
        loss_sum += loss_val
        print('\t', x_val, y_val, y_pred_val, loss_val)
    # Bug fix: MSE is the *mean* of the squared errors; the original printed
    # the raw sum under the label "MSE=" while storing loss_sum/3 in the list.
    print('MSE=', loss_sum / len(x_data))
    w_list.append(w)
    mse_list.append(loss_sum / len(x_data))

# Plot the loss curve: MSE as a function of the weight w.
plt.plot(w_list, mse_list)
plt.ylabel('Loss')
plt.xlabel('w')
plt.show()
#课后作业-线性模型
import numpy as np
import matplotlib.pyplot as plt

# Training data for the target affine model y = 2x + 2.
x_data = [1.0, 2.0, 3.0]
y_data = [4.0, 6.0, 8.0]


def forward(x):
    """Model prediction y_hat = w * x + b (w and b are the globals being swept)."""
    return w * x + b


def loss(x, y):
    """Squared error between the prediction for x and the target y."""
    y_pred = forward(x)
    return (y_pred - y) ** 2


w_list = []
b_list = []
mse_list = []

# Grid-search both parameters over [0.0, 4.0] x [0.0, 4.0].
for w in np.arange(0.0, 4.1, 0.1):
    for b in np.arange(0.0, 4.1, 0.1):
        loss_sum = 0
        for x_val, y_val in zip(x_data, y_data):
            y_pred_val = forward(x_val)
            loss_val = loss(x_val, y_val)
            print('\t', x_val, y_val, y_pred_val, loss_val)
            loss_sum += loss_val
        mse = loss_sum / len(x_data)
        # Bug fix: record w once per (w, b) pair so the three lists stay
        # aligned. The original appended w only in the outer loop, leaving
        # w_list with 41 entries against 1681 in b_list/mse_list.
        w_list.append(w)
        b_list.append(b)
        mse_list.append(mse)
        print('::::::::::', w, b, mse)

# Visualize the loss surface over (w, b). The original imported pyplot but
# never produced the plot the assignment asks for.
ax = plt.axes(projection='3d')
ax.scatter(w_list, b_list, mse_list)
ax.set_xlabel('w')
ax.set_ylabel('b')
ax.set_zlabel('MSE')
plt.show()
注:
(1)zip()
# zip() pairs up elements position-wise; iteration stops at the shortest input.
a = [1, 2, 3]
b = [4, 5, 6]
c = [4, 5, 6, 7, 8]

# NOTE: in Python 3, zip() returns a lazy iterator, not a list as in
# Python 2 — wrap it in list() to materialize and display the pairs.
zipped = list(zip(a, b))
print(zipped)            # [(1, 4), (2, 5), (3, 6)]

# With unequal lengths, the result is truncated to the shortest input.
print(list(zip(a, c)))   # [(1, 4), (2, 5), (3, 6)]

# zip(*pairs) is the inverse ("unzip"): it regroups the pairs by position.
print(list(zip(*zipped)))  # [(1, 2, 3), (4, 5, 6)]
(2)课程:线性模型