Deep Neural Networks with PyTorch - Linear Regression - Week 2 | Coursera
Linear Regression Prediction

Implementing linear regression in PyTorch: create a custom module (containing one linear regression layer) and train the linear regression model.
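The custom module itself is not shown in this excerpt. A minimal sketch of what such a module typically looks like (the class name LR and the layer sizes here are illustrative, not from the original post):

import torch
from torch import nn

class LR(nn.Module):
    # A custom module wrapping a single linear layer
    def __init__(self, input_size, output_size):
        super(LR, self).__init__()
        self.linear = nn.Linear(input_size, output_size)

    # Prediction
    def forward(self, x):
        return self.linear(x)

model = LR(1, 1)
x = torch.tensor([[1.0], [2.0]])
yhat = model(x)  # predictions for two inputs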
For linear regression, the standard noise assumption is Gaussian noise, and the average loss is the mean squared error (MSE) function. Setting the derivative of the loss to zero gives the minimizer analytically; in practice we use gradient descent, where η denotes the learning rate. If the learning rate is too high, the update may overshoot the optimal parameter value; if it is too low, many iterations are needed to reach the minimum.
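The loss and update rule referenced above appeared as images in the original post; reconstructed in standard notation, with η the learning rate:

\ell(w) = \frac{1}{N}\sum_{n=1}^{N}\left(y_n - w x_n\right)^2,
\qquad
w \leftarrow w - \eta\,\frac{\partial \ell(w)}{\partial w}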
Batch Gradient Descent uses the entire training set to update the model's parameters, so each epoch is one iteration.

Implementing Linear Regression in PyTorch -- Gradient (version with plots)
import torch

w = torch.tensor(-10.0, requires_grad = True)
X = torch.arange(-3, 3, 0.1).view(-1, 1)
f = -3 * X
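The code below also uses matplotlib, numpy, Y, forward, criterion, and lr, none of which appear in the excerpt. A minimal sketch of the missing pieces, consistent with how they are used later (the noise scale and the learning rate are assumptions):

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d  # registers the 3d projection used further below

Y = f + 0.1 * torch.randn(X.size())  # assumed: targets are f plus Gaussian noise

def forward(x):
    # Prediction of the one-parameter model
    return w * x

def criterion(yhat, y):
    # Mean squared error loss
    return torch.mean((yhat - y) ** 2)

lr = 0.1  # assumed learning rate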
# The class for plotting
class plot_diagram():

    # Constructor
    def __init__(self, X, Y, w, stop, go = False):
        start = w.data
        self.error = []
        self.parameter = []
        print(type(X.numpy()))
        self.X = X.numpy()
        self.Y = Y.numpy()
        self.parameter_values = torch.arange(start, stop)
        # Evaluate the loss at each candidate value of w, then restore w
        self.Loss_function = [criterion(forward(X), Y) for w.data in self.parameter_values]
        w.data = start

    # Executor
    def __call__(self, Yhat, w, error, n):
        self.error.append(error)
        self.parameter.append(w.data)
        plt.subplot(212)
        plt.plot(self.X, Yhat.detach().numpy())
        plt.plot(self.X, self.Y, 'ro')
        plt.xlabel('A')
        plt.ylim(-20, 20)
        plt.subplot(211)
        plt.title('Data Space (top) Estimated Line (bottom) Iteration ' + str(n))
        # Convert lists to PyTorch tensors
        parameter_values_tensor = torch.tensor(self.parameter_values)
        loss_function_tensor = torch.tensor(self.Loss_function)
        # Plot using the tensors
        plt.plot(parameter_values_tensor.numpy(), loss_function_tensor.numpy())
        plt.plot(self.parameter, self.error, 'ro')
        plt.xlabel('B')
        plt.figure()

    # Destructor
    def __del__(self):
        plt.close('all')
gradient_plot = plot_diagram(X, Y, w, stop = 5)
# Define a function to train the model
def train_model(iter):
    LOSS = []
    for epoch in range(iter):
        # make the prediction as we learned in the last lab
        Yhat = forward(X)
        # calculate the loss for this iteration
        loss = criterion(Yhat, Y)
        # plot the diagram so we can see what is happening
        gradient_plot(Yhat, w, loss.item(), epoch)
        # store the loss in a list
        LOSS.append(loss.item())
        # backward pass: compute the gradient of the loss with respect to all learnable parameters
        loss.backward()
        # update parameters
        w.data = w.data - lr * w.grad.data
        # zero the gradients before running the next backward pass
        w.grad.data.zero_()
train_model(4)

Implementing Linear Regression in PyTorch -- Training
This is similar to the section above, except that a bias term b is added, so the gradient step now updates both w and b. The class below plots the loss surface and the fitted line (see the training-loop sketch after it).
# The class for plotting the error surface
class plot_error_surfaces(object):

    # Constructor
    def __init__(self, w_range, b_range, X, Y, n_samples = 30, go = True):
        W = np.linspace(-w_range, w_range, n_samples)
        B = np.linspace(-b_range, b_range, n_samples)
        w, b = np.meshgrid(W, B)
        Z = np.zeros((n_samples, n_samples))
        count1 = 0
        self.y = Y.numpy()
        self.x = X.numpy()
        # Evaluate the MSE of the prediction w2 * x + b2 at each grid point
        for w1, b1 in zip(w, b):
            count2 = 0
            for w2, b2 in zip(w1, b1):
                Z[count1, count2] = np.mean((self.y - (w2 * self.x + b2)) ** 2)
                count2 += 1
            count1 += 1
        self.Z = Z
        self.w = w
        self.b = b
        self.W = []
        self.B = []
        self.LOSS = []
        self.n = 0
        if go:
            plt.figure()
            plt.figure(figsize = (7.5, 5))
            plt.axes(projection = '3d').plot_surface(self.w, self.b, self.Z, rstride = 1, cstride = 1, cmap = 'viridis', edgecolor = 'none')
            plt.title('Cost/Total Loss Surface')
            plt.xlabel('w')
            plt.ylabel('b')
            plt.show()
            plt.figure()
            plt.title('Cost/Total Loss Surface Contour')
            plt.xlabel('w')
            plt.ylabel('b')
            plt.contour(self.w, self.b, self.Z)
            plt.show()

    # Setter: record the current parameters and loss
    def set_para_loss(self, W, B, loss):
        self.n = self.n + 1
        self.W.append(W)
        self.B.append(B)
        self.LOSS.append(loss)

    # Plot the final wireframe surface and contour with the visited points
    def final_plot(self):
        ax = plt.axes(projection = '3d')
        ax.plot_wireframe(self.w, self.b, self.Z)
        ax.scatter(self.W, self.B, self.LOSS, c = 'r', marker = 'x', s = 200, alpha = 1)
        plt.figure()
        plt.contour(self.w, self.b, self.Z)
        plt.scatter(self.W, self.B, c = 'r', marker = 'x')
        plt.xlabel('w')
        plt.ylabel('b')
        plt.show()

    # Plot the data space and the loss-surface contour side by side
    def plot_ps(self):
        plt.subplot(121)
        plt.plot(self.x, self.y, 'ro', label = 'training points')
        plt.plot(self.x, self.W[-1] * self.x + self.B[-1], label = 'estimated line')
        plt.xlabel('x')
        plt.ylabel('y')
        plt.ylim((-10, 15))
        plt.title('Data Space Iteration: ' + str(self.n))
        plt.subplot(122)
        plt.contour(self.w, self.b, self.Z)
        plt.scatter(self.W, self.B, c = 'r', marker = 'x')
        plt.title('Total Loss Surface Contour Iteration ' + str(self.n))
        plt.xlabel('w')
        plt.ylabel('b')
        plt.show()
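The excerpt ends before the actual training code for the two-parameter model. A minimal sketch of how a lab-style loop would use the class above, assuming forward is redefined to include b; the starting values, surface ranges, learning rate, and plotting interval here are illustrative assumptions:

w = torch.tensor(-15.0, requires_grad = True)
b = torch.tensor(-10.0, requires_grad = True)

def forward(x):
    # Prediction now includes the bias term
    return w * x + b

get_surface = plot_error_surfaces(15, 15, X, Y, 30)  # assumed w/b ranges
lr = 0.1
LOSS = []

def train_model(iter):
    for epoch in range(iter):
        Yhat = forward(X)
        loss = criterion(Yhat, Y)
        # record the current (w, b, loss) point on the error surface
        get_surface.set_para_loss(w.data.tolist(), b.data.tolist(), loss.tolist())
        if epoch % 3 == 0:
            get_surface.plot_ps()
        LOSS.append(loss.item())
        loss.backward()
        # update both parameters with one gradient step, then zero the gradients
        w.data = w.data - lr * w.grad.data
        b.data = b.data - lr * b.grad.data
        w.grad.data.zero_()
        b.grad.data.zero_()

train_model(15)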