pytorch: One-Dimensional Linear Regression (Part 1)
This post walks through a minimal one-dimensional linear regression example in PyTorch: a small training set is fitted with a single nn.Linear(1, 1) layer trained by stochastic gradient descent on the mean squared error, and the raw data, the fitted line, and the training loss are visualized with matplotlib.
# -*- coding: utf-8 -*-
import torch
import numpy as np
from torch import nn, optim
import matplotlib.pyplot as plt
from torch.autograd import Variable
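# Note (added, not in the original post): since PyTorch 0.4, torch.autograd.Variable
# has been merged into Tensor, so the Variable(...) wrappers used below are no longer
# required; plain tensors can be passed to the model directly. They are kept here to
# match the original code.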
# Configure matplotlib to render the Chinese labels used in the plots below
plt.rcParams['font.sans-serif'] = ['SimHei']
plt.rcParams['axes.unicode_minus'] = False
# Training data
x_train = np.array([[3.3],[4.4],[5.5],[6.71],[6.93],[4.168],[9.779],[6.182],[7.59],[2.167],[7.042],[10.791],[5.313],[7.997],[3.1]],dtype=np.float32)
y_train = np.array([[1.7],[2.76],[2.09],[3.19],[1.694],[1.573],[3.366],[2.596],[2.53],[1.221],[2.827],[3.465],[1.65],[2.904],[1.3]],dtype=np.float32)
# Visualize the training data
plt.scatter(x_train, y_train, marker='o', c='red')
plt.title(label='一維線性回歸-原始數據分布展示')
plt.show()
# Convert the training data from numpy arrays to torch tensors
x_train = torch.from_numpy(x_train)
y_train = torch.from_numpy(y_train)

# Build the linear model
class LinearRegression(nn.Module):
    def __init__(self):
        super(LinearRegression, self).__init__()
        # Both the input and the output dimensions are 1
        self.linear = nn.Linear(1, 1)

    def forward(self, x):
        out = self.linear(x)
        return out

# Move the model to the GPU if one is available
if torch.cuda.is_available():
    model = LinearRegression().cuda()
else:
    model = LinearRegression()
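# Note (added, not in the original post): the nn.Linear(1, 1) layer above holds a
# weight of shape (1, 1) and a bias of shape (1); for an input batch x of shape (N, 1)
# it computes x @ weight.T + bias, i.e. the familiar y = w * x + b of 1-D regression.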
# Loss function: mean squared error (MSE)
criterion = nn.MSELoss()
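# Note (added): with its default settings, nn.MSELoss returns the mean of the squared
# errors over the batch, loss = mean((out_i - target_i) ** 2).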
# Use stochastic gradient descent (SGD) as the optimizer
optimizer = optim.SGD(model.parameters(), lr=1e-3)

num_epochs = 1000  # total number of training epochs
ctn = []  # epoch indices
lo = []   # loss values

for epoch in range(num_epochs):
    if torch.cuda.is_available():
        inputs = Variable(x_train).cuda()
        target = Variable(y_train).cuda()
    else:
        inputs = Variable(x_train)
        target = Variable(y_train)
    # Forward pass
    out = model(inputs)
    loss = criterion(out, target)
    # Backward pass
    optimizer.zero_grad()  # clear accumulated gradients
    loss.backward()        # back-propagate
    optimizer.step()       # update the parameters
    # Record the loss of each epoch
    ctn.append(epoch + 1)
    lo.append(loss.item())
    if (epoch + 1) % 20 == 0:
        print('Epoch[{}/{}], loss:{:.6f}'.format(epoch + 1, num_epochs, loss.item()))

# Switch the model to evaluation mode before predicting
model.eval()
# Run the trained model on the training inputs
if torch.cuda.is_available():
    predict = model(Variable(x_train).cuda())
    predict = predict.data.cpu().numpy()
else:
    predict = model(Variable(x_train))
    predict = predict.data.numpy()

# Plot the original data together with the fitted line
plt.plot(x_train.numpy(), y_train.numpy(), 'ro', label='Original Data')
plt.plot(x_train.numpy(), predict, label='擬合的直線')
plt.legend()
plt.title(label='擬合的直線')
plt.show()
# Plot the relationship between the number of training epochs and the loss
plt.plot(ctn, lo)
plt.title(label='訓練次數與損失')
plt.xlabel('訓練次數')
plt.ylabel('損失值')
plt.show()
Experimental results: running the script displays three figures in turn: the scatter plot of the raw training data, the fitted line drawn over the original points, and the curve of training loss versus epoch. The loss is also printed to the console every 20 epochs.
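As a quick check (a minimal sketch, not part of the original post), the learned slope and intercept can be read directly from the trained layer; model.linear.weight and model.linear.bias are the w and b of the fitted line y = w * x + b:

w = model.linear.weight.item()  # learned slope
b = model.linear.bias.item()    # learned intercept
print('fitted line: y = {:.4f} * x + {:.4f}'.format(w, b))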
Summary

The script above fits a one-variable linear model: a LinearRegression module wrapping nn.Linear(1, 1) is trained for 1000 epochs with SGD (lr=1e-3) on the MSE loss, and the fitted line and the loss curve are plotted with matplotlib.