Example 1

import torch
torch.__version__
x=torch.rand(3,4,requires_grad=True)
x
tensor([[0.9795, 0.8240, 0.6395, 0.1617],
        [0.4833, 0.4409, 0.3758, 0.7234],
        [0.9857, 0.9663, 0.5842, 0.8751]], requires_grad=True)
b=torch.rand(3,4,requires_grad=True)
t=x+b
y=t.sum()
y.backward()
b.grad
tensor([[1., 1., 1., 1.],
        [1., 1., 1., 1.],
        [1., 1., 1., 1.]])
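
Because y = (x + b).sum() simply adds every element, the partial derivative of y with respect to each element of b is 1, which is exactly the all-ones tensor shown above. A minimal check with torch.autograd.grad, using fresh tensors (x2, b2 are throwaway names for this sketch; torch is assumed to be imported as above):

x2=torch.rand(3,4)
b2=torch.rand(3,4,requires_grad=True)
y2=(x2+b2).sum()
grad_b2,=torch.autograd.grad(y2,b2)# equivalent to calling y2.backward() and reading b2.grad
print(grad_b2)# a 3x4 tensor of ones, matching b.grad above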

Example 2

x=torch.rand(1)
b=torch.rand(1,requires_grad=True)
w=torch.rand(1,requires_grad=True)
y=w*x
z=y+b
z.backward(retain_graph=True)# retain the computation graph so backward() can be called again
w.grad
tensor([0.5871])
b.grad
tensor([3.])
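
Note that b.grad is 3 rather than 1: gradients accumulate in .grad across repeated backward() calls (the graph is retained here, so backward can be called again), and this output is what you get after running the backward pass several times without clearing the gradient. A small sketch of the accumulation behaviour, using fresh tensors x3, b3, w3 so the variables above are left untouched:

x3=torch.rand(1)
b3=torch.rand(1,requires_grad=True)
w3=torch.rand(1,requires_grad=True)
z3=w3*x3+b3
z3.backward(retain_graph=True)
z3.backward(retain_graph=True)
print(b3.grad)# tensor([2.]) -- two backward passes, the gradients are summed
b3.grad.zero_()# clear the accumulated gradient in place
z3.backward(retain_graph=True)
print(b3.grad)# tensor([1.]) again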

A first linear regression model

import numpy as np
x_values=[i for i in range(11)]
x_train= np.array(x_values, dtype=np.float32)
x_train=x_train.reshape(-1,1)
x_train.shape
(11, 1)
y_values=[2*i+1 for i in x_values]
y_train=np.array(y_values,dtype=np.float32)
y_train=y_train.reshape(-1,1)
y_train.shape
(11, 1)
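
nn.Linear works on 2-D float32 batches of shape (num_samples, num_features), which is why the plain Python lists are cast to float32 and reshaped to (11, 1): 11 samples with a single feature each. A quick shape check (x_check is a throwaway name used only for this sketch):

x_check=torch.from_numpy(x_train)
print(x_check.shape,x_check.dtype)# torch.Size([11, 1]) torch.float32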

Building the model

import torch.nn as nn
class linearRegressionModel(nn.Module):# define a class named linearRegressionModel that inherits from nn.Module
    def __init__(self,input_dim,output_dim):# constructor; takes the input and output dimensions as parameters
        super(linearRegressionModel,self).__init__()
        self.linear=nn.Linear(input_dim,output_dim)# a fully connected layer mapping input_dim inputs to output_dim outputs
    def forward(self,x):
        out=self.linear(x)
        return out
input_dim=1
output_dim=1
model=linearRegressionModel(input_dim,output_dim)
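
Before training it can help to confirm what the model actually contains; printing it and its parameter shapes is a quick sanity check (the initial values are random and will differ from run to run):

print(model)# roughly: linearRegressionModel( (linear): Linear(in_features=1, out_features=1, bias=True) )
for name,param in model.named_parameters():
    print(name,param.shape)# linear.weight torch.Size([1, 1]) and linear.bias torch.Size([1])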

Specifying the parameters and the loss function

epochs=1000
learning_rate=0.01
optimizer=torch.optim.SGD(model.parameters(),lr=learning_rate)# create a stochastic gradient descent (SGD) optimizer
criterion=nn.MSELoss()# mean squared error loss
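
nn.MSELoss computes the mean of the squared differences between predictions and targets, and SGD then updates every parameter as param = param - learning_rate * param.grad. A tiny sanity check of the loss on made-up example values:

pred=torch.tensor([[1.0],[2.0]])# made-up example predictions
target=torch.tensor([[0.0],[2.5]])# made-up example targets
print(criterion(pred,target))# tensor(0.6250)
print(((pred-target)**2).mean())# tensor(0.6250), the same value computed by hand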

Training the model

for epoch in range(epochs):
    epoch+=1# shift to 1-based counting for the log below
    inputs=torch.from_numpy(x_train)# convert the NumPy arrays to tensors
    labels=torch.from_numpy(y_train)

    optimizer.zero_grad()# clear the gradients so they do not accumulate across iterations
    # forward pass
    outputs=model(inputs)
    # compute the loss
    loss=criterion(outputs,labels)
    # backward pass
    loss.backward()
    # update the weight parameters
    optimizer.step()
    if epoch%50==0:
        print('epoch {}, loss {}'.format(epoch,loss.item()))
epoch 50, loss 0.04499879851937294
epoch 100, loss 0.025665638968348503
epoch 150, loss 0.014638797380030155
epoch 200, loss 0.008349399082362652
epoch 250, loss 0.004762190859764814
epoch 300, loss 0.0027161596808582544
epoch 350, loss 0.0015491894446313381
epoch 400, loss 0.0008836076012812555
epoch 450, loss 0.0005039655370637774
epoch 500, loss 0.000287443253910169
epoch 550, loss 0.000163945704116486
epoch 600, loss 9.350906475447118e-05
epoch 650, loss 5.333393710316159e-05
epoch 700, loss 3.0422697818721645e-05
epoch 750, loss 1.7352540453430265e-05
epoch 800, loss 9.89796535577625e-06
epoch 850, loss 5.644690645567607e-06
epoch 900, loss 3.2191221635002876e-06
epoch 950, loss 1.8363726894676802e-06
epoch 1000, loss 1.047302703227615e-06
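
Since the training data was generated from y = 2x + 1, the learned parameters should end up very close to a weight of 2 and a bias of 1 (a quick check; the exact digits vary slightly between runs):

print(model.linear.weight.item())# roughly 2.0
print(model.linear.bias.item())# roughly 1.0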


Using the model to make predictions

predicted=model(torch.from_numpy(x_train).requires_grad_()).data.numpy()# forward pass on the training inputs, then convert the result back to a NumPy array
predicted
array([[ 0.99809617],
       [ 2.9983704 ],
       [ 4.9986444 ],
       [ 6.9989185 ],
       [ 8.999193  ],
       [10.999468  ],
       [12.999742  ],
       [15.000015  ],
       [17.00029   ],
       [19.000565  ],
       [21.00084   ]], dtype=float32)
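
The predictions line up with y_train = 2x + 1 almost exactly; a quick check of the largest absolute error:

print(np.abs(predicted-y_train).max())# roughly on the order of 1e-3 for this run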

Saving and loading the model (the trained weights)

# save the trained weights (the state dict)
torch.save(model.state_dict(),'model.pkl')
# load the model (read the saved weights back in)
model.load_state_dict(torch.load('model.pkl'))
<All keys matched successfully>
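
Only the weights are saved here, so the model class still has to be defined in code before load_state_dict can be called. If training is to be resumed later, a common pattern is to also save the optimizer state, for example (a sketch; 'checkpoint.pth' is just an example filename):

checkpoint={'model_state':model.state_dict(),'optimizer_state':optimizer.state_dict(),'epoch':epochs}
torch.save(checkpoint,'checkpoint.pth')
ckpt=torch.load('checkpoint.pth')
model.load_state_dict(ckpt['model_state'])
optimizer.load_state_dict(ckpt['optimizer_state'])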

