In [104]:
import torch
import numpy as np
import matplotlib.pyplot as plt
In [105]:
import torch.nn as nn
In [106]:
X = torch.linspace(1,50,50).reshape(-1,1)
X
Out[106]:
tensor([[ 1.],
        [ 2.],
        [ 3.],
        [ 4.],
        [ 5.],
        [ 6.],
        [ 7.],
        [ 8.],
        [ 9.],
        [10.],
        [11.],
        [12.],
        [13.],
        [14.],
        [15.],
        [16.],
        [17.],
        [18.],
        [19.],
        [20.],
        [21.],
        [22.],
        [23.],
        [24.],
        [25.],
        [26.],
        [27.],
        [28.],
        [29.],
        [30.],
        [31.],
        [32.],
        [33.],
        [34.],
        [35.],
        [36.],
        [37.],
        [38.],
        [39.],
        [40.],
        [41.],
        [42.],
        [43.],
        [44.],
        [45.],
        [46.],
        [47.],
        [48.],
        [49.],
        [50.]])
In [107]:
torch.manual_seed(71)
noise = torch.randint(-8, 9, (50, 1))   # integer noise in [-8, 8]; promoted to float when added to X
In [108]:
y = 2*X + 1 + noise   # synthetic target: y = 2x + 1 plus the random noise
In [109]:
y.shape
Out[109]:
torch.Size([50, 1])
In [110]:
plt.rcParams['figure.figsize'] = [12,8]
plt.scatter(X, y)
Out[110]:
<matplotlib.collections.PathCollection at 0x23c304b7400>
In [111]:
class Model(nn.Module):
    """Simple linear regression: a single fully connected layer."""
    def __init__(self, in_features, out_features):
        super().__init__()
        self.linear = nn.Linear(in_features, out_features)

    def forward(self, x):
        return self.linear(x)
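A quick added check (a sketch, not part of the original notebook): nn.Linear computes x @ W.T + b, so the Model class above is just a thin wrapper around that affine map.
In [ ]:
# Sketch: confirm nn.Linear applies y = x @ W.T + b
lin = nn.Linear(1, 1)
x_check = torch.tensor([[2.0]])
manual = x_check @ lin.weight.T + lin.bias
print(torch.allclose(lin(x_check), manual))   # True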
In [112]:
torch.manual_seed(59)
Out[112]:
<torch._C.Generator at 0x23c289ba210>
In [113]:
model = Model(1,1)
print(model)
Model(
  (linear): Linear(in_features=1, out_features=1, bias=True)
)
In [114]:
print(model.linear.weight, model.linear.bias)
Parameter containing:
tensor([[0.1060]], requires_grad=True) Parameter containing:
tensor([0.9638], requires_grad=True)
In [115]:
for name, param in model.named_parameters():
    print(f'{name} : {param.item()}')
linear.weight : 0.10597813129425049
linear.bias : 0.9637961387634277
In [116]:
x1 = np.linspace(0,50,50)
In [117]:
w1, b1 = 0.10597813129425049, 0.9637961387634277   # weight and bias of the untrained model, copied from above
y1 = w1*x1 + b1
In [118]:
plt.plot(x1, y1,'r')
plt.scatter(X, y)

Out[118]:
<matplotlib.collections.PathCollection at 0x23c307cb820>
In [119]:
criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
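As an added side check (a sketch, not in the original run): nn.MSELoss with its default 'mean' reduction is just the average squared error, which can be reproduced by hand.
In [ ]:
# Sketch: MSELoss (reduction='mean') equals the mean of squared errors
pred = model(X)
manual_mse = ((pred - y) ** 2).mean()
print(torch.allclose(criterion(pred, y), manual_mse))   # True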
In [120]:
import tqdm
epochs = 20
losses = []
for i in range(epochs):
    ypred = model(X)                 # forward pass
    loss = criterion(ypred, y)       # mean squared error
    losses.append(loss.item())       # store the scalar loss, detached from the graph
    print(f'Epoch: {i} Loss:{loss.item()}')
    optimizer.zero_grad()            # clear accumulated gradients
    loss.backward()                  # backpropagate
    optimizer.step()                 # update weight and bias
Epoch: 0 Loss:3057.216796875
Epoch: 1 Loss:795363.8125
Epoch: 2 Loss:208347296.0
Epoch: 3 Loss:54578487296.0
Epoch: 4 Loss:14297332711424.0
Epoch: 5 Loss:3745317245681664.0
Epoch: 6 Loss:9.811197203903939e+17
Epoch: 7 Loss:2.570133762674304e+20
Epoch: 8 Loss:6.7326999478539085e+22
Epoch: 9 Loss:1.7636917020894142e+25
Epoch: 10 Loss:4.620152631235363e+27
Epoch: 11 Loss:1.2102915280014751e+30
Epoch: 12 Loss:3.1704688918006736e+32
Epoch: 13 Loss:8.305331305573349e+34
Epoch: 14 Loss:inf
Epoch: 15 Loss:inf
Epoch: 16 Loss:inf
Epoch: 17 Loss:inf
Epoch: 18 Loss:inf
Epoch: 19 Loss:inf
In [121]:
plt.plot(range(epochs), losses)
Out[121]:
[<matplotlib.lines.Line2D at 0x23c30969f10>]
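The loss explodes to inf because the inputs run from 1 to 50 without any scaling, so with lr=0.01 each SGD step overshoots and the weights diverge. A minimal sketch of one fix (an addition, not from the original run): reinitialize the model and use a smaller learning rate such as 0.001; standardizing X would work as well. Exact loss values will depend on the seed.
In [ ]:
# Sketch: retrain with a smaller learning rate so SGD no longer overshoots
torch.manual_seed(59)
model = Model(1, 1)
criterion = nn.MSELoss()
optimizer = torch.optim.SGD(model.parameters(), lr=0.001)   # 10x smaller than before

losses = []
for i in range(50):
    ypred = model(X)
    loss = criterion(ypred, y)
    losses.append(loss.item())
    optimizer.zero_grad()
    loss.backward()
    optimizer.step()

plt.plot(losses)   # the loss curve should now decrease instead of blowing up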