2 years ago
In [5]:
``import torch``

In [9]:
``````x = torch.ones(2, 2, requires_grad=True)  # requires_grad=True: autograd records every op on x
print(x)
``````
```tensor([[1., 1.], [1., 1.]], requires_grad=True) ```
In [10]:
``````y = x + 2  # result of an op on a tracked tensor, so y carries a grad_fn
print(y)
``````
```tensor([[3., 3.], [3., 3.]], grad_fn=<AddBackward0>) ```
In [11]:
``print(y.grad_fn)  # the Function that produced y (AddBackward0, per the output below)``
```<AddBackward0 object at 0x7f9258b42be0> ```
In [12]:
``````z = y* y *3
z
``````
Out[12]:
``````tensor([[27., 27.],
        [27., 27.]], grad_fn=<MulBackward0>)``````
In [14]:
``````out = z.mean()
print (z, out)  # print both tensors
# MulBackward0 = element-wise multiplication; MeanBackward0 = mean``````
```tensor([[27., 27.], [27., 27.]], grad_fn=<MulBackward0>) tensor(27., grad_fn=<MeanBackward0>) ```

In [15]:
``a = torch.randn(2,2)  # user-created tensor: requires_grad defaults to False``
In [16]:
``a = ((a *3) / (a -1))  # ops on an untracked tensor stay untracked``
In [17]:
``print(a.requires_grad)  # still False: grad tracking was never enabled for a``
```False ```
In [18]:
``a.requires_grad_(True)  # trailing underscore = in-place: enables tracking on the existing tensor``
Out[18]:
``````tensor([[-0.8642,  1.1833],
        [..., ...]], requires_grad=True)  <- second row truncated in this export``````
In [19]:
``b = (a * a).sum()  # a is now tracked, so b gets a grad_fn``
In [20]:
``print(b.grad_fn)  # SumBackward0: the op that produced b``
```<SumBackward0 object at 0x7f92588567b8> ```

In [27]:
``#print(x.backward())  # NOTE(review): the grad printed below (4.5) implies out.backward() ran in a cell missing from this export (execution count jumps In[14] -> In[27])``
In [28]:
``print(x.grad) # grad: short for "gradient"; holds d(out)/dx accumulated by backward()``
```tensor([[4.5000, 4.5000], [4.5000, 4.5000]]) ```
In [29]:
``````x = torch.randn(3, requires_grad=True)
y = x * 2
while y.data.norm() < 1000:
y = y * 2
print(y)
``````
```tensor([ 598.5151, -414.5598, -1321.2048], grad_fn=<MulBackward0>) ```
In [30]:
``````gradients = torch.tensor([0.1, 1.0, 0.0001], dtype=torch.float)
```tensor([1.0240e+02, 1.0240e+03, 1.0240e-01]) ```
``import jovian``
``jovian.commit()  # uploads/saves this notebook to jovian.ml (see "[jovian] Saving notebook.." below)``
```[jovian] Saving notebook.. ```
`` ``