Deep Learning Note 4
作者: Agoni7z, 2024-08-14 10:32:09, 所有人可见, 阅读 14
Autograd
import torch
print('1. 自动梯度计算')
x = torch.arange(4.0, requires_grad=True)
print('x:', x)
print('x.grad:', x.grad)
y = 2 * torch.dot(x, x)
print('y:', y)
y.backward()
print('x.grad:', x.grad)
print('x.grad == 4*x:', x.grad == 4 * x)
x.grad.zero_()
y = x.sum()
print('y:', y)
y.backward()
print('x.grad:', x.grad)
x.grad.zero_()
print('x:', x)
y = x * x
y.sum().backward()
print('x.grad:', x.grad)
def f(a):
b = a * 2
print(b.norm())
while b.norm() < 1000:
b = b * 2
if b.sum() > 0:
c = b
else:
c = 100 * b
return c
print('2.Python控制流的梯度计算')
a = torch.tensor(2.0)
a.requires_grad_(True)
print('a:', a)
d = f(a)
print('d:', d)
d.backward()
print('a.grad:', a.grad)