-
Notifications
You must be signed in to change notification settings - Fork 473
/
Copy path05-autograd.py
47 lines (40 loc) · 1.13 KB
/
05-autograd.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import torch
print('1.自动梯度计算')
# Step 1: mark x so that partial derivatives w.r.t. it are tracked.
x = torch.arange(4.0, requires_grad=True)
print('x:', x)
print('x.grad:', x.grad)  # nothing has been backpropagated yet -> None
# Step 2: record the computation of the target value y = 2 * <x, x>.
y = torch.dot(x, x) * 2
print('y:', y)
# Step 3: run backpropagation; Step 4: read the resulting gradient.
y.backward()
print('x.grad:', x.grad)
# d/dx of 2 * x.x is 4x — confirm elementwise.
print('x.grad == 4*x:', x.grad.eq(4 * x))

# Gradient of a second function. backward() ACCUMULATES into x.grad,
# so the buffer must be zeroed before reuse.
x.grad.zero_()
y = torch.sum(x)
print('y:', y)
y.backward()
print('x.grad:', x.grad)

# Backward through a non-scalar output: reduce y = x * x to a scalar
# with sum() before calling backward().
x.grad.zero_()
print('x:', x)
y = x * x
torch.sum(y).backward()
print('x.grad:', x.grad)
def f(a):
    """Piecewise-linear function of *a* built from Python control flow.

    For any input, the output equals k * a for some scalar k chosen by
    the loop/branch below, so after backward() a.grad equals f(a) / a.
    """
    b = a * 2
    print(b.norm())  # L2 norm: square root of the sum of squares
    # Keep doubling until the magnitude reaches 1000.
    while b.norm() < 1000:
        b = b * 2
    # Sign-dependent branch — autograd records whichever path was taken.
    return b if b.sum() > 0 else 100 * b
print('2.Python控制流的梯度计算')
# Step 1: create the scalar input, then attach gradient tracking to it.
a = torch.tensor(2.0)
a.requires_grad_(True)
print('a:', a)
# Step 2: evaluate the target function; the taken control-flow path is recorded.
d = f(a)
print('d:', d)
# Step 3: backpropagate; Step 4: read the gradient (equals d / a, since
# f is linear in its input along the recorded path).
d.backward()
print('a.grad:', a.grad)