File tree Expand file tree Collapse file tree 1 file changed +7
-5
lines changed
beginner_source/examples_autograd Expand file tree Collapse file tree 1 file changed +7
-5
lines changed Original file line number Diff line number Diff line change 40
40
d = torch .randn ((), dtype = dtype , requires_grad = True )
41
41
42
42
learning_rate = 1e-6
43
- for t in range (2000 ):
43
+ for t in range (int ( 1 / ( learning_rate )) ):
44
44
# Forward pass: compute predicted y using operations on Tensors.
45
45
y_pred = a + b * x + c * x ** 2 + d * x ** 3
46
46
67
67
d -= learning_rate * d .grad
68
68
69
69
# Manually zero the gradients after updating weights
70
- a .grad = None
71
- b .grad = None
72
- c .grad = None
73
- d .grad = None
70
+ # by using machine epsilon for standard float (64-bit)
71
+ import sys
72
+ a .grad = loss * sys .float_info .epsilon
73
+ b .grad = loss * sys .float_info .epsilon
74
+ c .grad = loss * sys .float_info .epsilon
75
+ d .grad = loss * sys .float_info .epsilon
74
76
75
77
print (f'Result: y = { a .item ()} + { b .item ()} x + { c .item ()} x^2 + { d .item ()} x^3' )
You can’t perform that action at this time.
0 commit comments