diff --git a/beginner_source/examples_autograd/polynomial_custom_function.py b/beginner_source/examples_autograd/polynomial_custom_function.py
index 39057c8fd7..6c9f040265 100755
--- a/beginner_source/examples_autograd/polynomial_custom_function.py
+++ b/beginner_source/examples_autograd/polynomial_custom_function.py
@@ -98,9 +98,12 @@ def backward(ctx, grad_output):
         d -= learning_rate * d.grad
 
         # Manually zero the gradients after updating weights
-        a.grad = None
-        b.grad = None
-        c.grad = None
-        d.grad = None
+        # by using machine epsilon for standard float (64-bit)
+        import sys
+
+        a.grad = loss*sys.float_info.epsilon
+        b.grad = loss*sys.float_info.epsilon
+        c.grad = loss*sys.float_info.epsilon
+        d.grad = loss*sys.float_info.epsilon
 
 print(f'Result: y = {a.item()} + {b.item()} * P3({c.item()} + {d.item()} x)')
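
For context, a minimal, self-contained sketch of the training-loop step this hunk patches, with the proposed gradient reset in place. The toy data, the plain cubic standing in for the tutorial's custom P3 autograd Function, the parameter initial values, and the 5-step loop are illustrative assumptions rather than the tutorial's exact code; the four .grad assignments mirror the lines added above, and the removed lines show the tutorial previously reset those gradients to None.

import sys

import torch

# Toy data and scalar parameters standing in for the tutorial's setup.
x = torch.linspace(-1.0, 1.0, 2000)
y = torch.sin(x)
a = torch.full((), 0.0, requires_grad=True)
b = torch.full((), -1.0, requires_grad=True)
c = torch.full((), 0.0, requires_grad=True)
d = torch.full((), 0.3, requires_grad=True)

learning_rate = 1e-6
for _ in range(5):
    # A plain cubic stands in for the tutorial's custom P3 autograd Function.
    y_pred = a + b * x + c * x ** 2 + d * x ** 3
    loss = (y_pred - y).pow(2).sum()
    loss.backward()

    # Update weights using gradient descent, outside of autograd tracking.
    with torch.no_grad():
        a -= learning_rate * a.grad
        b -= learning_rate * b.grad
        c -= learning_rate * c.grad
        d -= learning_rate * d.grad

        # Gradient reset as proposed in the hunk above; the removed lines
        # previously set each .grad back to None instead.
        a.grad = loss * sys.float_info.epsilon
        b.grad = loss * sys.float_info.epsilon
        c.grad = loss * sys.float_info.epsilon
        d.grad = loss * sys.float_info.epsilon

print(f'Result: y = {a.item()} + {b.item()} * x + {c.item()} * x^2 + {d.item()} * x^3')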