From 4ed42912449c53960b2feeae9980491652ec094f Mon Sep 17 00:00:00 2001 From: jbaremoney Date: Thu, 11 Sep 2025 22:04:20 -0500 Subject: [PATCH] fix: correct wording error in numpy warmup In the backpropagation section, there was a comment saying the code was computing the gradient of parameters a, b, c, d with respect to the loss. This is flipped and causes confusion about the math of backprop. --- beginner_source/examples_tensor/polynomial_numpy.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/beginner_source/examples_tensor/polynomial_numpy.py b/beginner_source/examples_tensor/polynomial_numpy.py index a1a378e50ed..6eea486f712 100755 --- a/beginner_source/examples_tensor/polynomial_numpy.py +++ b/beginner_source/examples_tensor/polynomial_numpy.py @@ -37,7 +37,7 @@ if t % 100 == 99: print(t, loss) - # Backprop to compute gradients of a, b, c, d with respect to loss + # Backprop to compute gradients of loss with respect to parameters a, b, c, d grad_y_pred = 2.0 * (y_pred - y) grad_a = grad_y_pred.sum() grad_b = (grad_y_pred * x).sum()