From 6dea156c2fc0f63e2454e5f444308086f797eb69 Mon Sep 17 00:00:00 2001 From: Nipun Sadvilkar Date: Fri, 3 Jul 2020 11:19:06 +0530 Subject: [PATCH] Fix typo in gradient-of-loss comment: gradients of loss with respect to w1 and w2 --- README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 65ffc4e..14ec8c9 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ for t in range(500): loss = np.square(y_pred - y).sum() print(t, loss) - # Backprop to compute gradients of w1 and w2 with respect to loss + # Backprop to compute gradients of loss with respect to w1 and w2 grad_y_pred = 2.0 * (y_pred - y) grad_w2 = h_relu.T.dot(grad_y_pred) grad_h_relu = grad_y_pred.dot(w2.T) @@ -131,7 +131,7 @@ for t in range(500): loss = (y_pred - y).pow(2).sum() print(t, loss.item()) - # Backprop to compute gradients of w1 and w2 with respect to loss + # Backprop to compute gradients of loss with respect to w1 and w2 grad_y_pred = 2.0 * (y_pred - y) grad_w2 = h_relu.t().mm(grad_y_pred) grad_h_relu = grad_y_pred.mm(w2.t())