In [10]:
for k in range(20):
  
    # Forward pass
    ypred = [n(x) for x in xs]
    loss = sum((yout - ygt) ** 2 for ygt, yout in zip(ys, ypred))

    # Zero the gradients before the backward pass (they accumulate otherwise)
    for p in n.parameters():
        p.grad = 0.0

    # Backward pass: populate p.grad for every parameter
    loss.backward()

    # Gradient descent update on all parameters (step size 0.5)
    for p in n.parameters():
        p.data += -0.5 * p.grad

    print(k, loss.data)
0 4.4804998792402
1 0.007573759202783073
2 0.00017106008463719198
3 0.00015847488604998254
4 0.0001476920568438104
5 0.00013834432209593433
6 0.00013015875227055242
7 0.00012292811077977323
8 0.00011649209262856946
9 0.00011072467352173466
10 0.00010552535932148685
11 0.00010081299688759957
12 9.652130947567148e-05
13 9.259561919100697e-05
14 8.899040277298505e-05
15 8.566744282464198e-05
16 8.259441135651752e-05
17 7.974377178736054e-05
18 7.709191865095675e-05
19 7.461849689459692e-05
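
A quick sanity check after training is to run the forward pass once more and compare the outputs against the targets; a minimal sketch, assuming the same `n`, `xs`, and `ys` defined in the earlier cells:

# Re-run the forward pass with the trained parameters; each prediction
# should now sit very close to its target, since the remaining loss is ~7.5e-05.
ypred = [n(x) for x in xs]
for ygt, yout in zip(ys, ypred):
    print(ygt, yout.data)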