In [1]:
import torch

# Single-neuron forward/backward pass: o = tanh(x1*w1 + x2*w2 + b).
# Reproduces the micrograd "manual neuron" example with PyTorch autograd.
#
# Note: use torch.tensor(data, requires_grad=True) — the idiomatic factory —
# instead of the legacy torch.Tensor(...) constructor plus a post-hoc
# `.requires_grad = True` mutation.

# inputs x1, x2 (leaf tensors tracked by autograd)
x1 = torch.tensor([2.0], requires_grad=True)
# -0.0 kept deliberately so w2.grad prints -0.0, matching the recorded output
x2 = torch.tensor([-0.0], requires_grad=True)

# weights w1, w2
w1 = torch.tensor([-3.0], requires_grad=True)
w2 = torch.tensor([1.0], requires_grad=True)

# bias of the neuron; this specific value makes n land where tanh(n) ≈ 1/sqrt(2)
b = torch.tensor([6.8813735870195432], requires_grad=True)

# pre-activation: n = x1*w1 + x2*w2 + b
n = x1*w1 + x2*w2 + b

# squashing nonlinearity: o = tanh(n)
o = torch.tanh(n)

print(o.item())

# backpropagate do/d{x1,w1,x2,w2,b} into each leaf's .grad
o.backward()

print("------------")
print("x1", x1.grad.item())
print("w1", w1.grad.item())
print("x2", x2.grad.item())
print("w2", w2.grad.item())
0.7071067094802856
------------
x1 -1.5000003576278687
w1 1.000000238418579
x2 0.5000001192092896
w2 -0.0