# Create Tensors to hold input and outputs.
# By default, requires_grad=False, which indicates that we do not need to
# compute gradients with respect to these Tensors during the backward pass.
x = torch.linspace(-math.pi, math.pi, 2000, dtype=dtype)
y = torch.sin(x)

# Plot the target curve y = sin(x).
# (A stray bare `plt.plot` expression preceded this call in the original;
# it was a no-op attribute access and has been removed.)
plt.plot(x.detach().cpu(), y.detach().cpu())
# Create random Tensors for weights. For a third order polynomial, we need
# 4 weights: y = a + b x + c x^2 + d x^3.
# Setting requires_grad=True indicates that we want to compute gradients with
# respect to these Tensors during the backward pass.
from torch import tensor

# NOTE(review): despite the comment above, the weights start from fixed
# values (a=1, b=c=d=0), not random ones.
a, b, c, d = (
    tensor([v], dtype=dtype, requires_grad=True) for v in (1, 0, 0, 0)
)
a, b, c, d
learning_rate = 1e-6

# Initial (untrained) prediction, plotted for reference.
y_pred = a + b * x + c * x ** 2 + d * x ** 3
plt.plot(x.detach().cpu(), y_pred.detach().cpu())

# Track the last reported loss as a plain Python float. The original kept it
# as a requires_grad=True tensor and rebound it to the live `loss` tensor,
# which tracked gradients through the comparison and retained the whole
# autograd graph of every reported iteration in memory.
previous_loss = 0.0

for t in range(2000):
    # Forward pass: compute predicted y using operations on Tensors.
    y_pred = a + b * x + c * x ** 2 + d * x ** 3

    # Compute and print loss using operations on Tensors.
    # loss is a scalar Tensor; loss.item() gets the Python float it holds.
    loss = (y_pred - y).pow(2).sum()

    # Report (and plot the current fit) only when the loss has moved by
    # more than 50 since the last report, to keep the output readable.
    if abs(previous_loss - loss.item()) > 50:
        previous_loss = loss.item()
        print(t, loss.item(), previous_loss)
        plt.plot(x.detach().cpu(), y_pred.detach().cpu())

    # Use autograd to compute the backward pass. This call will compute the
    # gradient of loss with respect to all Tensors with requires_grad=True.
    # After this call a.grad, b.grad, c.grad and d.grad will be Tensors holding
    # the gradient of the loss with respect to a, b, c, d respectively.
    loss.backward()

    # Manually update weights using gradient descent. Wrap in torch.no_grad()
    # because weights have requires_grad=True, but we don't need to track this
    # in autograd.
    with torch.no_grad():
        a -= learning_rate * a.grad
        b -= learning_rate * b.grad
        c -= learning_rate * c.grad
        d -= learning_rate * d.grad

        # Manually zero the gradients after updating weights.
        a.grad = None
        b.grad = None
        c.grad = None
        d.grad = None

print(f'Result: y = {a.item()} + {b.item()} x + {c.item()} x^2 + {d.item()} x^3')
plt.plot(x.detach().cpu(), y.detach().cpu())