# Build a geometric decay sequence: start at 10.0 and multiply by 0.98
# five times (6 values total). Fixed: the loop body must be indented.
s = [10.0]
for _ in range(5):
    s.append(s[-1] * 0.98)
# Hand-picked sample of the decaying weight trajectory.
s = [10.0, 9.8, 9.6]


def loss(x):
    """Quadratic loss f(x) = x**2 + 20; its minimum is at x = 0."""
    return x ** 2 + 20


def grad(x):
    """Analytic derivative of the loss: f'(x) = 2 * x."""
    return x * 2


# Evaluate loss and gradient at one point of the trajectory.
# Named functions replace `lambda` assignments (PEP 8 E731).
w = 9.8
print(loss(w), grad(w))
At the extremum (the minimum of f), the derivative is zero; as w decays toward 0, the gradient 2w shrinks toward 0 as well.
import matplotlib.pyplot as plt
import numpy as np
# Simulate 100 steps of geometric decay (w <- 0.98 * w) and plot the
# loss f(w) = w**2 + 20 together with its gradient f'(w) = 2w.
w = [10.0]
for _ in range(100):
    w.append(w[-1] * 0.98)  # fixed: loop body must be indented

x = np.array(w)
# np.pow only exists as an alias in NumPy >= 2.0; np.power is portable.
y = np.power(x, 2) + 20
y_grad = x * 2

plt.figure(figsize=(10, 6))
plt.xlim(10, 0)  # reversed x-axis: reads left-to-right as descent toward 0
plt.scatter(x, y, label='f(x)', color='blue')
plt.scatter(x, y_grad, label="grad(x)", color='red')
plt.grid()
plt.legend()
plt.tight_layout()
plt.show()

Top comments (0)