import numpy as np
from scipy.optimize import minimize
import matplotlib.pyplot as plt
# The data
x = np.array([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5])
y = np.array([-53.9, -28.5, -20.7, -3.6, -9.8, 5.0, 4.2, 5.1, 11.4, 27.4, 44.0])
# Creating a matrix with the powers of x (one row per power: x**3, x**2, x, 1)
xmat = np.vstack((x**3, x**2, x, np.ones(len(x))))
print(xmat)
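As a quick sanity check (an addition, not in the original), NumPy's np.vander builds the same matrix: np.vander(x, 4) has columns x**3, x**2, x, 1, so its transpose should equal xmat.
# Cross-check the hand-built design matrix against np.vander.
assert np.allclose(xmat, np.vander(x, 4).T)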
def residuo(w):
    # Residual vector: observed y minus the cubic model w.dot(xmat)
    return y - w.dot(xmat)

def err(w):
    # Sum of squared residuals (the least-squares objective)
    return np.sum(residuo(w)**2)
# Testing the function: with w = 0 the error should equal the sum of y**2
print(err(np.array([0, 0, 0, 0])))
print(np.sum(y**2))
# Both values match, ok
def derr(w):
    # Analytic gradient of err: d/dw sum((y - w.xmat)**2) = -2 * sum(residual * xmat)
    return -2.0*np.sum(residuo(w)*xmat, axis=1)
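Before using the analytic gradient, it is worth validating it numerically (an added check, not in the original); scipy.optimize.check_grad compares derr against a finite-difference gradient of err and returns the norm of the difference:
from scipy.optimize import check_grad

# A small value (relative to the gradient's magnitude) indicates
# that derr matches the numerical gradient at the test point.
print(check_grad(err, derr, np.array([1.0, 1.0, 1.0, 1.0])))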
# 1. Gradient descent with epsilon=1.e-5
w = np.array([0.0, 0.0, 0.0, 0.0])
print("error: "+str(err(w)))
for i in range(50):
    # One descent step with learning rate 1e-5
    w = w - 1.0e-5*derr(w)
    if i % 10 == 0:
        print("error: "+str(err(w)))
print("error: "+str(err(w)))
print("w: "+str(w))
ypred = w[0]*x**3 + w[1]*x**2 + w[2]*x + w[3]
plt.plot(x, y, 'ro', label='Original data')
plt.plot(x, ypred, label='Fitted curve')
plt.legend()
plt.show()
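For reference, this least-squares problem also has a closed-form solution; comparing against it shows how far 50 gradient steps still are from the optimum. The cross-check below is an addition, using only NumPy's np.linalg.lstsq:
# Exact least-squares solution of xmat.T @ w ~= y.
w_exact, *_ = np.linalg.lstsq(xmat.T, y, rcond=None)
print("exact w:   "+str(w_exact))
print("exact err: "+str(err(w_exact)))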
# 2. Gradient descent with epsilon=1.e-4
w = np.array([0.0, 0.0, 0.0, 0.0])
print("error: "+str(err(w)))
for i in range(50):
    # Same loop with a 10x larger learning rate
    w = w - 1.0e-4*derr(w)
    if i % 10 == 0:
        print("error: "+str(err(w)))
print("error: "+str(err(w)))
print("w: "+str(w))
ypred = w[0]*x**3 + w[1]*x**2 + w[2]*x + w[3]
plt.plot(x, y, 'ro', label='Original data')
plt.plot(x, ypred, label='Fitted curve')
plt.legend()
plt.show()
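A note on step size (added here, standard analysis for quadratic objectives): err is quadratic in w, so its Hessian is the constant matrix 2*xmat@xmat.T, and fixed-step gradient descent converges only when epsilon is below 1/lambda_max(xmat@xmat.T). For this data that threshold sits below 1.e-4, which is why the larger step size misbehaves.
# The largest eigenvalue of xmat @ xmat.T sets the stability limit:
# gradient descent diverges once epsilon exceeds 1 / lambda_max.
lam_max = np.linalg.eigvalsh(xmat @ xmat.T).max()
print("step-size stability threshold: "+str(1.0/lam_max))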
sol1 = minimize(x0=np.array([0.0, 0.0, 0.0, 0.0]), fun=err, method="BFGS")
print(sol1)
ypred = sol1.x[0]*x**3 + sol1.x[1]*x**2 + sol1.x[2]*x + sol1.x[3]
plt.plot(x, y, 'ro', label='Original data')
plt.plot(x, ypred, label='Fitted curve')
plt.legend()
plt.show()
sol2 = minimize(x0=np.array([0.0, 0.0, 0.0, 0.0]), fun=err, method="BFGS", jac=derr)
print(sol2)
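Supplying the analytic gradient should spare BFGS the finite-difference probes it otherwise uses to estimate the gradient; the evaluation counters on the results make this visible (a small added comparison):
# nfev counts calls to err; without jac, each BFGS gradient estimate
# costs several extra objective evaluations.
print("BFGS nfev without jac: "+str(sol1.nfev))
print("BFGS nfev with jac:    "+str(sol2.nfev))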
sol3 = minimize(x0=np.array([0.0, 0.0, 0.0, 0.0]), fun=err, method="Nelder-Mead")
print(sol3)
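To close, a compact summary of the three minimize runs, reusing only objects defined above (an added convenience, not part of the original):
# Final objective value and coefficients reached by each optimizer.
for name, sol in [("BFGS", sol1), ("BFGS + jac", sol2), ("Nelder-Mead", sol3)]:
    print(name+": err="+str(err(sol.x))+", w="+str(sol.x))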