Professional Documents
Culture Documents
# Raw data points (filled in earlier in the notebook) and a labeled scatter plot.
x_raw = []
y_raw = []
plt.scatter(x_raw, y_raw)
plt.title("Data")
plt.xlabel("X")
plt.ylabel("Y")
Out[73]: Text(0, 0.5, 'Y')
1
Now let's use gradient descent to minimize our objective function. This is the same code used
in the gradient descent section, with some additional functions for animation and visualization.
Remember that here, x and y are given points and are fixed; beta is our variable.
# Turn the raw lists into column vectors and prepend an intercept column of
# ones, giving the usual [1, x] design matrix for linear regression.
y = np.matrix(y_raw).T
x = np.matrix(x_raw).T
ones = np.ones((x.shape[0], 1))
x = np.hstack((ones, x))
def animate(i):
    """Animation callback: shift the sine curve right by i/10 each frame.

    `line` and `x` are module-level objects set up by the plotting cell
    (presumably a Line2D and the plotted x values -- confirm upstream).
    """
    line.set_ydata(np.sin(x + i / 10.0))  # update the data
    return line,
2
# d_k is the direction of descent = -D * gradient(at beta_k),
# where D is a positive definite matrix.
def d_k(beta_k, D=None):
    """Return the descent direction -D @ gradient(beta_k).

    D defaults to the identity matrix (plain steepest descent).  `gradient`
    is defined elsewhere in the notebook.
    """
    # BUG FIX: the original unconditionally overwrote D with the identity,
    # silently ignoring any caller-supplied positive definite matrix.
    if D is None:
        D = np.eye(len(beta_k))
    return -np.matmul(D, gradient(beta_k))
def animate(beta_k):
    """Animation callback: redraw the fitted line for the current iterate.

    Evaluates y = beta_0 + beta_1 * x at the plotted x values; `line` and
    `x` come from the surrounding plotting cell.
    """
    print(beta_k)  # trace each iterate as the animation runs
    line.set_ydata(np.add(x * beta_k[1], beta_k[0]))  # update the data
    return line,
def init():
    """Animation init: mask all y-data so the first frame starts blank."""
    line.set_ydata(np.ma.array(plot_x, mask=True))
    return line,
# Run gradient descent from an initial guess.  `GradientDescent` and the
# objective `f` are defined in an earlier cell; D = 1 presumably selects the
# identity scaling (steepest descent) -- confirm against that definition.
minimizer = GradientDescent(D = 1)
# Initial guess for [intercept, slope] as a 2x1 column vector.
beta_0 = np.matrix([[1], [10]])
# train returns the final beta plus the per-iteration beta and f(beta) traces.
optimal_beta, beta_history, fbeta_history = minimizer.train(f, d_k, beta_0)
print("Optimal beta -",optimal_beta)
fig, ax = plt.subplots()

def animate(beta_k):
    """Animation callback: redraw the fit via the design matrix.

    `plot_x_w_ones` is presumably the plotted x values with a prepended ones
    column, so the dot product yields beta_0 + beta_1 * x at each point --
    confirm where it is built upstream.
    """
    line.set_ydata(np.dot(plot_x_w_ones, beta_k))  # update the data
    return line,
# Render the animation inline in the notebook; `ani` is presumably a
# matplotlib FuncAnimation built in an earlier cell (confirm upstream) and
# `HTML` comes from IPython.display.
HTML(ani.to_html5_video())
# Also save the animation to disk as an H.264-encoded MP4 (requires ffmpeg).
ani.save('linear_regression_animation.mp4', codec='h264')
Would you get the same result if you found the slope of the line using the closed-form
equation given in the lecture? Try it.