diff --git a/hw1-conjugate-gradient/optimizer.py b/hw1-conjugate-gradient/optimizer.py
index a22b22b1..5b9aa27e 100644
--- a/hw1-conjugate-gradient/optimizer.py
+++ b/hw1-conjugate-gradient/optimizer.py
@@ -1 +1,64 @@
 import numpy as np
+import matplotlib.pyplot as plt
+from mpl_toolkits.mplot3d import Axes3D
+import scipy.optimize
+
+
+# callback for scipy.optimize.minimize: append the current iterate to the step log
+def my_callback(xk):
+    mystring = str(xk[0]) + "\t" + str(xk[1]) + "\n"
+    with open("minimize_steps.txt", "a") as myfile:
+        myfile.write(mystring)
+
+# minimize func with BFGS starting from (4, 4), logging every iterate via my_callback
+def find_minimum(func):
+    x0 = np.array([4.0, 4.0])
+    open("minimize_steps.txt", "w").close()  # start a fresh step log on every run
+    my_callback(x0)  # record the starting point as the first logged step
+    result = scipy.optimize.minimize(func, x0=x0, method='BFGS', callback=my_callback)
+    return result.x
+
+# This function implements S(x) = x^T A x - x^T b given in the exercise sheet
+def quadratic_function(x):
+    # work with the input as a flat 1-D array
+    x = np.asarray(x).ravel()
+
+    # matrix and vector coefficients of S(x) given in the exercise sheet
+    mat = np.array([[4, 0], [1, 3]])
+    vec = np.array([0, 1])
+
+    # return S(x)
+    return x @ mat @ x - x @ vec
+
+
+find_minimum(quadratic_function)
+
+
+def plotting():
+    fig = plt.figure()
+    axe = fig.add_subplot(111, projection="3d")
+
+    # evaluate S(x) on a regular grid for the surface plot
+    num_of_points = 100
+    xmin = ymin = -10
+    xmax = ymax = 10
+    X, Y = np.meshgrid(np.linspace(xmin, xmax, num_of_points), np.linspace(ymin, ymax, num_of_points))
+    Z = np.zeros_like(X)
+    for i in range(0, num_of_points):
+        for j in range(0, num_of_points):
+            Z[i, j] = quadratic_function(np.array([X[i, j], Y[i, j]]))
+    axe.plot_surface(X, Y, Z)
+    #axe.view_init(elev=30)
+    #axe.contourf(X, Y, Z)
+
+    # overlay the iterates recorded by the optimizer callback
+    step = np.loadtxt("minimize_steps.txt")
+    step_value = np.zeros(step.shape[0])
+    for i in range(0, step.shape[0]):
+        step_value[i] = quadratic_function(np.array([step[i, 0], step[i, 1]]))
+    axe.plot(step[:, 0], step[:, 1], step_value, 'r*--')
+    #axe.plot(step[:, 0], step[:, 1], 'r*--')
+
+    plt.show()
+
+plotting()
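As a cross-check on the result returned by `find_minimum`, the minimizer of S(x) = x^T A x - x^T b is available in closed form: the gradient is (A + A^T)x - b, so the optimum solves (A + A^T)x = b. Below is a minimal sketch of that check, assuming the same coefficients A = [[4, 0], [1, 3]] and b = (0, 1) used in `quadratic_function`; the names `A`, `b`, and `x_star` are illustrative and not part of the patch.

```python
# Closed-form check: for S(x) = x^T A x - x^T b the gradient is
# (A + A^T) x - b, so the minimizer solves (A + A^T) x = b.
# A and b mirror the coefficients inside quadratic_function (assumed here).
import numpy as np

A = np.array([[4, 0], [1, 3]])
b = np.array([0, 1])

x_star = np.linalg.solve(A + A.T, b)
print(x_star)  # should match the point returned by find_minimum(quadratic_function)
```

With these coefficients the solve gives roughly (-0.0213, 0.1702), which the BFGS iterates written to minimize_steps.txt should approach.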