diff --git a/HW1/Optimizer.py b/HW1/Optimizer.py
new file mode 100644
index 0000000..e601584
--- /dev/null
+++ b/HW1/Optimizer.py
@@ -0,0 +1,23 @@
+import scipy.optimize
+import numpy as np
+import Plotting
+
+
+# Objective function: S(x) = 2*x0^2 + 1.5*x1^2 + x0*x1 - x0 - 2*x1 + 6
+def S(x):
+    return 2*x[0]*x[0]+3/2.*x[1]*x[1]+x[0]*x[1]-x[0]-2*x[1]+6
+
+value=[]   # Keeps the solution of each iteration
+init=[0,0] # Initial guess
+value.append(init)
+
+# Callback invoked by scipy.optimize.minimize after every iteration
+def getIterationSteps(x):
+    value.append(np.copy(x))
+
+output=scipy.optimize.minimize(S,init,tol=1e-9,method='CG',callback=getIterationSteps)
+
+print(output)
+
+# Plotting the function and the solution path
+Plotting.plot(3,S,value)
diff --git a/HW1/Plotting.py b/HW1/Plotting.py
new file mode 100644
index 0000000..12a9961
--- /dev/null
+++ b/HW1/Plotting.py
@@ -0,0 +1,18 @@
+import matplotlib.pyplot as plt
+from mpl_toolkits.mplot3d import Axes3D  # registers the '3d' projection
+import numpy as np
+
+
+# rr: plotting range, the surface is drawn over [-rr,rr] x [-rr,rr]
+def plot(rr,function,value):
+    fig=plt.figure()
+    axe=fig.add_subplot(111,projection='3d')
+    x=np.linspace(-rr,rr,100)
+    y=np.linspace(-rr,rr,100)
+    xv,yv=np.meshgrid(x,y)
+    function_values=function([xv,yv])
+    axe.plot_surface(xv,yv,function_values)
+    # Overlay the iterates of the solver on the surface
+    value=np.array(value)
+    axe.plot(value[:,0],value[:,1],function([value[:,0],value[:,1]]),'-o')
+    plt.show()
diff --git a/HW1/Plotting.pyc b/HW1/Plotting.pyc
new file mode 100644
index 0000000..727159f
Binary files /dev/null and b/HW1/Plotting.pyc differ
diff --git a/HW1/Read me.txt b/HW1/Read me.txt
new file mode 100644
index 0000000..a7ef67e
--- /dev/null
+++ b/HW1/Read me.txt
@@ -0,0 +1,7 @@
+#### Command for Exercise 1: SciPy optimization
+
+python Optimizer.py
+
+#### Command for Exercise 2: Conjugate Gradient
+
+python conjugate_gradient.py
diff --git a/HW1/conjugate_gradient.py b/HW1/conjugate_gradient.py
new file mode 100644
index 0000000..4d9943c
--- /dev/null
+++ b/HW1/conjugate_gradient.py
@@ -0,0 +1,45 @@
+import numpy as np
+import Plotting
+
+
+# Objective function: S(x) = 2*x0^2 + 1.5*x1^2 + x0*x1 - x0 - 2*x1 + 6
+def S(x):
+    return 2*x[0]*x[0]+3/2.*x[1]*x[1]+x[0]*x[1]-x[0]-2*x[1]+6
+
+# Matrix form of the function:
+# S(x) = 1/2*x^T*A*x - x^T*b + constant
+A=np.array([[4.,1.],[1.,3.]])
+b=np.array([[1.],[2.]])
+constant=6.0
+
+x0=np.array([[0.],[0.]]) # Initial guess
+
+x=np.array([x0])                      # Keeps the solution of each iteration
+r_old=b-np.einsum('ij,jk->ik',A,x[0]) # Residual vector r = b - A*x
+p_old=r_old                           # Direction vector
+
+maxcounter=20 # Maximum number of iterations
+
+# Conjugate gradient solver
+for i in range(maxcounter):
+    # Step size: alpha = (r^T*r)/(p^T*A*p)
+    alpha=np.einsum('ij,ij->',r_old,r_old)/np.einsum('ji,jk,kl->',p_old,A,p_old)
+    x=np.append(x,[x[i]+alpha*p_old],axis=0)         # New solution
+    r_new=r_old-alpha*np.einsum('ij,jk->ik',A,p_old) # New residual vector
+
+    # Stop once the residual is small enough (norm avoids sign cancellation
+    # between components, unlike averaging the entries)
+    if np.linalg.norm(r_new)<0.1:
+        break
+
+    beta=np.einsum('ij,ij->',r_new,r_new)/np.einsum('ij,ij->',r_old,r_old)
+    p_new=r_new+beta*p_old # New direction vector
+
+    p_old=p_new
+    r_old=r_new
+
+x=x.squeeze(axis=2) # Collapse the (n,2,1) iterate history to (n,2)
+print("Solution Iterations:\n")
+print(x)
+
+# Plotting the function and the solution path
+Plotting.plot(3,S,x)