diff --git a/hw1-conjugate-gradient/conjugate_gradient.py b/hw1-conjugate-gradient/conjugate_gradient.py
index 75a02559..d4035c1b 100644
--- a/hw1-conjugate-gradient/conjugate_gradient.py
+++ b/hw1-conjugate-gradient/conjugate_gradient.py
@@ -1,56 +1,56 @@
 ### THIS PROGRAM RUNS THE CONJUGATE GRADIENT ALGORITHM
 
 ### IMPORT LIBRARIES
 import numpy as np
 import scipy as sp
 #import matplotlib.pyplt as plt
 
 ### INPUT ARGUMENTS
 #A = np.array([[2.0, 3.0, 5.0], [2.0, 3.0 , 6.0], [3.0, 6.0, 9.0]])
 #b = np.array([2.0, 3.0, 5.0])
 #x = np.array([10.0, 20.5, 0.0]).T # initial guess
 A = np.array([[4.0, 0], [1.0, 3.0]])
 b = np.array([0.0, 1.0])
-x = np.array([0, 0]) # initial guess
+x = np.array([4, 4]) # initial guess
 max_iterations = 1000
 tolerance = 0.001
 
 ### DEFINE FUNCTION : conjgrad
 ### DOES : CG algorithm
 def conjgrad(A, b, x, tolerance, max_iterations):
 
     # Dimension of problem
     dim = len(b)
 
     # Convert input to column vectors
     b.reshape(dim, -1)
     x.reshape(dim, -1)
     print(x.shape)
 
     # Initialization
     k = 0
     r = b - np.einsum('ij,j', A, x) # b - A @ x
     if (np.sqrt(np.sum(r**2)) < tolerance):
         return (x, k)
     p = r
 
     # CG Algorithm
     for i in range(0, max_iterations):
         alpha = np.einsum('j,j',r,r) / np.einsum('j,j',p,np.einsum('ij,j', A, p))
         x = x + alpha*p
         r_next = r - alpha*np.einsum('ij,j', A, p)
         beta = np.einsum('j,j',r_next,r_next)/np.einsum('j,j',r,r)
         r = r_next
         p = r + beta*p
         k = k + 1
         if (np.sqrt(np.sum(r**2)) < tolerance):
             break
 
     return (x,k) # want to return x-array that minimizes S(x)
 
 result, k = conjgrad(A, b, x, tolerance, max_iterations)
 print("Minimum found at position ", result, " after ", k, " iterations.")
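
For readers of the patch, the einsum calls map onto standard NumPy operations: np.einsum('ij,j', A, x) is the matrix-vector product A @ x, and np.einsum('j,j', r, r) is the dot product r @ r. Two caveats in the file as patched: b.reshape(dim, -1) and x.reshape(dim, -1) return new arrays that are immediately discarded (ndarray.reshape is not in-place), and conjugate gradient is only guaranteed to converge for a symmetric positive-definite A, which the example matrix [[4.0, 0], [1.0, 3.0]] is not. Below is a minimal sketch of the same loop under those assumptions, run against a symmetrized variant of the example; the function name cg and the check against np.linalg.solve are illustrative additions, not part of the patch.

    import numpy as np

    def cg(A, b, x, tolerance=1e-3, max_iterations=1000):
        # Sketch of the conjgrad routine above; assumes A is symmetric
        # positive definite, which the patch's example matrix is not.
        r = b - A @ x                           # initial residual
        if np.linalg.norm(r) < tolerance:
            return x, 0
        p = r.copy()                            # first search direction
        k = 0
        for k in range(1, max_iterations + 1):
            Ap = A @ p                          # one mat-vec per iteration
            alpha = (r @ r) / (p @ Ap)          # step length along p
            x = x + alpha * p
            r_next = r - alpha * Ap             # updated residual
            beta = (r_next @ r_next) / (r @ r)  # conjugacy coefficient
            r = r_next
            p = r + beta * p                    # next A-conjugate direction
            if np.linalg.norm(r) < tolerance:
                break
        return x, k

    A = np.array([[4.0, 1.0], [1.0, 3.0]])  # symmetrized variant of the example
    b = np.array([0.0, 1.0])
    x0 = np.array([4.0, 4.0])
    x, k = cg(A, b, x0)
    print("cg:", x, "direct:", np.linalg.solve(A, b), "iterations:", k)

Caching Ap = A @ p halves the matrix-vector products relative to the patched loop, which evaluates np.einsum('ij,j', A, p) twice per iteration; for a 2x2 SPD system, CG terminates in at most two iterations in exact arithmetic, so the two printed solutions should agree.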