diff --git a/hw1-conjugate-gradient/conjugate_gradient.py b/hw1-conjugate-gradient/conjugate_gradient.py
index 1aea021b..968239c3 100644
--- a/hw1-conjugate-gradient/conjugate_gradient.py
+++ b/hw1-conjugate-gradient/conjugate_gradient.py
@@ -1,39 +1,43 @@
-#THIS PROGRAM RUNS THE CONJUGATE GRADIENT ALGORITHM
+### THIS PROGRAM RUNS THE CONJUGATE GRADIENT ALGORITHM
+
+### IMPORT LIBRARIES
 
-# Import Libraries
 import numpy as np
 import scipy as sp
-import matplotlib.pyplt as plt
+#import matplotlib.pyplot as plt
 
-# Input Arguments
-# A = np.array([[2.0, 3.0, 5.0], [2.0, 3.0 , 6.0], [3.0, 6.0, 9.0]])
-# b = np.array([2.0, 3.0, 5.0])
+### INPUT ARGUMENTS
+#A = np.array([[2.0, 3.0, 5.0], [2.0, 3.0 , 6.0], [3.0, 6.0, 9.0]])
+#b = np.array([2.0, 3.0, 5.0])
+#x = np.array([10.0, 20.5, 0.0]).T # initial guess
 A = np.array([[4.0, 0], [1.0, 3.0]])
-b = np.array([0.0, 1.0])
-
+b = np.array([0.0, 1.0]).T # note: .T is a no-op on a 1-D array
+x = np.array([10.0, 20.5]).T # initial guess
 max_iterations = 1000
-tolerance = 0.0001
+tolerance = 0.001
+
+
+### DEFINE FUNCTION : conjgrad
+### DOES : CG algorithm
 
-# Define the function conjgrad
-# DOES : CG algorithm
 def conjgrad(A, b, x, tolerance, max_iterations):
     # Initialization
     k = 0
     r = b - np.matmul(A, x) # b - A @ x
     p = r
-
+
+    # Algorithm
     for i in range(0, max_iterations):
-        alpha =
-        x =
-        r_next =
-        beta =
-        p =
-        k = k + 1
+        alpha = np.einsum('j,j', r, r) / np.einsum('j,j', p, np.einsum('ij,j', A, p)) # step length (r.r)/(p.Ap)
+        x = x + alpha*p
+        r_next = r - alpha*np.einsum('ij,j', A, p) # updated residual
+        beta = np.einsum('j,j', r_next, r_next) / np.einsum('j,j', r, r) # (r_next.r_next)/(r.r)
         r = r_next
-        # Bertil continues
+        p = r + beta*p # new search direction
+        k = k + 1
 
-        if (np.sqrt(np.sum(r**2)) > tolerance):
+        if (np.sqrt(np.sum(r**2)) < tolerance): # converged: ||r|| below tolerance
             break
-    return x # want to return x-array that minimizes S(x)
+    return (x, k) # x approximately minimizes S(x); k is the iteration count
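
A quick sanity check of the patched conjgrad, offered as a minimal sketch rather than part of the patch. Note that CG is only guaranteed to converge when A is symmetric positive-definite; the hard-coded 2x2 A above is not symmetric, so the check below assumes a symmetric variant of it and compares the result against np.linalg.solve:

# Hypothetical sanity check (not part of the patch); assumes the
# conjgrad definition from conjugate_gradient.py is in scope.
import numpy as np

A_spd = np.array([[4.0, 1.0], [1.0, 3.0]])  # symmetric positive-definite variant of A
b = np.array([0.0, 1.0])
x0 = np.array([10.0, 20.5])                 # same initial guess as in the patch

x_cg, k = conjgrad(A_spd, b, x0, tolerance=0.001, max_iterations=1000)
x_ref = np.linalg.solve(A_spd, b)           # direct-solve reference

print("CG solution after", k, "iterations:", x_cg)
print("np.linalg.solve reference:        ", x_ref)
assert np.allclose(x_cg, x_ref, atol=1e-2)  # consistent with the 1e-3 residual tolerance

Two asides on the implementation: np.einsum('ij,j', A, p) is evaluated twice per iteration (once for alpha, once for r_next) and could be hoisted into a single Ap variable; and scipy.sparse.linalg.cg provides a production CG that could serve as a second reference, since scipy is already imported.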