diff --git a/hw1-conjugate-gradient/conjugate_gradient.py b/hw1-conjugate-gradient/conjugate_gradient.py
index 3490f337..1aea021b 100644
--- a/hw1-conjugate-gradient/conjugate_gradient.py
+++ b/hw1-conjugate-gradient/conjugate_gradient.py
@@ -1,26 +1,39 @@
 #THIS PROGRAM RUNS THE CONJUGATE GRADIENT ALGORITHM
 
 # Import Libraries
 import numpy as np
 import scipy as sp
-import matplotlib.pyplt as plt
+import matplotlib.pyplot as plt
 
 # Input Arguments
-A = [[2.0, 3.0, 5.0], [2.0, 3.0 , 6.0], [3.0, 6.0, 9.0]]
-b = [2.0, 3.0, 5.0]
+# A = np.array([[2.0, 3.0, 5.0], [2.0, 3.0 , 6.0], [3.0, 6.0, 9.0]])
+# b = np.array([2.0, 3.0, 5.0])
 
-float tolerance = 0.0005
+A = np.array([[4.0, 0], [1.0, 3.0]])
+b = np.array([0.0, 1.0])
+
+max_iterations = 1000
+tolerance = 0.0001
 
 # Define the function conjgrad
 # DOES : CG algorithm
-def conjgrad(A, b, x, tolerance):
+def conjgrad(A, b, x, tolerance, max_iterations):
     # Initialization
-    int k = 0
-    r = b - np.matmul(A, x)
+    k = 0
+    r = b - np.matmul(A, x)  # residual r = b - A @ x
     p = r
-
-    while(np.sqrt(np.sum(r**2)>tolerance):
-        k=k+1
-
-    return(r);
-
+
+    for i in range(0, max_iterations):
+        alpha = np.dot(r, r) / np.dot(p, np.matmul(A, p))  # step length along p
+        x = x + alpha * p                                   # update solution estimate
+        r_next = r - alpha * np.matmul(A, p)                # update residual
+        beta = np.dot(r_next, r_next) / np.dot(r, r)        # conjugacy coefficient
+        p = r_next + beta * p                               # next search direction
+        k = k + 1
+        r = r_next
+        # Bertil continues
+
+        if (np.sqrt(np.sum(r**2)) < tolerance):  # stop once the residual norm is below tolerance
+            break
+
+    return x  # return the x-array that minimizes S(x)
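
A minimal usage sketch for the completed conjgrad above, not part of the diff itself: it assumes the hunk has been applied, introduces an illustrative starting guess x0 (the script does not define one yet), and compares the CG result against NumPy's direct solver. Note that CG formally assumes a symmetric positive definite A, while the 2x2 test matrix in the diff is not symmetric, so the two results may not agree.

    # Hypothetical driver code, appended after the conjgrad definition
    x0 = np.zeros_like(b)                                  # illustrative zero starting guess
    x_cg = conjgrad(A, b, x0, tolerance, max_iterations)   # CG solution
    x_ref = np.linalg.solve(A, b)                          # direct solve, for comparison
    print("CG solution:    ", x_cg)
    print("Direct solution:", x_ref)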