import numpy as np


def conjgrad(A, b, x, tolerance, max_iterations):
    """Solve A @ x = b with the conjugate-gradient method.

    Equivalently, minimize S(x) = 0.5 * x^T A x - b^T x. A must be
    symmetric positive-definite (assumed, not checked).

    Parameters
    ----------
    A : (n, n) ndarray
        Symmetric positive-definite system matrix.
    b : (n,) array_like
        Right-hand side vector.
    x : (n,) array_like
        Initial guess.
    tolerance : float
        Stop once the residual 2-norm ||b - A @ x|| drops below this.
    max_iterations : int
        Hard cap on the number of CG iterations.

    Returns
    -------
    (x, steps) : tuple
        Final iterate and a (k+1, n) array holding the initial guess
        followed by every iterate (handy for plotting the path taken).
    """
    dim = len(b)
    # NOTE: ndarray.reshape returns a NEW array; calling it without binding
    # the result (as the original code did) is a no-op. Bind explicitly and
    # work with flat 1-D vectors, which is what the einsum contractions expect.
    b = np.asarray(b).reshape(dim)
    x = np.asarray(x, dtype=float).reshape(dim)

    # Collect iterates in a list and stack once at the end: O(k) total,
    # instead of the O(k^2) copying of np.vstack inside the loop.
    history = [x.copy()]

    # Compare squared norms against tolerance^2 to avoid sqrt in the loop.
    tolerance_sq = tolerance * tolerance

    r = b - np.einsum('ij,j', A, x)   # initial residual: b - A @ x
    r_sq = np.einsum('j,j', r, r)     # ||r||^2
    if r_sq < tolerance_sq:
        # Initial guess already within tolerance.
        return (x, np.vstack(history))

    p = r  # first search direction is the residual (steepest descent)

    for _ in range(max_iterations):
        Ap = np.einsum('ij,j', A, p)
        alpha = r_sq / np.einsum('j,j', p, Ap)   # optimal step along p
        x = x + alpha * p
        history.append(x.copy())                 # record iterate for plotting
        r = r - alpha * Ap
        r_sq_next = np.einsum('j,j', r, r)
        if r_sq_next < tolerance_sq:
            break
        # Next direction: residual made A-conjugate to previous directions.
        p = r + (r_sq_next / r_sq) * p
        r_sq = r_sq_next

    return (x, np.vstack(history))