import numpy as np


def conjgrad(A, b, x, tolerance, max_iterations):
    """Minimize S(x) = 1/2 x^T A x - b^T x with the conjugate gradient method.

    Iteratively solves A @ x = b. Convergence is only guaranteed when A is
    symmetric positive-definite (standard CG requirement).

    Parameters
    ----------
    A : (n, n) ndarray
        System matrix; should be symmetric positive-definite.
    b : (n,) ndarray
        Right-hand-side vector.
    x : (n,) ndarray
        Initial guess.
    tolerance : float
        Stop once the residual 2-norm ||b - A x|| drops below this value.
    max_iterations : int
        Hard cap on the number of CG iterations.

    Returns
    -------
    (x, steps) : tuple
        ``x`` is the final iterate; ``steps`` is a (k+1, n) array holding
        every iterate visited (row 0 is the initial guess), usable for
        plotting the minimizer's path.
    """
    dim = len(b)

    # Track every iterate for plotting; row 0 is the initial guess.
    # (The original's b.reshape/x.reshape calls were no-ops: ndarray.reshape
    # returns a new array and the results were discarded, so they are removed.)
    steps = x.reshape(-1, dim)

    # Initial residual and first search direction.
    r = b - A @ x
    p = r

    # Already converged at the starting point — nothing to do.
    if np.linalg.norm(r) < tolerance:
        return (x, steps)

    for _ in range(max_iterations):
        Ap = A @ p                       # hoisted: A @ p is needed twice per step
        rr = r @ r                       # reused in both alpha and beta
        alpha = rr / (p @ Ap)            # exact line-search step length along p
        x = x + alpha * p
        steps = np.vstack((steps, x.reshape(-1, dim)))  # record iterate
        r_next = r - alpha * Ap          # cheap residual update (no fresh A @ x)
        beta = (r_next @ r_next) / rr    # Fletcher–Reeves coefficient
        r = r_next
        p = r + beta * p                 # next A-conjugate search direction
        if np.linalg.norm(r) < tolerance:
            break

    return (x, steps)


########## TESTING AND VALIDATING CONJGRAD FUNCTION ###########
# max_iterations = 1000
# tolerance = 0.001
#
# # Input parameters for 2D (A is symmetric positive-definite):
# A = np.array([[4.0, 1.0], [1.0, 3.0]])
# b = np.array([0.0, 1.0])
# x = np.array([4.0, 4.0])  # initial guess
#
# result, steps = conjgrad(A, b, x, tolerance, max_iterations)
# print("Minimum found at position ", result, " after ", len(steps) - 1, " iterations.")