Page MenuHomec4science

conjugate_gradient.py
No OneTemporary

File Metadata

Created
Thu, May 9, 23:16

conjugate_gradient.py

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 10 10:29:34 2018
@author: sajjadazimi
"""
import numpy as np
import optimizer as opt
# part 1: implementation of linear conjugate gradient method
def conjugateGradient(A, b, x, tol):
    """Solve the linear system A x = b by the conjugate gradient method.

    Parameters
    ----------
    A : array_like, shape (n, n)
        Symmetric positive-definite matrix (assumed, not checked).
    b : array_like, shape (n,)
        Right-hand-side vector.
    x : array_like, shape (n,)
        Initial guess.
    tol : float
        Iterate until the residual norm ||b - A x|| is <= tol.

    Returns
    -------
    numpy.ndarray, shape (steps + 1, n)
        The iterates stacked row-wise: row 0 is the initial guess, the
        last row is the converged solution.  Always 2-D, even when the
        initial guess already meets the tolerance.
    """
    A = np.asarray(A, dtype=float)
    x = np.asarray(x, dtype=float)
    r = np.asarray(b, dtype=float) - A @ x
    p = r.copy()
    rs_old = r @ r  # squared residual norm, reused across the iteration
    steps = [x.copy()]  # collect iterates; stack once at the end (avoids O(k^2) vstack-in-loop)
    while np.sqrt(rs_old) > tol:
        Ap = A @ p  # the single matrix-vector product per iteration, hoisted
        alpha = rs_old / (p @ Ap)  # exact line-search step length
        x = x + alpha * p
        steps.append(x.copy())
        r = r - alpha * Ap
        rs_new = r @ r
        p = r + (rs_new / rs_old) * p  # new search direction, A-conjugate to the previous
        rs_old = rs_new
    return np.vstack(steps)
# part 2: solve for the function S given in exercise 1
# using the matrix form definition of the minimizer, 1/2 x'Ax - x'b = S(x), so
A = [[4, 1], [1, 3]]  # matrix A defining the quadratic S
b = [1, 2]            # linear term of S
x = [3., 1.]          # starting point of the iteration

x_steps = conjugateGradient(A, b, x, 1e-7)
n_steps = len(x_steps) - 1  # rows beyond the initial guess = number of CG steps
print("Number of Steps :", n_steps)
print("The converged solution :", x_steps[-1, :])
# part 3: plot the trajectory of iterates on top of S
opt.plotFunc(opt.S, x_steps)

Event Timeline