diff --git a/exercice_1/README.md b/exercice_1/README.md
index c33962a..fdd5ffd 100644
--- a/exercice_1/README.md
+++ b/exercice_1/README.md
@@ -1,10 +1,15 @@
code created by Marti Bosch (marti.bosch@epfl.ch) and Marc Schwaerzel (marc.schwaerzel@epfl.ch)
-For this exercice python3 was used (-*- coding: utf-8 -*-)
+For this exercise `python3` was used with `-*- coding: utf-8 -*-`. The `pip` dependencies can be found in the `requirements.txt` file in the parent directory of this repo.
-# Exercice 1
- 1) run the optimizer.py: it will calculate and plot on its own.
+# Exercise 1
+
+1. run the `optimizer.py`: it will calculate and plot on its own.
-# Exercice 2
- 1) run the conjugate_gradient.py code, it will plot on its own. You can set A and b and x0 as you wish. We used exercice 2.2 to determine A and b.
- 2) Use Jacobian....
- 3) just run and appreciate :)
+# Exercise 2
+
+1. run the `conjugate_gradient.py` code, it will plot on its own. You might edit `A`, `b` and `x0` in the `__main__` of the `conjugate_gradient.py` file as you wish.
+2. To solve the function `S`
+
+from **Exercise 1**, the `__main__` from `conjugate_gradient.py` sets by default `A` to the function's Hessian `\nabla^2 S(\vec{x})` and `b` to `A \vec{x} - \nabla S(\vec{x})`.
+
+3. just run and appreciate :)
diff --git a/exercice_1/conjugate_gradient.py b/exercice_1/conjugate_gradient.py
index 2b1f34f..f0be481 100644
--- a/exercice_1/conjugate_gradient.py
+++ b/exercice_1/conjugate_gradient.py
@@ -1,70 +1,76 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 9 13:49:52 2018
@author: masc
"""
import numpy as np
from optimizer import S
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
+
def conjugate_gradient(A, b, x):
-# initialisation
- r = b - np.einsum("ij,j",A, x) # r -> residual
- d = r # d -> direction
+ # initialisation
+ r = b - np.einsum("ij,j", A, x) # r -> residual
+ d = r # d -> direction
i = 1
- print("### initialisation ###","\ninitial residual\t", np.einsum("i,i",r,r))
+ print("### initialisation ###", "\ninitial residual\t",
+ np.einsum("i,i", r, r))
xmat = [x]
- while True:
- alpha = np.einsum("i,i",r, r) / np.einsum("i,i",np.einsum("i, ij",d, A), d)
- x = x + alpha*d
- r_new = r - np.dot(np.dot(alpha,A), d)
- beta = np.einsum("i,i",r_new, r_new) / np.einsum("i,i",r, r)
+ while True:
+ alpha = np.einsum("i,i", r, r) / np.einsum("i,i",
+ np.einsum("i, ij", d, A), d)
+ x = x + alpha * d
+ r_new = r - np.dot(np.dot(alpha, A), d)
+ beta = np.einsum("i,i", r_new, r_new) / np.einsum("i,i", r, r)
d_new = r_new + np.dot(beta, d)
- xmat.append([x[0],x[1]])
+ xmat.append([x[0], x[1]])
r = r_new
d = d_new
- if np.einsum("i,i",r,r) < 1e-9:#np.isclose(r_k, np.zeros(r_k.shape)).all():
+ # np.isclose(r_k, np.zeros(r_k.shape)).all()
+ if np.einsum("i,i", r, r) < 1e-9:
print("### Iteration finishes: r*r < 1e9 ###")
print("final r \t\t", r)
- print("final r*r \t\t",np.einsum("i,i",r,r))
+ print("final r*r \t\t", np.einsum("i,i", r, r))
print("nit \t\t\t", i)
print("final x \t\t", x)
break
-
- print("residual \t\t", np.einsum("i,i",r,r))
+
+ print("residual \t\t", np.einsum("i,i", r, r))
print("x \t\t\t", x)
print("")
-
+
i += 1
return xmat
+
def plot(x):
iterationPoints = []
for i in x:
- iterationPoints.append([i[0],i[1], S(i)])
+ iterationPoints.append([i[0], i[1], S(i)])
fig = plt.figure()
ax = Axes3D(fig)
X_1, X_2 = np.meshgrid(np.linspace(-3, 3, 100), np.linspace(3, -3, 100))
- f = np.array([S((x_1, x_2)) for x_1, x_2 in zip(np.ravel(X_1), np.ravel(X_2))])
+ f = np.array(
+ [S((x_1, x_2)) for x_1, x_2 in zip(np.ravel(X_1), np.ravel(X_2))])
F = f.reshape(X_1.shape)
ax.plot_surface(X_1, X_2, F)
iterationPoints = np.array(iterationPoints)
- print("\nx, y and S(x,y) for each iteration\n",iterationPoints)
- ax.plot(iterationPoints[:,0], iterationPoints[:,1], iterationPoints[:,2],
- 'ro-')
+ print("\nx, y and S(x,y) for each iteration\n", iterationPoints)
+ ax.plot(iterationPoints[:, 0], iterationPoints[:, 1],
+ iterationPoints[:, 2], 'ro-')
ax.set_title("Conjugate gradient method")
-
-
+
+
if __name__ == '__main__':
- A = [[4,1],[1,3]] # jacobian of function (A = "delta^2"function)
- b = [1,2] # b = Ax - "delta"function
- x = [1,3]
- xmat = conjugate_gradient(A=A,b=b,x=x)
-# print("\nx for all iterations: ##", xmat)
+    A = [[4, 1], [1, 3]]  # function's Hessian (A = `\nabla^2 S(\vec{x})`)
+    b = [1, 2]  # b = A \vec{x} - `\nabla S(\vec{x})`
+ x = [1, 3]
+ xmat = conjugate_gradient(A=A, b=b, x=x)
+ # print("\nx for all iterations: ##", xmat)
plot(xmat)
-
+ plt.show()