# +----------------------------------------------------+
# |                 Lesson 5 - demo 1                  |
# +----------------------------------------------------+
#  METHOD OF STEEPEST DESCENT vs CONJUGATE GRADIENTS
#
#  PROBLEM: Test convergence of SD versus CG
#    (in exact arithmetic CG terminates in at most N steps,
#     while the SD rate degrades as cond(A) grows)
#  THIS VERSION:
#    2D projection of the solution approximation at each iteration
#    matrix: Rand * Rand^T (symmetric positive definite)

import numpy as np
import matplotlib.pyplot as plt

# Matrix generator: random symmetric positive definite
def GenMat(N):
    A=np.random.rand(N,N)
    A=np.dot(A,A.transpose())   #A*A^T is symmetric and (almost surely) positive definite
    return A
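
# Quick sanity check (an illustrative sketch added here, not part of the original
# demo; M_test is a hypothetical local name): A*A^T is symmetric by construction
# and positive definite whenever the random factor has full rank, which holds
# almost surely for a dense random matrix.
M_test = GenMat(4)
print('symmetric:', np.allclose(M_test, M_test.T))
print('positive definite:', np.all(np.linalg.eigvalsh(M_test) > 0))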

# CG solver - also returns a 2D projection of the iterates for monitoring convergence
def CGsolve(A,b,n):
    it=2*n           #number of iterations (CG needs at most n in exact arithmetic)
    x1=np.zeros(it)
    x2=np.zeros(it)
    #CG algorithm
    x=np.zeros(n)
    r=b              #initial residual: r0 = b - A*x0 = b, since x0 = 0
    p=r              #initial search direction
    gm=np.dot(r,r)
    for i in range(it):
        w=np.dot(A,p)
        al=gm/np.dot(p,w)   #step length from exact line search
        x=x+al*p
        r=r-al*w
        gm_old=gm
        gm=np.dot(r,r)
        be=gm/gm_old        #Fletcher-Reeves coefficient
        p=r+be*p            #new A-conjugate search direction
        x1[i]=x[1]          #record components x[1], x[2] as a 2D projection
        x2[i]=x[2]
    return x,x1,x2
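
# CG minimizes the A-norm of the error over growing Krylov subspaces and, in exact
# arithmetic, terminates in at most n steps. A minimal verification sketch (added
# for illustration; A_t, b_t, x_t are hypothetical local names):
A_t = GenMat(4)
b_t = np.ones(4)
x_t, _, _ = CGsolve(A_t, b_t, 4)
print('CG check, residual norm:', np.linalg.norm(b_t - np.dot(A_t, x_t)))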

# Steepest descent solver - also returns a 2D projection of the iterates for monitoring convergence
def SDsolve(A,b,n):
    it=20*n          #number of iterations (SD converges far more slowly than CG)
    x1=np.zeros(it)
    x2=np.zeros(it)
    #SD algorithm
    x=np.zeros(n)
    r=b              #initial residual: r0 = b - A*x0 = b, since x0 = 0
    for i in range(it):
        w=np.dot(A,r)
        al=np.dot(r,r)/np.dot(r,w)   #step length from exact line search
        x=x+al*r
        r=r-al*w
        x1[i]=x[1]          #record components x[1], x[2] as a 2D projection
        x2[i]=x[2]
    return x,x1,x2
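
# Why al = (r.r)/(r.A.r): for f(x) = 1/2 x.A.x - b.x the residual r = b - A.x is
# the negative gradient, and minimizing f(x + al*r) over al gives exactly this step.
# Numerical spot check (added sketch; the *_s names are hypothetical): after an
# exact line search the new residual is orthogonal to the old search direction.
A_s = GenMat(4)
b_s = np.ones(4)
r_s = b_s - np.dot(A_s, np.zeros(4))                    # residual at x0 = 0
al_s = np.dot(r_s, r_s)/np.dot(r_s, np.dot(A_s, r_s))   # exact line-search step
r_new = r_s - al_s*np.dot(A_s, r_s)
print('SD line-search check, r_new . r_old =', np.dot(r_new, r_s))   # ~0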

N=5
b=np.ones(N)
A=GenMat(N)
xx_CG, x1_CG,x2_CG = CGsolve(A,b,N)
xx_SD, x1_SD,x2_SD = SDsolve(A,b,N)

print('condition(A)=',np.linalg.cond(A))
print('eigvals=',np.linalg.eigvalsh(A))
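
# Extra diagnostics (added for illustration; kappa is a hypothetical local name):
# residual norms of the returned approximations, plus the classical worst-case
# per-step SD contraction factor (kappa-1)/(kappa+1) for the A-norm of the error.
kappa = np.linalg.cond(A)
print('SD worst-case contraction factor:', (kappa - 1)/(kappa + 1))
print('||b - A*x_CG|| =', np.linalg.norm(b - np.dot(A, xx_CG)))
print('||b - A*x_SD|| =', np.linalg.norm(b - np.dot(A, xx_SD)))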

%matplotlib inline
plt.plot(x1_CG,x2_CG,'go')
plt.plot(x1_CG,x2_CG,'g',label='CG iterates')
plt.plot(x1_SD,x2_SD,'bo')
plt.plot(x1_SD,x2_SD,'b',label='SD iterates')
plt.plot(xx_CG[1],xx_CG[2],'ro',label='solution (CG)')
plt.xlabel('x[1]')
plt.ylabel('x[2]')
plt.legend()
plt.show()
