MPI-SCATCI 2.0
An MPI version of SCATCI
SLEPCDiagonalizer_module.F90
! Copyright 2019
!
! For a comprehensive list of the developers that contributed to these codes
! see the UK-AMOR website.
!
! This file is part of UKRmol-in (UKRmol+ suite).
!
! UKRmol-in is free software: you can redistribute it and/or modify
! it under the terms of the GNU General Public License as published by
! the Free Software Foundation, either version 3 of the License, or
! (at your option) any later version.
!
! UKRmol-in is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License
! along with UKRmol-in (in source/COPYING). Alternatively, you can also visit
! <https://www.gnu.org/licenses/>.

!> \brief Diagonalizer type using SLEPc backend.
module SLEPCDiagonalizer_module

    use const_gbl,                 only: stdout
    use consts_mpi_ci,             only: pass_to_cdenprop
    use mpi_gbl,                   only: master
    use precisn,                   only: longint, wp

    use baseintegral_module,       only: BaseIntegral
    use basematrix_module,         only: BaseMatrix
    use diagonalizer_module,       only: BaseDiagonalizer
    use diagonalizerresult_module, only: DiagonalizerResult
    use options_module,            only: Options
    use parallelization_module,    only: grid => process_grid
    use slepcmatrix_module,        only: SLEPCMatrix
    use timing_module,             only: master_timer

    use petsc
    use slepceps

#include <finclude/petsc.h>
#include <finclude/slepceps.h>

    implicit none

    public SLEPCDiagonalizer

    private

    type, extends(BaseDiagonalizer) :: SLEPCDiagonalizer
    contains
        procedure, public :: diagonalize => diagonalize_slepc
        !procedure, public :: DoNonSLEPCMat
        procedure, public :: UseSLEPCMat
        procedure, public :: SelectEPS
    end type

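    ! A minimal usage sketch (hypothetical caller; the names diag, ham, res,
    ! opts and ints are illustrative, not part of this module):
    !
    !     type(SLEPCDiagonalizer)  :: diag
    !     type(DiagonalizerResult) :: res
    !     call diag % diagonalize(ham, nev, res, .true., opts, ints)
    !
    ! where ham is an assembled SLEPCMatrix, nev the number of requested
    ! eigenpairs, and .true. asks for the eigenvectors on every process.
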
contains

    !> Fetches the PETSc matrix stored inside a SLEPCMatrix for use by the solver.
    subroutine UseSLEPCMat (this, matrix_elements, mat_out)
        class(SLEPCDiagonalizer)       :: this
        class(SLEPCMatrix), intent(in) :: matrix_elements
        Mat, pointer, intent(inout)    :: mat_out

        write (stdout, "('--------Optimized SLEPC Matrix Format chosen------')")

        mat_out => matrix_elements % get_PETSC_matrix()

        if (.not. associated(mat_out)) then
            stop "Matrix doesn't exist for SLEPC, are you sure you created it?"
        end if

    end subroutine UseSLEPCMat

    !> Sets the SLEPc eigensolver algorithm (Krylov-Schur).
    subroutine SelectEPS (this, eps)
        class(SLEPCDiagonalizer) :: this
        EPS, intent(inout)       :: eps
        PetscErrorCode           :: ierr

        write (stdout, "('--------KRYLOVSCHUR used as Diagonalizer------')")

        ! ** Choose the eigensolver algorithm
        call EPSSetType(eps, EPSKRYLOVSCHUR, ierr)
        !call EPSSetTrueResidual(eps,PETSC_TRUE,ierr);
        !call EPSLanczosSetReorthog(eps,EPS_LANCZOS_REORTHOG_SELECTIVE,ierr)
        CHKERRQ(ierr)

    end subroutine SelectEPS

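    ! Run-time solver selection is not wired in here. If it were wanted, the
    ! standard SLEPc pattern (a sketch, not present in this module) would be
    !
    !     call EPSSetFromOptions(eps, ierr)
    !
    ! which lets command-line options such as -eps_type override the
    ! hard-coded Krylov-Schur choice.
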
    !> Diagonalizes the Hamiltonian matrix using SLEPc and hands the requested
    !> eigenpairs over to the result object.
    subroutine diagonalize_slepc (this, matrix_elements, num_eigenpair, dresult, all_procs, option, integrals)
        class(SLEPCDiagonalizer)         :: this
        class(DiagonalizerResult)        :: dresult
        class(BaseMatrix),   intent(in)  :: matrix_elements
        class(BaseIntegral), intent(in)  :: integrals
        type(Options),       intent(in)  :: option
        integer,             intent(in)  :: num_eigenpair
        logical,             intent(in)  :: all_procs

        integer               :: mat_dimen, number_solved
        real(wp), allocatable :: diagonal_temp(:), eigen(:)

        !-----SLEPC values
        Mat, pointer         :: a
        EPS                  :: eps
        EPSType              :: tname
        ST                   :: st_eps
        EPSConvergedReason   :: conv_reason
        PetscReal            :: maxtol, tol, error
        PetscScalar          :: kr, ki, current_shift
        PetscScalar, pointer :: xx_v(:)
        Vec                  :: xr, xi, f_xr
        PetscInt             :: nev, maxit, its, nconv, matrix_size, ido, n
        PetscErrorCode       :: ierr
        VecScatter           :: vsctx
        KSP                  :: ksp
        PC                   :: pc

        maxit  = option % max_iterations
        maxtol = option % max_tolerance

        ! a negative iteration limit means "use the PETSc default"
        if (maxit < 0) maxit = PETSC_DEFAULT_INTEGER

        matrix_size = matrix_elements % get_matrix_size()
        mat_dimen   = matrix_size

        write (stdout, "()")
        write (stdout, "('-----------WARNING----------------------- ')")
        write (stdout, "('When dealing with degenerate symmetries, SLEPC may give incorrect values')")
        write (stdout, "('YOU HAVE BEEN WARNED')")
        write (stdout, "('---------------------------------------- ')")
        write (stdout, "('N: ',i8)") matrix_size
        write (stdout, "('Requested # of eigenpairs',i8)") num_eigenpair
        write (stdout, "('Maximum iterations',i8)") maxit
        write (stdout, "('Maximum tolerance',es9.2)") maxtol
        write (stdout, "()")

        select type (matrix_elements)
            type is (SLEPCMatrix)
                call this % UseSLEPCMat(matrix_elements, a)
            class is (BaseMatrix)
                stop "Matrix format not yet implemented for SLEPC"
                !class is (BaseMatrix)
                !call this%DoNonSLEPCMat(matrix_elements,num_eigenpair,eigen,vecs,maxit,maxtol)
        end select

        ! work vectors with the same parallel layout as the matrix
        call MatCreateVecs(a, xr, xi, ierr)

        if (all_procs) then
            call VecScatterCreateToAll(xr, vsctx, f_xr, ierr)
        else
            call VecScatterCreateToZero(xr, vsctx, f_xr, ierr)
        end if

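        ! VecScatterCreateToAll builds a scatter context that replicates a
        ! distributed Vec into a sequential Vec (f_xr) on every process;
        ! VecScatterCreateToZero gathers it onto rank 0 of the communicator only.
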
        call EPSCreate(int(grid % gcomm, kind(PETSC_COMM_WORLD)), eps, ierr)
        call this % SelectEPS(eps)
        call EPSSetOperators(eps, a, PETSC_NULL_MAT, ierr)
        call EPSSetProblemType(eps, EPS_HEP, ierr)

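        ! EPS_HEP declares the problem Hermitian (here: real symmetric), which
        ! lets SLEPc use symmetric solvers and guarantees real eigenvalues.
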
!#if defined(PETSC_HAVE_MUMPS)
! if(num_eigenpair > 20) then
! call EPSGetST(eps,st_eps,ierr);
! call STSetType(st_eps,STSINVERT,ierr)
! call EPSSetST(eps,st_eps,ierr)
! call EPSSetWhichEigenpairs(eps,EPS_ALL,ierr);
! call EPSSetInterval(eps,-90,-80,ierr);
! call STGetKSP(st_eps,ksp,ierr);
! call KSPSetType(ksp,KSPPREONLY,ierr);
! call KSPGetPC(ksp,pc,ierr);
!
! call PCSetType(pc,PCCHOLESKY,ierr);
!
! call EPSKrylovSchurSetDetectZeros(eps,PETSC_TRUE,ierr)
! call PCFactorSetMatSolverPackage(pc,MATSOLVERMUMPS,ierr)
!
! call PetscOptionsInsertString("-mat_mumps_icntl_13 1 -mat_mumps_icntl_24 1 -mat_mumps_cntl_3 1e-12",ierr);
!
! else
! call EPSSetWhichEigenpairs(eps, EPS_SMALLEST_REAL ,ierr);
! endif
!#else
! call EPSSetWhichEigenpairs(eps, EPS_SMALLEST_REAL ,ierr);
!#endif

        !call MatView(A, PETSC_VIEWER_DRAW_WORLD,ierr)

        ! request num_eigenpair eigenpairs; PETSC_DECIDE lets SLEPc choose the
        ! working subspace sizes (ncv, mpd) itself
        n = num_eigenpair
        call EPSSetDimensions(eps, n, PETSC_DECIDE, PETSC_DECIDE, ierr)
        ! call EPSSetType(eps,EPSJD,ierr)
        call EPSSetTolerances(eps, maxtol, maxit, ierr)

        call master_timer % start_timer("SLEPC solve")
        !call EPSView(eps, PETSC_VIEWER_STDOUT_WORLD,ierr)
        call EPSSolve(eps, ierr)
        CHKERRQ(ierr)
        call master_timer % stop_timer("SLEPC solve")

        !call EPSGetConvergedReason(eps,conv_reason,ierr)
        !write(stdout,"('Converged reason ',i8)") conv_reason

        call EPSGetIterationNumber(eps, its, ierr)
        call EPSGetTolerances(eps, tol, maxit, ierr)

        if (grid % grank == master) then
            write (stdout, *)
            write (stdout, "(' Stopping condition: tol=',1P,E11.4,', maxit=',I4,', stopped at it=',I4)") tol, maxit, its
        end if

        call EPSGetConverged(eps, nconv, ierr)

        if (nconv < num_eigenpair) then
            write (stdout, *) 'Not all requested eigenpairs have converged!'
            write (stdout, *) 'Only ', nconv, ' have converged against ', num_eigenpair, ' requested'
            stop "Diagonalization not completed"
        end if

        allocate(eigen(num_eigenpair))

        do ido = 0, min(nconv, n) - 1
            ! ** Get converged eigenpairs: i-th eigenvalue is stored in kr
            ! ** (real part) and ki (imaginary part)
            call EPSGetEigenpair(eps, ido, kr, ki, xr, xi, ierr)
            eigen(ido + 1) = PetscRealPart(kr)
        end do

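        ! SLEPc numbers converged pairs from 0, hence the 0-based loop and the
        ! +1 shift into the Fortran array; for EPS_HEP the imaginary parts ki
        ! are zero, so only the real parts kr are kept.
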
        ! store Hamiltonian information into the CDENPROP vector
        if (iand(option % vector_storage_method, pass_to_cdenprop) /= 0) then
            call dresult % export_header(option, integrals)
        end if

        ! write Hamiltonian information into the output disk file (the current context master will do this)
        if (grid % grank == master) then
            call dresult % write_header(option, integrals)
        end if

        ! save eigenvalues by master of the context (to disk), or by everyone (to internal arrays)
        if (all_procs .or. grid % grank == master) then
            call dresult % handle_eigenvalues(eigen, matrix_elements % diagonal, num_eigenpair, mat_dimen)
        end if

        do ido = 0, min(nconv, n) - 1
            ! ** Get converged eigenpairs: i-th eigenvalue is stored in kr
            ! ** (real part) and ki (imaginary part)
            call EPSGetEigenpair(eps, ido, kr, ki, xr, xi, ierr)

            ! ** Compute the relative error associated to each eigenpair
            !call EPSComputeError(eps,ido,EPS_ERROR_RELATIVE,error,ierr)
            !write(stdout,*) PetscRealPart(kr), error

            ! Grab the vectors: gather the distributed eigenvector into f_xr
            call VecScatterBegin(vsctx, xr, f_xr, INSERT_VALUES, SCATTER_FORWARD, ierr)
            call VecScatterEnd(vsctx, xr, f_xr, INSERT_VALUES, SCATTER_FORWARD, ierr)
            if (all_procs .or. grid % grank == master) then
                call VecGetArrayReadF90(f_xr, xx_v, ierr)
                call dresult % handle_eigenvector(xx_v(1:mat_dimen), mat_dimen)
                call VecRestoreArrayReadF90(f_xr, xx_v, ierr)
            end if
        end do

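        ! The converged pairs are traversed twice by design: the first pass
        ! stored the eigenvalues, this second pass gathers and hands over the
        ! eigenvectors one at a time, so a single work vector f_xr suffices.
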
        ! finish writing the solution file and let other processes access it
        if (grid % grank == master) then
            call dresult % finalize_solutions(option)
        end if

        ! release the SLEPc/PETSc objects, including the gather context and its target vector
        call EPSDestroy(eps, ierr)
        call VecDestroy(xr, ierr)
        call VecDestroy(xi, ierr)
        call VecScatterDestroy(vsctx, ierr)
        call VecDestroy(f_xr, ierr)

        deallocate(eigen)

    end subroutine diagonalize_slepc

end module SLEPCDiagonalizer_module