MPI-SCATCI 2.0
An MPI version of SCATCI
Loading...
Searching...
No Matches
SLEPCDiagonalizer_module.F90
Go to the documentation of this file.
1! Copyright 2019
2!
3! For a comprehensive list of the developers that contributed to these codes
4! see the UK-AMOR website.
5!
6! This file is part of UKRmol-in (UKRmol+ suite).
7!
8! UKRmol-in is free software: you can redistribute it and/or modify
9! it under the terms of the GNU General Public License as published by
10! the Free Software Foundation, either version 3 of the License, or
11! (at your option) any later version.
12!
13! UKRmol-in is distributed in the hope that it will be useful,
14! but WITHOUT ANY WARRANTY; without even the implied warranty of
15! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16! GNU General Public License for more details.
17!
18! You should have received a copy of the GNU General Public License
19! along with UKRmol-in (in source/COPYING). Alternatively, you can also visit
20! <https://www.gnu.org/licenses/>.
21
22!> \brief Diagonalizer type using SLEPc backend
23!> \authors A Al-Refaie
24!> \date 2017
25!>
26!> This module will be built only if SLEPc is available.
27!>
28!> \note 16/01/2019 - Jakub Benda: Uniform coding style and expanded documentation.
29!>
30module slepcdiagonalizer_module
31
    use blas_lapack_gbl,          only: blasint
    use cdenprop_defs,            only: civect
    use const_gbl,                only: stdout
    use consts_mpi_ci,            only: pass_to_cdenprop
    use mpi_gbl,                  only: master, mpi_xermsg
    use precisn,                  only: longint, wp

    use basematrix_module,        only: basematrix
    use baseintegral_module,      only: baseintegral
    use diagonalizer_module,      only: basediagonalizer
    use diagonalizerresult_module, only: diagonalizerresult
    use options_module,           only: options
    use parallelization_module,   only: grid => process_grid
    use slepcmatrix_module,       only: slepcmatrix
    use timing_module,            only: master_timer

    use petsc
    use slepceps
50
51#include <finclude/petsc.h>
52#include <finclude/slepceps.h>
53
54 implicit none
55
56 public slepcdiagonalizer
57
58 private
59
60 type, extends(basediagonalizer) :: slepcdiagonalizer
61 contains
62 procedure, public :: diagonalize => diagonalize_slepc
63 !procedure, public :: DoNonSLEPCMat
64 procedure, public :: useslepcmat
65 procedure, public :: selecteps
66 end type
67
68contains
69
70 subroutine useslepcmat (this, matrix_elements, mat_out)
71 class(slepcdiagonalizer) :: this
72 class(slepcmatrix), intent(in) :: matrix_elements
73 mat, pointer, intent(inout) :: mat_out
74
75 write(stdout,"('--------Optimized SLEPC Matrix Format chosen------')")
76
77 mat_out => matrix_elements % get_PETSC_matrix()
78
79 if (.not. associated(mat_out)) then
80 stop "Matrix doesn;t exist for SLEPC, are you sure you created it?"
81 end if
82
83 end subroutine useslepcmat
84
85
86 subroutine selecteps (this, eps)
87 class(slepcdiagonalizer) :: this
88 eps, intent(inout) :: eps
89 petscerrorcode :: ierr
90
91 write(stdout,"('--------KRYLOVSCHUR used as Diagonalizer------')")
92
93 ! ** Create eigensolver context
94 call epssettype(eps,epskrylovschur,ierr)
95 !call EPSSetTrueResidual(eps,PETSC_TRUE,ierr);
96 !call EPSLanczosSetReorthog(eps,EPS_LANCZOS_REORTHOG_SELECTIVE,ierr)
97 if (ierr /= 0) then
98 call mpi_xermsg('SLEPCDiagonalizer_module', 'SelectEPS', 'Failed to select eigensolver', int(ierr), 1)
99 end if
100
101 end subroutine selecteps
102
103
104 subroutine diagonalize_slepc (this, matrix_elements, num_eigenpair, dresult, all_procs, option, integrals)
105 class(slepcdiagonalizer) :: this
106 class(diagonalizerresult) :: dresult
107 class(basematrix), intent(in) :: matrix_elements
108 class(baseintegral), intent(in) :: integrals
109 type(options), intent(in) :: option
110 integer, intent(in) :: num_eigenpair
111 logical, intent(in) :: all_procs
112
113 integer :: mat_dimen, number_solved
114 integer(blasint) :: grid_master_context
115 real(wp), allocatable :: diagonal_temp(:), eigen(:)
116
117 type(civect) :: eigenvector
118
119 !-----SLEPC values
120 mat, pointer :: a
121 eps :: eps
122 epstype :: tname
123 st :: st_eps
124 epsconvergedreason :: conv_reason
125 petscreal :: maxtol,tol, error
126 petscscalar :: kr, ki, current_shift
127 petscscalar, pointer :: xx_v(:)
128 vec :: xr, xi, f_xr
129 petscint :: nev, maxit, its, nconv, matrix_size, ido, n
130 petscerrorcode :: ierr
131 vecscatter :: vsctx
132 ksp :: ksp
133 pc :: pc
134
135 maxit = option % max_iterations
136 maxtol = option % max_tolerance
137
138 if (maxit < 0) maxit = petsc_default_integer
139
140 matrix_size = matrix_elements % get_matrix_size()
141 mat_dimen = matrix_size
142
143 write (stdout, "()")
144 write (stdout, "('-----------WARNING----------------------- ')")
145 write (stdout, "('When dealing with symmetries that are degenerate SLEPC may give incorrect values')")
146 write (stdout, "('YOU HAVE BEEN WARNED')")
147 write (stdout, "('---------------------------------------- ')")
148 write (stdout, "('N: ',i8)") matrix_size
149 write (stdout, "('Requested # of eigenpairs',i8)") num_eigenpair
150 write (stdout, "('Maximum iterations',i8)") maxit
151 write (stdout, "('Maximum tolerance',es9.2)") maxtol
152 write (stdout, "()")
153
154 select type(matrix_elements)
155 type is (slepcmatrix)
156 call this % UseSLEPCMat(matrix_elements, a)
157 class is (basematrix)
158 stop "Matrix format not yet implemented for SLEPC"
159 !class is (BaseMatrix)
160 !call this%DoNonSLEPCMat(matrix_elements,num_eigenpair,eigen,vecs,maxit,maxtol)
161 end select
162
163 call matcreatevecs(a, xr, xi, ierr)
164
165 if (all_procs) then
166 call vecscattercreatetoall(xr, vsctx, f_xr, ierr)
167 else
168 call vecscattercreatetozero(xr, vsctx, f_xr, ierr)
169 end if
170
171 call epscreate(int(grid % gcomm, kind(petsc_comm_world)), eps, ierr)
172 call this % SelectEPS(eps)
173 call epssetoperators(eps, a, petsc_null_mat, ierr)
174 call epssetproblemtype(eps, eps_hep, ierr)
175
176!#if defined(PETSC_HAVE_MUMPS)
177! if(num_eigenpair > 20) then
178! call EPSGetST(eps,st_eps,ierr);
179! call STSetType(st_eps,STSINVERT,ierr)
180! call EPSSetST(eps,st_eps,ierr)
181! call EPSSetWhichEigenpairs(eps,EPS_ALL,ierr);
182! call EPSSetInterval(eps,-90,-80,ierr);
183! call STGetKSP(st_eps,ksp,ierr);
184! call KSPSetType(ksp,KSPPREONLY,ierr);
185! call KSPGetPC(ksp,pc,ierr);
186!
187! call PCSetType(pc,PCCHOLESKY,ierr);
188!
189! call EPSKrylovSchurSetDetectZeros(eps,PETSC_TRUE,ierr)
190! call PCFactorSetMatSolverPackage(pc,MATSOLVERMUMPS,ierr)
191!
192! call PetscOptionsInsertString("-mat_mumps_icntl_13 1 -mat_mumps_icntl_24 1 -mat_mumps_cntl_3 1e-12",ierr);
193!
194! else
195! call EPSSetWhichEigenpairs(eps, EPS_SMALLEST_REAL ,ierr);
196! endif
197!#else
198! call EPSSetWhichEigenpairs(eps, EPS_SMALLEST_REAL ,ierr);
199!#endif
200
201 !call MatView(A, PETSC_VIEWER_DRAW_WORLD,ierr)
202
203 !** Set operators. In this case, it is a standard eigenvalue problem
204
205 !Create the ST
206 n = num_eigenpair
207 call epssetdimensions(eps, n, petsc_decide, petsc_decide, ierr)
208 ! call EPSSetType(eps,EPSJD,ierr)
209 call epssettolerances(eps, maxtol, maxit, ierr)
210 call master_timer % start_timer("SLEPC solve")
211 !call EPSView(eps, PETSC_VIEWER_STDOUT_WORLD,ierr)
212 call epssolve(eps, ierr)
213 if (ierr /= 0) then
214 call mpi_xermsg('SLEPCDiagonalizer_module', 'SelectEPS', 'Failed to select eigensolver', int(ierr), 1)
215 end if
216 call master_timer % stop_timer("SLEPC solve")
217 !call EPSGetConvergedReason(eps,conv_reason,ierr)
218
219 ! write(stdout,"('Converged reason ',i8)") conv_reason
220 call epsgetiterationnumber(eps, its, ierr)
221 call epsgettolerances(eps, tol, maxit, ierr)
222
223 if (grid % grank == master) then
224 write (stdout, *)
225 write (stdout, "(('/ Stopping condition: tol=',1P,E11.4,', maxit=',I0,', stopped at it=',I0))") tol, maxit, its
226 end if
227
228 call epsgetconverged(eps, nconv, ierr)
229
230 if (nconv < num_eigenpair) then
231 write (stdout, *) 'Not all requested eigenpairs have converged!!!!'
232 write (stdout, *) 'Only ', nconv, ' have converged against ', num_eigenpair, ' requested'
233 stop "Diagoanlization not completed"
234 end if
235
236 allocate(eigen(num_eigenpair))
237
238 do ido = 0, min(nconv, n) - 1
239 ! ** Get converged eigenpairs: i-th eigenvalue is stored in kr
240 ! ** (real part) and ki (imaginary part)
241 call epsgeteigenpair(eps, ido, kr, ki, xr, xi, ierr)
242 eigen(ido + 1) = petscrealpart(kr)
243 end do
244
245 ! store Hamiltonian information into the CDENPROP vector
246 if (iand(option % vector_storage_method, pass_to_cdenprop) /= 0) then
247 call dresult % export_header(option, integrals)
248 end if
249
250 ! write Hamiltonian information into the output disk file (the current context master will do this)
251 if (grid % grank == master) then
252 call dresult % write_header(option, integrals)
253 end if
254
255 ! save eigenvalues by master of the context (to disk), or by everyone (to internal arrays)
256 if (all_procs .or. grid % grank == master) then
257 call dresult % handle_eigenvalues(eigen, matrix_elements % diagonal, num_eigenpair, mat_dimen)
258 end if
259
260 ! when the post-processing stage is required, set up the needed ScaLAPACK matrices
261 if (iand(dresult % vector_storage, pass_to_cdenprop) /= 0) then
262
263 ! split off a 1-by-1 (i.e. master only) sub-grid
264 grid_master_context = grid % gcntxt
265#if defined(usempi) && defined(scalapack)
266 call blacs_gridinit(grid_master_context, 'R', 1_blasint, 1_blasint)
267#endif
268
269 ! set up ScaLAPACK storage for a single eigenvector (master-only ScaLAPACK matrix)
270 eigenvector % blacs_context = grid_master_context
271 eigenvector % CV_is_scalapack = .true.
272 call eigenvector % init_CV(mat_dimen, 1)
273
274 ! set up ScaLAPACK storage for all eigenvectors, distributed among processes in the current BLACS grid
275 dresult % ci_vec % blacs_context = grid % gcntxt
276 dresult % ci_vec % CV_is_scalapack = .true.
277 call dresult % ci_vec % init_CV(mat_dimen, num_eigenpair)
278
279 end if
280
281 do ido = 0, min(nconv, n) - 1
282 ! ** Get converged eigenpairs: i-th eigenvalue is stored in kr
283 ! ** (real part) and ki (imaginary part)
284 call epsgeteigenpair(eps, ido, kr, ki, xr, xi, ierr)
285
286 ! ** Compute the relative error associated to each eigenpair
287 !call EPSComputeError(eps,ido,EPS_ERROR_RELATIVE,error,ierr)
288 !write(stdout,*) PetscRealPart(kr), error
289 !160 format (1P,' ',E12.4,' ',E12.4)
290 !eigen(ido+1) = PetscRealPart(kr)
291 ! write(stdout,*)eigen(ido+1)
292
293 !Grab the vectors
294 call vecscatterbegin(vsctx, xr, f_xr, insert_values, scatter_forward, ierr)
295 call vecscatterend(vsctx, xr, f_xr, insert_values, scatter_forward, ierr)
296 if (all_procs .or. grid % grank == master) then
297 call vecgetarrayread(f_xr, xx_v, ierr)
298 call dresult % handle_eigenvector(xx_v(1:mat_dimen), mat_dimen)
299 end if
300 if (iand(dresult % vector_storage, pass_to_cdenprop) /= 0) then
301 if (grid % grank == master) then
302 eigenvector % CV(1:mat_dimen, 1) = xx_v(1:mat_dimen)
303 end if
304 call dresult % ci_vec % redistribute(eigenvector, grid % gcntxt, mat_dimen, 1, 1, 1, 1, ido + 1)
305 end if
306 if (all_procs .or. grid % grank == master) then
307 call vecrestorearrayread(f_xr, xx_v, ierr)
308 end if
309 end do
310
311 if (iand(dresult % vector_storage, pass_to_cdenprop) /= 0) then
312 call dresult % export_eigenvalues(eigen(1:num_eigenpair), matrix_elements % diagonal, num_eigenpair, &
313 mat_dimen, dresult % ci_vec % ei, dresult % ci_vec % iphz)
314 write (stdout, '(/," Eigensolutions exported into the CIVect structure.",/)')
315 end if
316
317 ! finish writing the solution file and let other processes access it
318 if (grid % grank == master) then
319 call dresult % finalize_solutions(option)
320 end if
321
322 call epsdestroy(eps, ierr)
323 call vecdestroy(xr, ierr)
324 call vecdestroy(xi, ierr)
325
326 deallocate(eigen)
327
328 end subroutine diagonalize_slepc
329
330end module slepcdiagonalizer_module
MPI SCATCI Constants module.
integer, parameter diagonalize
Diagonalize.
integer, parameter pass_to_cdenprop
Eigenpairs will be saved in memory and passed to CDENPROP and outer interface.