MPI-SCATCI  2.0
An MPI version of SCATCI
Dispatcher_module.F90
! Copyright 2019
!
! For a comprehensive list of the developers that contributed to these codes
! see the UK-AMOR website.
!
! This file is part of UKRmol-in (UKRmol+ suite).
!
! UKRmol-in is free software: you can redistribute it and/or modify
! it under the terms of the GNU General Public License as published by
! the Free Software Foundation, either version 3 of the License, or
! (at your option) any later version.
!
! UKRmol-in is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! GNU General Public License for more details.
!
! You should have received a copy of the GNU General Public License
! along with UKRmol-in (in source/COPYING). Alternatively, you can also visit
! <https://www.gnu.org/licenses/>.

!> Dispatcher module.
module dispatcher_module

    use precisn,                      only: longint, wp
    use const_gbl,                    only: stdout
    use mpi_gbl,                      only: nprocs, mpi_xermsg
    use consts_mpi_ci
    use options_module,               only: options
    use basematrix_module,            only: basematrix
    use baseintegral_module,          only: baseintegral
    use diagonalizer_module,          only: basediagonalizer
    use matrixelement_module,         only: matrixelementvector
    use writermatrix_module,          only: writermatrix
    use lapackdiagonalizer_module,    only: lapackdiagonalizer
    use davidsondiagonalizer_module,  only: davidsondiagonalizer
#ifdef arpack
    use arpackdiagonalizer_module,    only: arpackdiagonalizer
#endif
    use timing_module,                only: master_timer
    use alchemy_module,               only: alchemyintegral
    use sweden_module,                only: swedenintegral
    use ukrmol_module,                only: ukrmolintegral
    !use BaseCorePotential_module,   only: BaseCorePotential
    !use MOLPROCorePotential_module, only: MOLPROCorePotential
#ifdef usempi
#ifdef slepc
    use slepcmatrix_module,           only: slepcmatrix, initialize_slepc
    use slepcdiagonalizer_module,     only: slepcdiagonalizer
    !use SLEPCDavidsonDiagonalizer_module, only: SLEPCDavidsonDiagonalizer
#endif
#ifdef scalapack
    use scalapackmatrix_module,       only: scalapackmatrix
    use scalapackdiagonalizer_module, only: scalapackdiagonalizer
#ifdef elpa
    use elpamatrix_module,            only: elpamatrix
    use elpadiagonalizer_module,      only: elpadiagonalizer, initialize_elpa
#endif
#endif
#endif

    implicit none

    private

    public :: initialize_libraries, dispatchmatrixanddiagonalizer, dispatchintegral

contains

    !> Initialize libraries used by MPI-SCATCI.
    subroutine initialize_libraries
#if defined(usempi) && defined(slepc)
        call initialize_slepc
#endif
#if defined(usempi) && defined(scalapack) && defined(elpa)
        call initialize_elpa
#endif
    end subroutine initialize_libraries


    !> Determines which matrix format / diagonalizer pair is to be used by SCATCI, based on
    !> the requested diagonalizer, the matrix size and the number of eigenpairs wanted.
    subroutine dispatchmatrixanddiagonalizer (diag_choice, force_serial, matrix_size, number_of_eigenpairs, matrix, diagonalizer, &
                                              integral, matrix_io)
        class(basematrix),       pointer, intent(out) :: matrix
        class(basediagonalizer), pointer, intent(out) :: diagonalizer
        class(baseintegral),              intent(in)  :: integral

        integer, intent(in) :: diag_choice, force_serial
        integer, intent(in) :: matrix_size, number_of_eigenpairs
        integer, intent(in) :: matrix_io
        integer :: final_choice

        ! If a diagonalizer was forced, use it; otherwise apply the selection rules below.
        if (diag_choice /= scatci_decide) then
            final_choice = diag_choice
        else
            if (number_of_eigenpairs > real(matrix_size) * 0.2) then ! More than 20% of the eigenvectors requires a dense diagonalizer
                final_choice = dense_diag
            else if (number_of_eigenpairs <= 3) then ! Three or fewer: use Davidson
                final_choice = davidson_diag
            else ! In between, use an iterative library like ARPACK or SLEPc
                final_choice = iterative_diag
            end if
        end if
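        ! Worked example of the rules above (illustrative numbers, not from the source): with
        ! matrix_size = 1000 the dense threshold is 0.2 * 1000 = 200 eigenpairs, so requesting
        ! 300 eigenpairs selects dense_diag, requesting 2 selects davidson_diag, and requesting
        ! 50 falls in between and selects iterative_diag.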

#ifdef usempi
        ! If we are dealing with an MPI diagonalization then we select the appropriate version of the diagonalizer
        if (nprocs > 1 .and. force_serial == 0) then
            select case (final_choice)
            case (dense_diag) ! The MPI DENSE diagonalization is SCALAPACK
#ifdef scalapack
#ifdef elpa
                write (stdout, "('ELPA chosen as diagonalizer of choice. Hohoho!')")
                allocate(elpamatrix::matrix)
                allocate(elpadiagonalizer::diagonalizer)
#else
                write (stdout, "('ScaLAPACK chosen as diagonalizer of choice. Nice!')")
                allocate(scalapackmatrix::matrix)
                allocate(scalapackdiagonalizer::diagonalizer)
#endif
#else
                call mpi_xermsg('Dispatcher_module', 'DispatchMatrixAndDiagonalizer', &
                                'ScaLAPACK needed for dense diagonalization!', 1, 1)
#endif
            case (davidson_diag) ! Use the SCATCI Davidson diagonalizer
                write (stdout, "('Davidson chosen as diagonalizer of choice. Sweet!')")
#ifdef slepc
                write (stdout, "('SLEPc chosen as diagonalizer of choice. Amazing!')")
                allocate(slepcmatrix::matrix)
                allocate(slepcdiagonalizer::diagonalizer)
#elif defined(scalapack)
#ifdef elpa
                write (stdout, "('Compiled without SLEPc, defaulting to ELPA')")
                allocate(elpamatrix::matrix)
                allocate(elpadiagonalizer::diagonalizer)
#else
                write (stdout, "('Compiled without SLEPc, defaulting to ScaLAPACK')")
                allocate(scalapackmatrix::matrix)
                allocate(scalapackdiagonalizer::diagonalizer)
#endif
#else
                call mpi_xermsg('Dispatcher_module', 'DispatchMatrixAndDiagonalizer', &
                                'SLEPc or ScaLAPACK needed for distributed diagonalization!', 1, 1)
#endif
            case (iterative_diag) ! ITERATIVE uses SLEPc
#ifdef slepc
                write (stdout, "('SLEPc chosen as diagonalizer of choice. Amazing!')")
                allocate(slepcmatrix::matrix)
                allocate(slepcdiagonalizer::diagonalizer)
#elif defined(scalapack)
#ifdef elpa
                write (stdout, "('Compiled without SLEPc, defaulting to ELPA')")
                allocate(elpamatrix::matrix)
                allocate(elpadiagonalizer::diagonalizer)
#else
                write (stdout, "('Compiled without SLEPc, defaulting to ScaLAPACK')")
                allocate(scalapackmatrix::matrix)
                allocate(scalapackdiagonalizer::diagonalizer)
#endif
#else
                call mpi_xermsg('Dispatcher_module', 'DispatchMatrixAndDiagonalizer', &
                                'SLEPc or ScaLAPACK needed for distributed diagonalization!', 1, 1)
#endif
            case default
                write (stdout, "('That is not a diagonalizer I would have chosen tbh, lets not do it')")
                call mpi_xermsg('Dispatcher_module', 'DispatchMatrixAndDiagonalizer', &
                                'Invalid diagonalizer flag chosen', 1, 1)
            end select
            return
        end if
#endif
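        ! Summary of the distributed branch above (nprocs > 1 and force_serial == 0):
        ! dense_diag uses ELPA when compiled in, otherwise ScaLAPACK, and aborts if ScaLAPACK
        ! support is missing; davidson_diag and iterative_diag prefer SLEPc and fall back to
        ! ELPA or ScaLAPACK when SLEPc was not compiled in.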

        select case (final_choice)
        case (dense_diag) ! Use our own LAPACK-based diagonalizer
            write (stdout, "('LAPACK chosen as diagonalizer of choice. Nice!')")
            !allocate(MatrixElementVector::matrix)
            call integral % write_matrix_header(matrix_io, matrix_size)
            allocate(writermatrix::matrix)
            allocate(lapackdiagonalizer::diagonalizer)
        case (davidson_diag) ! Use the SCATCI Davidson diagonalizer
            write (stdout, "('Davidson chosen as diagonalizer of choice. Sweet!')")
            call integral % write_matrix_header(matrix_io, matrix_size)
            allocate(writermatrix::matrix)
            allocate(davidsondiagonalizer::diagonalizer)
        case (iterative_diag) ! Use the SCATCI ARPACK diagonalizer
#ifdef arpack
            write (stdout, "('ARPACK chosen as diagonalizer of choice. Amazing!')")
            call integral % write_matrix_header(matrix_io, matrix_size)
            allocate(writermatrix::matrix)
            allocate(arpackdiagonalizer::diagonalizer)
#else
            write (stdout, "('ARPACK not available - using LAPACK. Sorry!')")
            call integral % write_matrix_header(matrix_io, matrix_size)
            allocate(writermatrix::matrix)
            allocate(lapackdiagonalizer::diagonalizer)
#endif
        case default
            write (stdout, "('That is not a diagonalizer I would have chosen tbh, lets not do it')")
            stop "Invalid diagonalizer flag chosen"
        end select

    end subroutine dispatchmatrixanddiagonalizer

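    ! Illustrative caller sketch (hypothetical, not part of this module): a driver would typically
    ! initialize the external libraries once and then request a matrix/diagonalizer pair, e.g.
    !
    !     call initialize_libraries
    !     call dispatchmatrixanddiagonalizer(scatci_decide, 0, matrix_size, number_of_eigenpairs, &
    !                                        matrix, diagonalizer, integral, matrix_io)
    !
    ! where matrix and diagonalizer are unassociated class pointers of type basematrix and
    ! basediagonalizer, and the remaining arguments come from the calculation set-up.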

    !> This subroutine dispatches the correct integral class based on simple parameters.
    subroutine dispatchintegral (sym_group_flag, ukrmolplus_integrals, integral)
        class(baseintegral), pointer, intent(out) :: integral
        integer, intent(in) :: sym_group_flag
        logical, intent(in) :: ukrmolplus_integrals

        if (sym_group_flag /= symtype_d2h) then
            allocate(alchemyintegral::integral)
        else
            if (ukrmolplus_integrals) then
                allocate(ukrmolintegral::integral)
            else
                allocate(swedenintegral::integral)
            end if
        end if

    end subroutine dispatchintegral
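
    ! Illustrative usage sketch (hypothetical, not part of this module): with D2h symmetry the
    ! UKRmol+ switch selects between the UKRmol+ and SWEDEN integral backends, while any other
    ! symmetry flag selects ALCHEMY, e.g.
    !
    !     call dispatchintegral(symtype_d2h, .true.,  integral)   ! -> ukrmolintegral
    !     call dispatchintegral(symtype_d2h, .false., integral)   ! -> swedenintegral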


    !subroutine DispatchECP(ecp_type,ecp_filename,vals_defined,num_targ_symm,num_targ_states_symm,spatial_symm,spin_symm,ecp)
    !    integer,intent(in) :: ecp_type
    !    character(len=line_len),intent(in) :: ecp_filename
    !    logical,intent(in) :: vals_defined
    !    integer,intent(in) :: num_targ_symm,num_targ_states_symm(num_targ_symm),spatial_symm(num_targ_symm),spin_symm(num_targ_symm)
    !    class(BaseCorePotential),pointer,intent(out) :: ecp
    !
    !    if(ecp_type == ECP_TYPE_NULL) then
    !        ecp => null()
    !        return
    !    else if(vals_defined == .true.) then
    !        select case(ecp_type)
    !        case(ECP_TYPE_MOLPRO)
    !            allocate(MOLPROCorePotential::ecp)
    !        case default
    !            stop "Unrecognized ECP type"
    !        end select
    !
    !        call ecp%construct(num_targ_symm,num_targ_states_symm,spatial_symm,spin_symm)
    !
    !        call ecp%parse_ecp(ecp_filename)
    !    else
    !        write(stdout,"('TARGET SPATIAL SYMMETRY AND MULTIPLICITY NOT DEFINED IN INPUT')")
    !        stop "TARGET SPATIAL SYMMETRY AND MULTIPLICITY NOT DEFINED IN INPUT"
    !    endif
    !
    !end subroutine DispatchECP

end module dispatcher_module
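
A minimal sketch of how a driver might call this module, assuming the UKRmol-in modules listed in the use statements above are available. The program name, unit number and problem sizes are illustrative assumptions, and the set-up a real SCATCI run performs beforehand (reading the integral files, opening the Hamiltonian unit) is omitted.

program dispatcher_demo          ! hypothetical driver, for illustration only
    use consts_mpi_ci,        only: scatci_decide, symtype_d2h
    use baseintegral_module,  only: baseintegral
    use basematrix_module,    only: basematrix
    use diagonalizer_module,  only: basediagonalizer
    use dispatcher_module,    only: initialize_libraries, dispatchintegral, &
                                    dispatchmatrixanddiagonalizer

    implicit none

    class(baseintegral),     pointer :: integral
    class(basematrix),       pointer :: matrix
    class(basediagonalizer), pointer :: diagonalizer

    ! Illustrative values; a real run takes these from the SCATCI input and integral files.
    integer, parameter :: matrix_size = 1000, number_of_eigenpairs = 10, matrix_io = 26

    call initialize_libraries

    ! D2h symmetry with UKRmol+ integrals selects the ukrmolintegral backend.
    call dispatchintegral(symtype_d2h, .true., integral)

    ! Let SCATCI choose the diagonalizer (scatci_decide) and do not force serial execution;
    ! with 10 of 1000 eigenpairs requested the rules resolve to iterative_diag.
    call dispatchmatrixanddiagonalizer(scatci_decide, 0, matrix_size, number_of_eigenpairs, &
                                       matrix, diagonalizer, integral, matrix_io)
end program dispatcher_demo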