MPI-SCATCI  2.0
An MPI version of SCATCI
SLEPCMatrix_module.F90
1 ! Copyright 2019
2 !
3 ! For a comprehensive list of the developers that contributed to these codes
4 ! see the UK-AMOR website.
5 !
6 ! This file is part of UKRmol-in (UKRmol+ suite).
7 !
8 ! UKRmol-in is free software: you can redistribute it and/or modify
9 ! it under the terms of the GNU General Public License as published by
10 ! the Free Software Foundation, either version 3 of the License, or
11 ! (at your option) any later version.
12 !
13 ! UKRmol-in is distributed in the hope that it will be useful,
14 ! but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ! GNU General Public License for more details.
17 !
18 ! You should have received a copy of the GNU General Public License
19 ! along with UKRmol-in (in source/COPYING). Alternatively, you can also visit
20 ! <https://www.gnu.org/licenses/>.
21 
30 module slepcmatrix_module
31 
32  use const_gbl, only: stdout
33  use consts_mpi_ci, only: mat_dense, mat_sparse
34  use precisn, only: wp
35  use mpi_gbl, only: mpi_mod_allgather, mpi_mod_scan_sum
36  use distributedmatrix_module, only: distributedmatrix
37  use matrixcache_module, only: matrixcache
38  use memorymanager_module, only: master_memory
39  use parallelization_module, only: grid => process_grid
40  use timing_module, only: master_timer
41  use petsc
42 
43 #include <finclude/petsc.h>
44 
45  implicit none
46 
48 
49  private
50 
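 !> Helper type describing one row of the matrix in CSR-like form: the row index,
 !> the column indices, the corresponding coefficients and the number of stored columns.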
51  type :: csrformat
52  petscint :: row
53  petscint, allocatable :: col(:)
54  petscscalar, allocatable :: coeff(:)
55  petscint :: num_cols
56  contains
57  procedure, public :: construct => construct_csr
58  procedure, public :: sort => sort_csr
59  final :: destroy_csr
60  end type csrformat
61 
62  type, extends(distributedmatrix) :: slepcmatrix
63 
64  type(matrixcache) :: slepc_cache
65  integer :: start_row
66  integer :: end_row
67  integer :: local_size
68  integer :: mem_track
69  mat, pointer :: a
70 
71  ! PETSc-kind copies of the row-range variables, required so that both PETSc and the local MPI configuration behave consistently
72  petscint :: start_row_petsc
73  petscint :: end_row_petsc
74  petscint :: local_size_petsc
75  petscint :: matrix_dimension_petsc
76 
78  petscint, allocatable :: diagonal_nonzero(:)
79  petscint, allocatable :: offdiagonal_nonzero(:)
80 
81  !This stores which rows belong to this process
82  integer :: diagonal_start,diagonal_end
83  logical :: compressed = .false.
84  logical :: mat_created = .false.
85 
86  contains
87 
88  procedure, public :: print => print_slepc
89  procedure, public :: setup_diag_matrix => initialize_struct_slepc
90  procedure, public :: get_matelem_self => get_matelem_slepc
91  procedure, public :: clear_matrix => clear_slepc
92  procedure, public :: destroy_matrix => destroy_slepc
93  procedure, public :: finalize_matrix_self => finalize_matrix_slepc
94 
95  procedure, public :: insert_into_diag_matrix => insert_into_hard_cache
96  procedure, private :: compress_cache_to_csr_format
97  procedure, private :: insert_csr_into_hard_cache
98  procedure, public :: print_nonzeros
99  procedure, public :: get_petsc_matrix
100  procedure, private :: create_petsc_mat
101  procedure, private :: destroy_petsc_mat
102 
103  end type slepcmatrix
104 
105 contains
106 
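 !> Initialize SLEPc.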
115  subroutine initialize_slepc
116 
117  petscerrorcode :: ierr
118 
119  call slepcinitialize(petsc_null_character, ierr)
120  write (stdout, "('SLEPc initialization status = ',I0)") ierr
121  chkerrq(ierr)
122 
123  end subroutine initialize_slepc
124 
125 
126  subroutine construct_csr (this, num_cols)
127  class(csrformat) :: this
128  integer, intent(in) :: num_cols
129 
130  if (allocated(this % coeff)) deallocate(this % coeff)
131  if (allocated(this % col)) deallocate(this % col)
132 
133  allocate(this % coeff(num_cols))
134  allocate(this % col(num_cols))
135 
136  end subroutine construct_csr
137 
138 
139  subroutine destroy_csr (this)
140  type(csrformat) :: this
141 
142  if (allocated(this % col)) deallocate(this % col)
143  if (allocated(this % coeff)) deallocate(this % coeff)
144 
145  end subroutine destroy_csr
146 
147 
148  subroutine sort_csr (this)
149  class(csrformat) :: this
150 
151  call qsortcsr (this % col, this % coeff)
152 
153  end subroutine sort_csr
154 
155 
156  function get_petsc_matrix (this) result(m)
157  class(slepcmatrix) :: this
158  mat, pointer :: m
159 
160  m => this % A
161 
162  end function get_petsc_matrix
163 
164 
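 !> Allocate the PETSc matrix handle and mark it as created. The PETSc matrix object
 !> itself is created later, in initialize_struct_slepc.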
165  subroutine create_petsc_mat (this, matrix_size, matrix_type)
166  class(slepcmatrix) :: this
167  petscint, intent(in) :: matrix_size
168  integer, intent(in) :: matrix_type
169  integer :: ierr, non_zero_guess
170 
171  if (this % mat_created) then
172  stop "PETSC matrix already created!!"
173  endif
174 
175  allocate(this % A)
176 
177  if (matrix_type == mat_dense) then
178 
179  else if (matrix_type == mat_sparse) then
180 
181  end if
182 
183  this % mat_created = .true.
184 
185  end subroutine create_petsc_mat
186 
187 
188  subroutine destroy_petsc_mat (this)
189  class(slepcmatrix) :: this
190  petscerrorcode :: ierr
191 
192  if (this % mat_created) then
193  call matdestroy(this % A, ierr)
194  deallocate(this % A)
195  this % A => null()
196  call master_memory % free_memory(this % mem_track, 1)
197  this % mem_track = 0
198  this % mat_created = .false.
199  end if
200 
201  end subroutine destroy_petsc_mat
202 
203 
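 !> Randomly permute the entries of an integer array in place (Fisher-Yates shuffle).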
204  subroutine shuffle (a)
205  petscint, intent(inout) :: a(:)
206 
207  integer :: i, randpos, temp
208  real :: r
209 
210  do i = size(a), 2, -1
211  call random_number(r)
212  randpos = int(r * i) + 1
213  temp = a(randpos)
214  a(randpos) = a(i)
215  a(i) = temp
216  end do
217 
218  end subroutine shuffle
219 
220 
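 !> Set up the distributed PETSc matrix: determine the locally owned row range via
 !> PetscSplitOwnership, estimate the per-row preallocation (exact counts inside the
 !> leading dense block of size block_size, a 90%-sparsity guess beyond it), create the
 !> dense or symmetric block AIJ matrix, and prime its nonzero structure with a dummy
 !> insertion pass that is zeroed again before use.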
221  subroutine initialize_struct_slepc (this, matrix_size, matrix_type, block_size)
222 
223  class(slepcmatrix) :: this
224  integer, intent(in) :: matrix_size, matrix_type, block_size
225 
226  petscint :: non_zero_guess, petsc_row, to_insert, one = 1, dummy_int
227  petscerrorcode :: ierr
228  real(wp) :: mem_usage_begin, mem_usage_end, ratio, dummy_real
229  integer :: ido, total_vals, per_elm, l_update, c_update, indicies, total_elems, nprocs
230 
231  integer, allocatable :: mem_use_total(:)
232  petscint, allocatable :: test_insertion_ij(:)
233  petscscalar, allocatable :: test_insertion_val(:)
234 
235  !If we are dealing with a dense matrix then we don't bother since SLEPC automatically handles this
236  ierr = 0
237 
238  call this % slepc_cache % construct
239  call this % destroy_PETSC_mat
240 
241  this % matrix_dimension_PETSC = this % matrix_dimension
242  write (stdout, "('Mat_size = ',i8)") this % matrix_dimension_PETSC
243 
244  call this % create_PETSC_mat(this % matrix_dimension_PETSC, matrix_type)
245 
246  this % local_size_PETSC = petsc_decide
247 
248  !Let's find our local dimension size
249  call petscsplitownership(int(grid % gcomm, kind(petsc_comm_world)), &
250  this % local_size_PETSC, this % matrix_dimension_PETSC, ierr)
251 
252  !Convert to our local integer
253  this % local_size = this % local_size_PETSC
254 
255  write (stdout, "('Local_size = ',i8)") this % local_size
256  write (stdout, "('THRESHOLD = ',es16.8)") this % threshold
257 
258  !Get the end index
259  call mpi_mod_scan_sum(this % local_size, this % end_row, grid % gcomm)
260  !Get the start index
261  this % start_row = this % end_row - this % local_size
262 
263  !store the rest just in case petsc needs it
264  this % start_row_PETSC = this % start_row
265  this % end_row_PETSC = this % end_row
266  write (stdout, "('start,end = ',2i8)") this % start_row, this % end_row
267 
268  !Dense doesn't require much
269  if (this % matrix_type == mat_dense) then
270  call petscmemorygetcurrentusage(mem_usage_begin, ierr)
271  call matcreatedense(grid % gcomm, petsc_decide, petsc_decide, matrix_size, matrix_size, &
272  petsc_null_scalar, this % A, ierr)
273  call petscmemorygetcurrentusage(mem_usage_end, ierr)
274 
275  write (stdout, "('Matrix uses ',f8.3,' GB of memory')") (mem_usage_end - mem_usage_begin) / 1024**3
276  this % mem_track = mem_usage_end - mem_usage_begin
277 
278  call master_memory % track_memory(this % mem_track, 1, 0, 'PETSC MATRIX')
279  return
280  end if
281 
282  !Now we need to allocate what is needed
283  if (allocated(this % diagonal_nonzero)) deallocate(this % diagonal_nonzero)
284  if (allocated(this % offdiagonal_nonzero)) deallocate(this % offdiagonal_nonzero)
285  !The final procedure should take care of all the arrays
286  !This will store our non-zeros for the local matrix
287  allocate(this % diagonal_nonzero(this % local_size), stat = ierr)
288  allocate(this % offdiagonal_nonzero(this % local_size), stat = ierr)
289 
290  this % diagonal_nonzero = 0
291  this % offdiagonal_nonzero = 0
292 
293  total_elems = 0
294 
295  this % diagonal_start = this % start_row_PETSC
296  this % diagonal_end = min(this % start_row_PETSC + this % local_size, this % matrix_dimension)
297 
298  do ido = 1, this % local_size
299  total_vals = this % matrix_dimension - this % start_row - ido + 1
300  this % diagonal_nonzero(ido) = this % diagonal_end - this % diagonal_start + 1 - ido
301  this % offdiagonal_nonzero(ido) = total_vals - this % diagonal_nonzero(ido)
302 
303  !If we're above the block size then we use the 90%-sparsity guess
304  if ((this % start_row + ido - 1) >= block_size) then
305  !Now we are dealing with sparsity
306  !Let's compute the ratio between the diagonal and off-diagonal parts
307  ratio = real(this % diagonal_nonzero(ido)) / real(total_vals)
308  !Reduce the number of values to 10% of the maximum
309  total_vals = real(total_vals) * 0.1
310  !Now distribute
311  !There should be at least one entry in the diagonal part
312  !The off-diagonal will be larger, so we give the diagonal a bit more leeway
313  this % diagonal_nonzero(ido) = max(1, int(real(total_vals) * ratio))
314  this % offdiagonal_nonzero(ido) = int(real(total_vals) * (1.0 - ratio))
315  end if
316  total_elems = total_elems + this % diagonal_nonzero(ido) + this % offdiagonal_nonzero(ido)
317  !write(stdout,"(5i8)") this%start_row,this%end_row,total_vals,this%diagonal_nonzero(ido),this%offdiagonal_nonzero(ido)
318  end do
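 !Illustrative example of the preallocation guess above (the numbers are hypothetical):
 !a row beyond the dense block with total_vals = 1000 remaining columns, 400 of them in
 !the diagonal block, gives ratio = 0.4; the 10% guess keeps 100 entries, preallocated
 !as max(1,40) = 40 diagonal and 60 off-diagonal nonzeros.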
319 
320  call petscmemorygetcurrentusage(mem_usage_begin, ierr)
321 
322  this % continuum_counter = 0
323  this % L2_counter = 0
324 
325  call matcreatesbaij(grid % gcomm, 1, petsc_decide, petsc_decide, matrix_size, matrix_size, &
326  petsc_default_integer, this % diagonal_nonzero, petsc_default_integer, this % offdiagonal_nonzero, &
327  this % A, ierr)
328 
329  !call MatCreate(PETSC_COMM_WORLD,this%A,ierr)
330  !CHKERRQ(ierr)
331  !call MatSetType(this%A,MATSBAIJ,ierr)
332  !CHKERRQ(ierr)
333  !call MatSetSizes(this%A,PETSC_DECIDE,PETSC_DECIDE,matrix_size,matrix_size,ierr)
334  !CHKERRQ(ierr)
335  !Now we set the preallocation
336  !if(nprocs > 1) then
337  ! call MatMPISBAIJSetPreallocation(this%A,1,PETSC_DECIDE,this%diagonal_nonzero,PETSC_DECIDE,this%offdiagonal_nonzero,ierr)
338  !else
339  ! call MatSeqSBAIJSetPreallocation(this%A,1,PETSC_DECIDE,this%diagonal_nonzero,ierr)
340  !endif
341  !call MatSetUp(this%A,ierr)
342  !call MatXAIJSetPreallocation(this%A, 1,PETSC_NULL_INTEGER, PETSC_NULL_INTEGER,this%diagonal_nonzero, this%offdiagonal_nonzero,ierr)
343  chkerrq(ierr)
344  !call MatSetUp(this%A,ierr)
345 
346  call matsetoption(this % A, mat_ignore_off_proc_entries, petsc_true, ierr)
347  call matsetoption(this % A, mat_no_off_proc_entries, petsc_true, ierr)
348  call matsetoption(this % A, mat_ignore_lower_triangular, petsc_true, ierr)
349  call matsetoption(this % A, mat_new_nonzero_allocation_err, petsc_false, ierr)
350 
351  deallocate(this % diagonal_nonzero, this % offdiagonal_nonzero)
352 
353  allocate(test_insertion_ij(this % matrix_dimension), test_insertion_val(this % matrix_dimension))
354 
355  do ido = 1, this % matrix_dimension
356  test_insertion_ij(ido) = ido - 1
357  test_insertion_val(ido) = 1.0_wp
358  end do
359 
360  !--------We use this to embed the non-zero structure into the block to improve performance when inserting values
361  call master_timer % start_timer("Insertion time")
362  do ido = 1, this % local_size
363  total_vals = this % matrix_dimension - this % start_row - ido + 1
364  petsc_row = ido - 1
365  dummy_int = this % start_row + ido
366  if ((dummy_int - 1) < block_size) then
367  to_insert = size(test_insertion_ij(dummy_int:this % matrix_dimension))
368  !write(stdout,*) ido,to_insert,dummy_int,total_vals
369  !call MatSetValuesBlocked(this%A,1,ido,csr(ido)%num_cols,csr(ido)%col,csr(ido)%coeff,INSERT_VALUES,ierr)
370  call matsetvaluesblocked(this % A, one, dummy_int - one, to_insert, &
371  test_insertion_ij(dummy_int : this % matrix_dimension), &
372  test_insertion_val(dummy_int : this % matrix_dimension), &
373  insert_values, ierr)
374  else
375  exit
376  end if
377  end do
378  call matassemblybegin(this % A, mat_flush_assembly, ierr)
379  call matassemblyend(this % A, mat_flush_assembly, ierr)
380 
381  !Now let's clear just in case we have any issues
382  call matzeroentries(this % A, ierr)
383 
384 
385  deallocate(test_insertion_ij, test_insertion_val)
386 
387  write (stdout, "('PETSC matrix creation completed!')")
388 
389  call master_timer % stop_timer("Insertion time")
390 
391  call petscmemorygetcurrentusage(mem_usage_end, ierr)
392 
393  write (stdout, "('Matrix uses ',f15.8,' B of memory')") mem_usage_end - mem_usage_begin
394  write (stdout, "('Estimated uses ',i12,' B of memory')") total_elems * 16
395 
396  this % mem_track = mem_usage_end - mem_usage_begin
397 
398  allocate(mem_use_total(grid % gprocs))
399 
400  total_elems = total_elems*16
401 
402  call mpi_mod_allgather(total_elems, mem_use_total, grid % gcomm)
403 
404  total_elems = sum(mem_use_total)
405  total_elems = total_elems / grid % gprocs
406  this % mem_track = total_elems
407 
408  deallocate(mem_use_total)
409 
410  write (stdout, "('Average uses ',i12,' B of memory')") total_elems
411 
412  call master_memory % track_memory(1, total_elems, 0, 'PETSC MATRIX')
413 
414  end subroutine initialize_struct_slepc
415 
416 
417  !TODO: Write a 'compressed' version of this
418  subroutine get_matelem_slepc (this, idx, i, j, coeff)
419  class(slepcmatrix) :: this
420  integer, intent(in) :: idx
421  integer, intent(out) :: i, j
422  real(wp), intent(out) :: coeff
423 
424  i = 0
425  j = 0
426  coeff = 0.0
427  !call this%matrix_cache%get_from_cache(idx,i,j,coeff)
428 
429  end subroutine get_matelem_slepc
430 
431 
432  subroutine print_nonzeros (this)
433  class(slepcmatrix) :: this
434  integer :: ido
435 
436  do ido = 1, this % local_size
437  write (stdout, "('dnnz = ',i8,', onnz = ',i8)") this % diagonal_nonzero(ido), this % offdiagonal_nonzero(ido)
438  end do
439 
440  end subroutine print_nonzeros
441 
442 
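 !> Compress the locally cached matrix elements into per-row CSR chunks. The cache is
 !> sorted by row, per-row column and coefficient arrays are filled, and each cache
 !> chunk is destroyed as soon as its contents have been copied.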
443  logical function compress_cache_to_csr_format (this, matrix_cache, csr_matrix)
444  class(slepcmatrix) :: this
445  type(csrformat), allocatable, intent(out) :: csr_matrix(:)
446  class(matrixcache) :: matrix_cache
447 
448  integer :: ido, jdo, start_idx, end_idx, num_elems, row_count, row_idx, first_row
449  integer :: current_chunk, current_row, next_row, last_chunk, i, j
450  real(wp) :: coeff
451 
453  compress_cache_to_csr_format = .false.   !set to .true. once a CSR structure has been produced
454  !Let's not bother if it's a DENSE matrix
455  if (this % matrix_type == mat_dense) return
456 
457  !sort the matrix cache in ascending row order
458  call matrix_cache % sort
459 
460  current_row = -1
461  row_count = 0
462  do ido = 1, matrix_cache % get_size()
463  call matrix_cache % get_from_cache(ido, i, j, coeff)
464  if (ido == 1) first_row = i
465  if (i /= current_row) then
466  row_count = row_count + 1
467  current_row = i
468  end if
469  end do
470 
471  call master_timer % start_timer("CSR conversion")
472 
473  num_elems = matrix_cache % get_size()
474 
475  if (row_count == 0) then
476  call master_timer % stop_timer("CSR conversion")
477  return
478  end if
479 
480  if (allocated(csr_matrix)) deallocate(csr_matrix)
481 
482  !allocate the number of local rows
483  allocate(csr_matrix(row_count))
484 
485  csr_matrix(:) % num_cols = 0
486 
487  next_row = -1
488  last_chunk = 1
489  start_idx = 1
490  end_idx = 99
491  ido = 1
492  row_idx = 1
493  current_row = first_row
494 
495  !Loop through number of elements
496  do while (ido <= num_elems)
497  row_count = 0
498 
499  !counting phase
500  do jdo = ido, num_elems
501  call matrix_cache % get_from_cache(jdo, i, j, coeff)
502 
503  !Check row and threshold count
504  if (i == current_row) then
505  if (abs(coeff) > this % threshold) row_count = row_count + 1
506  end_idx = jdo
507  else
508  next_row = i
509  exit
510  end if
511  end do
512  csr_matrix(row_idx) % row = current_row
513  csr_matrix(row_idx) % num_cols = row_count
514 
515  !Do we have anything to store?
516  if (row_count > 0) then
517  !allocation phase
518  call csr_matrix(row_idx) % construct(row_count)
519 
520  !now we loop through and store the elements
521  do jdo = 1, row_count
522  !Get the element
523  call matrix_cache % get_from_cache(jdo + start_idx - 1, i, j, coeff)
524  !add it to the list
525  csr_matrix(row_idx) % col(jdo) = j
526  csr_matrix(row_idx) % coeff(jdo) = coeff
527  current_chunk = matrix_cache % get_chunk_idx(jdo + start_idx - 1)
528  !also if we've moved to the next chunk then delete the lastone
529  if (last_chunk /= current_chunk) then
530  call matrix_cache % matrix_arrays(last_chunk) % destroy
531  last_chunk = current_chunk
532  end if
533  end do
534  end if
535 
536  !If we've hit the last element previously then we can leave
537  ido = end_idx + 1
538  start_idx = end_idx + 1
539  row_idx = row_idx + 1
540  current_row = next_row
541  end do
542 
544  compress_cache_to_csr_format = .true.
545  !After this we can delete anything left over
546  call matrix_cache % destroy
547  call matrix_cache % construct
548  call master_timer % stop_timer("CSR conversion")
549 
550  end function compress_cache_to_csr_format
551 
552 
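 !> Inserts an element into the hard storage which is considered the final location
 !> before diagonalization. The return value reports whether this process stored the element.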
559  logical function insert_into_hard_cache (this, row, column, coefficient)
560  class(slepcmatrix) :: this
561  integer, intent(in) :: row, column
562  real(wp), intent(in) :: coefficient
563  petscint :: i, j
564  petscerrorcode :: ierr
565 
566  !if(row==column) call this%store_diagonal(row,coefficient)
567 
568  i = row - 1
569  j = column - 1
570  insert_into_hard_cache = .false.
571  if (abs(coefficient) < this % threshold) return
572 
573  !Dense has slightly different rules for this
574  if (this % matrix_type == mat_dense) then
575  if (i >= this % start_row .and. i < this % end_row) then
576  call matsetvalue(this % A, i, j, coefficient, insert_values, ierr)
577  end if
578  if (j >= this % start_row .and. j < this % end_row) then
579  call matsetvalue(this % A, j, i, coefficient, insert_values, ierr)
580  insert_into_hard_cache = .true.
581  end if
582  return
583  end if
584 
585  if (j < this % start_row .or. j >= this % end_row) then
586  insert_into_hard_cache = .false.
587  return
588  end if
589 
590  insert_into_hard_cache = .true.
591  !write(stdout,"(2i8,1)") row,column
592  !call this%slepc_cache%insert_into_cache(column-1,row-1,coefficient)
593 
594  call matsetvalue(this % A, j, i, coefficient, insert_values, ierr)
595 
596  this % n = this % n + 1
597 
598  end function insert_into_hard_cache
599 
600 
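 !> Insert the rows of a CSR structure (as produced by compress_cache_to_csr_format)
 !> into the PETSc matrix, skipping rows that are not owned by this process.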
601  subroutine insert_csr_into_hard_cache (this, csr)
602  class(slepcmatrix) :: this
603  class(csrformat), intent(in) :: csr(:)
604  integer :: size_csr
605  petscint :: row, column, ido, one = 1
606  petscerrorcode :: ierr
607 
608  if (this % matrix_type == mat_dense) return
609 
610  size_csr = size(csr)
611 
612  do ido = 1, size_csr
613  !call csr(ido)%sort
614  row = csr(ido) % row
615  if (row < this % start_row .or. row >= this % end_row) cycle
616  if (csr(ido) % num_cols == 0) cycle
617  !write(stdout,"('Inserting row ',2i8)") row,csr(ido)%num_cols
618 
619  call matsetvaluesblocked(this % A, one, csr(ido) % row, csr(ido) % num_cols, &
620  csr(ido) % col, csr(ido) % coeff, insert_values, ierr)
621 
622  this % n = this % n + csr(ido) % num_cols
623  end do
624 
625  end subroutine insert_csr_into_hard_cache
626 
627 
628  subroutine finalize_matrix_slepc (this)
629  class(slepcmatrix) :: this
630  petscerrorcode :: ierr
631 
632  if (.not. this % mat_created) then
633  stop "Finalizing matrix that doesn't exist!!!"
634  else
635  write (stdout, "('Finalizing SLEPC matrix')")
636  call master_timer % start_timer("Matrix assembly")
637  call matassemblybegin(this % A, mat_final_assembly, ierr)
638  call matassemblyend(this % A, mat_final_assembly, ierr)
639  !call MatSetOption(this%A,MAT_SYMMETRIC ,PETSC_TRUE,ierr)
640  !call MatSetOption(this%A,MAT_HERMITIAN ,PETSC_TRUE,ierr)
641  !call MatSetOption(this%A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE,ierr)
642  call master_timer % stop_timer("Matrix assembly")
643  write (stdout, "('Finished assembly')")
644  end if
645 
646  end subroutine finalize_matrix_slepc
647 
648 
649  subroutine print_slepc (this)
650  class(slepcmatrix) :: this
651 
652  write (stdout, "('-------TEMP CACHE---------')")
653  call this % temp_cache % print
654 
655  end subroutine print_slepc
656 
657 
658  subroutine clear_slepc (this)
659  class(slepcmatrix) :: this
660 
661  this % n = 0
662 
663  end subroutine clear_slepc
664 
665 
666  subroutine destroy_slepc (this)
667  class(slepcmatrix) :: this
668 
669  call this % clear
670  call this % destroy_PETSC_mat
671 
672  end subroutine destroy_slepc
673 
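 !> Recursive quicksort of the column indices of a CSR row into ascending order,
 !> permuting the corresponding coefficients alongside.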
674  recursive subroutine qsortcsr (A, coeff)
675  petscint, intent(inout), dimension(:) :: a
676  petscscalar, intent(inout), dimension(:) :: coeff
677  integer :: iq
678 
679  if (size(a) > 1) then
680  call partition(a, coeff, iq)
681  call qsortcsr(a(:iq-1), coeff(:iq-1))
682  call qsortcsr(a(iq:), coeff(iq:))
683  end if
684 
685  end subroutine qsortcsr
686 
687 
688  subroutine partition (A, coeff, marker)
689  petscint, intent(inout), dimension(:) :: a
690  petscscalar, intent(inout), dimension(:) :: coeff
691  integer, intent(out) :: marker
692  integer :: i, j
693  petscint :: temp_a
694  petscscalar :: temp_coeff
695  petscint :: x_a ! pivot point
696 
697  x_a = a(1)
698  i = 0
699  j = size(a) + 1
700 
701  do
702  j = j - 1
703  do
704  if (a(j) <= x_a) exit
705  j = j - 1
706  end do
707  i = i + 1
708  do
709  if (a(i) >= x_a) exit
710  i = i + 1
711  end do
712  if (i < j) then
713  ! exchange A(i) and A(j)
714  temp_a = a(i); a(i) = a(j); a(j) = temp_a
715  temp_coeff = coeff(i); coeff(i) = coeff(j); coeff(j) = temp_coeff
716  else if (i == j) then
717  marker = i + 1
718  return
719  else
720  marker = i
721  return
722  end if
723  end do
724 
725  end subroutine partition
726 
727 end module slepcmatrix_module