MPI-SCATCI 2.0
An MPI version of SCATCI
Loading...
Searching...
No Matches
SLEPCMatrix_module.F90
Go to the documentation of this file.
1! Copyright 2019
2!
3! For a comprehensive list of the developers that contributed to these codes
4! see the UK-AMOR website.
5!
6! This file is part of UKRmol-in (UKRmol+ suite).
7!
8! UKRmol-in is free software: you can redistribute it and/or modify
9! it under the terms of the GNU General Public License as published by
10! the Free Software Foundation, either version 3 of the License, or
11! (at your option) any later version.
12!
13! UKRmol-in is distributed in the hope that it will be useful,
14! but WITHOUT ANY WARRANTY; without even the implied warranty of
15! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16! GNU General Public License for more details.
17!
18! You should have received a copy of the GNU General Public License
19! along with UKRmol-in (in source/COPYING). Alternatively, you can also visit
20! <https://www.gnu.org/licenses/>.
21
22!> \brief SLEPc matrix module
23!> \authors A Al-Refaie
24!> \date 2017
25!>
26!> This module is built in only when SLEPc library is available.
27!>
28!> \note 16/01/2019 - Jakub Benda: Unifom coding style and expanded documentation.
29!>
30module slepcmatrix_module
31
32 use const_gbl, only: stdout
34 use precisn, only: wp
35 use mpi_gbl, only: mpi_mod_allgather, mpi_mod_scan_sum, mpi_xermsg
36 use distributedmatrix_module, only: distributedmatrix
37 use matrixcache_module, only: matrixcache
38 use memorymanager_module, only: master_memory
39 use parallelization_module, only: grid => process_grid
40 use timing_module, only: master_timer
41 use slepcsys, only: slepcinitialize
42 use petsc
43
44#include <finclude/petsc.h>
45
46 implicit none
47
48 public slepcmatrix, initialize_slepc
49
50 private
51
    !> \brief Compressed-sparse-row storage for a single matrix row.
    !>
    !> Each instance holds one (global) row of the symmetric matrix: the row index,
    !> the column indices and coefficients of its stored elements, and the element
    !> count. Built by compress_cache_to_csr_format and consumed by
    !> insert_csr_into_hard_cache.
    type :: csrformat
        petscint :: row                      !< Global row index this record describes.
        petscint, allocatable :: col(:)      !< Column indices of the stored elements.
        petscscalar, allocatable :: coeff(:) !< Coefficients of the stored elements.
        petscint :: num_cols                 !< Number of valid entries in col/coeff.
    contains
        procedure, public :: construct => construct_csr !< Allocate col/coeff buffers.
        procedure, public :: sort => sort_csr           !< Sort columns ascending (coeff follows).
        final :: destroy_csr                            !< Release buffers on finalization.
    end type csrformat
62
    !> \brief Distributed matrix backed by a PETSc Mat object, for diagonalization with SLEPc.
    !>
    !> Extends distributedmatrix with the PETSc handle, the local row-ownership range
    !> (kept both as default integers and as petscint to bridge the local MPI setup and
    !> PETSc's integer kind), and the preallocation arrays used when creating the
    !> sparse (SBAIJ) matrix.
    type, extends(distributedmatrix) :: slepcmatrix

        type(matrixcache) :: slepc_cache !< Scratch cache (currently unused on the insertion path).
        integer :: start_row             !< First locally owned row (0-based; end_row - local_size).
        integer :: end_row               !< One past the last locally owned row.
        integer :: local_size            !< Number of rows owned by this process.
        integer :: mem_track             !< Tracked matrix memory (bytes/elements), reported to master_memory.
        mat, pointer :: a                !< The PETSc matrix handle (allocated in create_petsc_mat).

        ! These are required to ensure that both PETSC and the local MPI configuration behave
        ! (PETSc may be built with 64-bit indices while the local integers are 32-bit).
        petscint :: start_row_petsc
        petscint :: end_row_petsc
        petscint :: local_size_petsc
        petscint :: matrix_dimension_petsc

        !> These are required for preallocation (per-row nonzero counts for the
        !> diagonal and off-diagonal blocks of MatCreateSBAIJ; deallocated after creation).
        petscint, allocatable :: diagonal_nonzero(:)
        petscint, allocatable :: offdiagonal_nonzero(:)

        !This stores which row belongs to which process
        integer :: diagonal_start,diagonal_end
        logical :: compressed = .false.  !< Whether the cache has been compressed to CSR.
        logical :: mat_created = .false. !< Whether the PETSc Mat handle has been allocated.

    contains

        procedure, public :: print => print_slepc
        procedure, public :: setup_diag_matrix => initialize_struct_slepc
        procedure, public :: get_matelem_self => get_matelem_slepc
        procedure, public :: clear_matrix => clear_slepc
        procedure, public :: destroy_matrix => destroy_slepc
        procedure, public :: finalize_matrix_self => finalize_matrix_slepc

        procedure, public :: insert_into_diag_matrix => insert_into_hard_cache
        procedure, private :: compress_cache_to_csr_format
        procedure, private :: insert_csr_into_hard_cache
        procedure, public :: print_nonzeros
        procedure, public :: get_petsc_matrix
        procedure, private :: create_petsc_mat
        procedure, private :: destroy_petsc_mat

    end type slepcmatrix
105
106contains
107
108 !> \brief Initialize SLEPc
109 !> \authors J Benda
110 !> \date 2019
111 !>
112 !> SLEPc needs to be initialized by all processes at once. Originally, it was being initialized in the SLEPc matrix init
113 !> routine, but since there are now multiple concurrent diagonalizations and not all of them need to use SLEPc, this needs
114 !> to be separated out.
115 !>
116 subroutine initialize_slepc
117
118 petscerrorcode :: ierr
119
120 call slepcinitialize(petsc_null_character, ierr)
121 write (stdout, "('SLEPc initialization status = ',I0)") ierr
122 if (ierr /= 0) then
123 call mpi_xermsg('SLEPCMatrix_module', 'initialize_slepc', 'Failed to initialize SLEPc', int(ierr), 1)
124 end if
125
126 end subroutine initialize_slepc
127
128
129 subroutine construct_csr (this, num_cols)
130 class(csrformat) :: this
131 integer, intent(in) :: num_cols
132
133 if (allocated(this % coeff)) deallocate(this % coeff)
134 if (allocated(this % col)) deallocate(this % col)
135
136 allocate(this % coeff(num_cols))
137 allocate(this % col(num_cols))
138
139 end subroutine construct_csr
140
141
142 subroutine destroy_csr (this)
143 type(csrformat) :: this
144
145 if (allocated(this % col)) deallocate(this % col)
146 if (allocated(this % coeff)) deallocate(this % coeff)
147
148 end subroutine destroy_csr
149
150
    !> \brief Sort the row's column indices ascending, permuting coefficients alongside.
    subroutine sort_csr (this)
        class(csrformat) :: this

        call qsortcsr (this % col, this % coeff)

    end subroutine sort_csr
157
158
159 function get_petsc_matrix (this) result(m)
160 class(slepcmatrix) :: this
161 mat, pointer :: m
162
163 m => this % A
164
165 end function get_petsc_matrix
166
167
168 subroutine create_petsc_mat (this, matrix_size, matrix_type)
169 class(slepcmatrix) :: this
170 petscint, intent(in) :: matrix_size
171 integer, intent(in) :: matrix_type
172 integer :: ierr, non_zero_guess
173
174 if (this % mat_created) then
175 stop "PETSC matrix already created!!"
176 endif
177
178 allocate(this % A)
179
180 if (matrix_type == mat_dense) then
181
182 else if (matrix_type == mat_sparse) then
183
184 end if
185
186 this % mat_created = .true.
187
188 end subroutine create_petsc_mat
189
190
191 subroutine destroy_petsc_mat (this)
192 class(slepcmatrix) :: this
193 petscerrorcode :: ierr
194
195 if (this % mat_created) then
196 call matdestroy(this % A, ierr)
197 deallocate(this % A)
198 this % A => null()
199 call master_memory % free_memory(this % mem_track, 1)
200 this % mem_track = 0
201 this % mat_created = .false.
202 end if
203
204 end subroutine destroy_petsc_mat
205
206
207 subroutine shuffle (a)
208 petscint, intent(inout) :: a(:)
209
210 integer :: i, randpos, temp
211 real :: r
212
213 do i = size(a), 2, -1
214 call random_number(r)
215 randpos = int(r * i) + 1
216 temp = a(randpos)
217 a(randpos) = a(i)
218 a(i) = temp
219 end do
220
221 end subroutine shuffle
222
223
    !> \brief Create and preallocate the PETSc matrix structure for this process.
    !>
    !> Collective over grid % gcomm. Determines the local row ownership range via
    !> PetscSplitOwnership, then either creates a dense matrix (trivial case) or a
    !> symmetric block sparse (SBAIJ) matrix with estimated per-row nonzero counts.
    !> For the sparse case it also pre-inserts a dummy dense block of size
    !> `block_size` to embed the nonzero structure (improves later insertion speed),
    !> flush-assembles, zeroes the entries, and registers the memory estimate with
    !> the memory manager.
    !>
    !> \param matrix_size  Global matrix dimension.
    !> \param matrix_type  mat_dense or mat_sparse.
    !> \param block_size   Leading dense block dimension (continuum block).
    subroutine initialize_struct_slepc (this, matrix_size, matrix_type, block_size)

        class(slepcmatrix) :: this
        integer, intent(in) :: matrix_size, matrix_type, block_size

        ! NOTE(review): `one = 1` in the declaration implies an (intended?) SAVE
        ! attribute; harmless here because it is never modified.
        petscint :: non_zero_guess, petsc_row, to_insert, one = 1, dummy_int, matrix_size_petsc
        petscerrorcode :: ierr
        real(wp) :: mem_usage_begin, mem_usage_end, ratio, dummy_real
        integer :: ido, total_vals, per_elm, l_update, c_update, indicies, total_elems

        integer, allocatable :: mem_use_total(:)
        petscint, allocatable :: test_insertion_ij(:)
        petscscalar, allocatable :: test_insertion_val(:)

        !If we are dealing with a dense matrix then we don't bother since SLEPC automatically handles this
        ierr = 0

        call this % slepc_cache % construct
        ! Drop any matrix from a previous diagonalization before rebuilding.
        call this % destroy_PETSC_mat

        ! Mirror the dimension into PETSc's integer kind.
        this % matrix_dimension_PETSC = this % matrix_dimension
        write (stdout, "('Mat_size = ',i8)") this % matrix_dimension_PETSC

        call this % create_PETSC_mat(this % matrix_dimension_PETSC, matrix_type)

        matrix_size_petsc = matrix_size
        this % local_size_PETSC = petsc_decide

        !Lets find our local dimension size (PETSc fills in local_size_PETSC)
        call petscsplitownership(int(grid % gcomm, kind(petsc_comm_world)), &
                                 this % local_size_PETSC, this % matrix_dimension_PETSC, ierr)

        !Convert to our local integer
        this % local_size = this % local_size_PETSC

        write (stdout, "('Local_size = ',i8)") this % local_size
        write (stdout, "('THRESHOLD = ',es16.8)") this % threshold

        !Get the end index (exclusive): cumulative sum of local sizes across ranks
        call mpi_mod_scan_sum(this % local_size, this % end_row, grid % gcomm)
        !Get the start index (0-based first owned row)
        this % start_row = this % end_row - this % local_size

        !store the rest just in case petsc needs it
        this % start_row_PETSC = this % start_row
        this % end_row_PETSC = this % end_row
        write (stdout, "('start,end = ',2i8)") this % start_row, this % end_row

        !Dense doesn't require much: let PETSc allocate the full local block.
        if (this % matrix_type == mat_dense) then
            call petscmemorygetcurrentusage(mem_usage_begin, ierr)
            call matcreatedense(grid % gcomm, petsc_decide, petsc_decide, matrix_size_petsc, matrix_size_petsc, &
                                petsc_null_scalar_array, this % A, ierr)
            call petscmemorygetcurrentusage(mem_usage_end, ierr)

            write (stdout, "('Matrix uses ',f8.3,' GB of memory')") (mem_usage_end - mem_usage_begin) / 1024**3
            ! NOTE(review): mem_track is integer while the difference is real(wp) bytes;
            ! large dense matrices could overflow a 32-bit integer here.
            this % mem_track = mem_usage_end - mem_usage_begin

            call master_memory % track_memory(this % mem_track, 1, 0, 'PETSC MATRIX')
            return
        end if

        !Now we need to allocate what is needed
        if (allocated(this % diagonal_nonzero)) deallocate(this % diagonal_nonzero)
        if (allocated(this % offdiagonal_nonzero)) deallocate(this % offdiagonal_nonzero)
        !The final procedure should take care of all the arrays
        !This will store our non-zeros for the local matrix
        allocate(this % diagonal_nonzero(this % local_size), stat = ierr)
        allocate(this % offdiagonal_nonzero(this % local_size), stat = ierr)

        this % diagonal_nonzero = 0
        this % offdiagonal_nonzero = 0

        total_elems = 0

        ! Column range of the "diagonal block" (locally owned columns) in PETSc's
        ! preallocation sense.
        this % diagonal_start = this % start_row_PETSC
        this % diagonal_end = min(this % start_row_PETSC + this % local_size, this % matrix_dimension)

        ! Estimate per-row nonzeros. Only the upper triangle is stored (SBAIJ), so a
        ! row owns matrix_dimension - global_row entries at most.
        do ido = 1, this % local_size
            total_vals = this % matrix_dimension - this % start_row - ido + 1
            this % diagonal_nonzero(ido) = this % diagonal_end - this % diagonal_start + 1 - ido
            this % offdiagonal_nonzero(ido) = total_vals - this % diagonal_nonzero(ido)

            !If we're above the block size then we use the 90% sparse guess
            if ((this % start_row + ido - 1) >= block_size) then
                !Now we are dealing with sparsity
                !Lets compute the ratio between the diagonal and off diagonal
                ratio = real(this % diagonal_nonzero(ido)) / real(total_vals)
                !Reduce the number of values to 10% of max
                total_vals = real(total_vals) * 0.1
                !Now distribute
                !There should be at least one in the diagonal
                !The off diagonal will be larger so we will give the diagonal a bit more leeway
                this % diagonal_nonzero(ido) = max(1, int(real(total_vals) * ratio))
                this % offdiagonal_nonzero(ido) = int(real(total_vals) * (1.0 - ratio))
            end if
            total_elems = total_elems + this % diagonal_nonzero(ido) + this % offdiagonal_nonzero(ido)
            !write(stdout,"(5i8)") this%start_row,this%end_row,total_vals,this%diagonal_nonzero(ido),this%offdiagonal_nonzero(ido)
        end do

        call petscmemorygetcurrentusage(mem_usage_begin, ierr)

        this % continuum_counter = 0
        this % L2_counter = 0

        ! Symmetric block AIJ matrix, block size 1, with the per-row estimates above.
        call matcreatesbaij(grid % gcomm, one, petsc_decide, petsc_decide, matrix_size_petsc, matrix_size_petsc, &
                            petsc_default_integer, this % diagonal_nonzero, petsc_default_integer, this % offdiagonal_nonzero, &
                            this % A, ierr)

        !call MatCreate(PETSC_COMM_WORLD,this%A,ierr)
        !CHKERRQ(ierr)
        !call MatSetType(this%A,MATSBAIJ,ierr)
        !CHKERRQ(ierr)
        !call MatSetSizes(this%A,PETSC_DECIDE,PETSC_DECIDE,matrix_size,matrix_size,ierr)
        !CHKERRQ(ierr)
        !Now we set the preallocation
        !if(nprocs > 1) then
        !    call MatMPISBAIJSetPreallocation(this%A,1,PETSC_DECIDE,this%diagonal_nonzero,PETSC_DECIDE,this%offdiagonal_nonzero,ierr)
        !else
        !    call MatSeqSBAIJSetPreallocation(this%A,1,PETSC_DECIDE,this%diagonal_nonzero,ierr)
        !endif
        !call MatSetUp(this%A,ierr)
        !call MatXAIJSetPreallocation(this%A, 1,PETSC_NULL_INTEGER, PETSC_NULL_INTEGER,this%diagonal_nonzero, this%offdiagonal_nonzero,ierr)
        if (ierr /= 0) then
            call mpi_xermsg('SLEPCMatrix_module', 'initialize_struct_SLEPC', 'Failed to create matrix', int(ierr), 1)
        end if
        !call MatSetUp(this%A,ierr)

        ! Each rank only ever sets its own rows, so off-process entries can be dropped;
        ! lower-triangular insertions are ignored (SBAIJ stores the upper triangle).
        call matsetoption(this % A, mat_ignore_off_proc_entries, petsc_true, ierr)
        call matsetoption(this % A, mat_no_off_proc_entries, petsc_true, ierr)
        call matsetoption(this % A, mat_ignore_lower_triangular, petsc_true, ierr)
        call matsetoption(this % A, mat_new_nonzero_allocation_err, petsc_false, ierr)

        deallocate(this % diagonal_nonzero, this % offdiagonal_nonzero)

        allocate(test_insertion_ij(this % matrix_dimension), test_insertion_val(this % matrix_dimension))

        do ido = 1, this % matrix_dimension
            test_insertion_ij(ido) = ido - 1
            test_insertion_val(ido) = 1.0_wp
        end do

        !--------We use this to embed the non-zero structure into the block to improve performance when inserting values
        call master_timer % start_timer("Insertion time")
        do ido = 1, this % local_size
            total_vals = this % matrix_dimension - this % start_row - ido + 1
            petsc_row = ido - 1
            dummy_int = this % start_row + ido
            ! Only rows inside the leading dense (continuum) block are pre-filled;
            ! rows are visited in order, so we can stop at the first one outside it.
            if ((dummy_int - 1) < block_size) then
                to_insert = size(test_insertion_ij(dummy_int:this % matrix_dimension))
                !write(stdout,*) ido,to_insert,dummy_int,total_vals
                !call MatSetValuesBlocked(this%A,1,ido,csr(ido)%num_cols,csr(ido)%col,csr(ido)%coeff,INSERT_VALUES,ierr)
                call matsetvaluesblocked(this % A, one, [dummy_int - one], to_insert, &
                                         test_insertion_ij(dummy_int : this % matrix_dimension), &
                                         test_insertion_val(dummy_int : this % matrix_dimension), &
                                         insert_values, ierr)
            else
                exit
            end if
        end do
        call matassemblybegin(this % A, mat_flush_assembly, ierr)
        call matassemblyend(this % A, mat_flush_assembly, ierr)

        !Now lets clear just in case we have any issues
        call matzeroentries(this % A, ierr)


        deallocate(test_insertion_ij, test_insertion_val)

        write (stdout, "('PETSC matrix creation completed!')")

        call master_timer % stop_timer("Insertion time")

        call petscmemorygetcurrentusage(mem_usage_end, ierr)

        ! Bytes per element = one index + one scalar. storage_size queries the type,
        ! so it is valid even though the arrays were just deallocated.
        per_elm = (storage_size(test_insertion_ij) + storage_size(test_insertion_val)) / 8

        write (stdout, "('Matrix uses ',f0.0,' B of memory')") mem_usage_end - mem_usage_begin
        write (stdout, "('Estimated uses ',i0,' B of memory')") total_elems * per_elm

        this % mem_track = mem_usage_end - mem_usage_begin

        ! Average the element-count estimate over all ranks for the memory report.
        allocate(mem_use_total(grid % gprocs))

        call mpi_mod_allgather(total_elems, mem_use_total, grid % gcomm)

        total_elems = sum(mem_use_total)
        total_elems = total_elems / grid % gprocs
        this % mem_track = total_elems

        deallocate(mem_use_total)

        write (stdout, "('Average uses ',i0,' B of memory')") total_elems*per_elm

        call master_memory % track_memory(per_elm, total_elems, 0, 'PETSC MATRIX')

    end subroutine initialize_struct_slepc
421
422
423 !TODO: Write a 'compressed' version of this
424 subroutine get_matelem_slepc (this, idx, i, j, coeff)
425 class(slepcmatrix) :: this
426 integer, intent(in) :: idx
427 integer, intent(out) :: i, j
428 real(wp), intent(out) :: coeff
429
430 i = 0
431 j = 0
432 coeff = 0.0
433 !call this%matrix_cache%get_from_cache(idx,i,j,coeff)
434
435 end subroutine get_matelem_slepc
436
437
    !> \brief Debug print of the per-row nonzero preallocation estimates.
    !>
    !> NOTE(review): diagonal_nonzero/offdiagonal_nonzero are deallocated at the end
    !> of initialize_struct_slepc, so this is only valid between their allocation and
    !> the MatCreateSBAIJ call - confirm call sites before use.
    subroutine print_nonzeros (this)
        class(slepcmatrix) :: this
        integer :: ido

        do ido = 1, this % local_size
            write (stdout, "('dnnz = ',i8,'onnz = ',i8)") this % diagonal_nonzero(ido), this % offdiagonal_nonzero(ido)
        end do

    end subroutine print_nonzeros
447
448
    !> \brief Compress the matrix cache into per-row CSR records.
    !>
    !> Sorts the cache by row, counts the distinct rows, then walks the elements
    !> row by row: for each row it counts the above-threshold elements, allocates a
    !> csrformat record, and copies the elements in. Cache chunks are destroyed as
    !> soon as the scan moves past them to bound peak memory. Returns .true. on
    !> success (and leaves the cache destroyed/re-constructed empty); returns
    !> .false. for dense matrices or an empty cache.
    !>
    !> NOTE(review): the counting phase increments row_count only for
    !> |coeff| > threshold, but the copy phase copies the first row_count elements
    !> of the row unconditionally - if a row mixes above- and below-threshold
    !> elements these two phases disagree. Confirm whether callers guarantee the
    !> cache is already thresholded.
    logical function compress_cache_to_csr_format (this, matrix_cache, csr_matrix)
        class(slepcmatrix) :: this
        type(csrformat), allocatable, intent(out) :: csr_matrix(:)
        class(matrixcache) :: matrix_cache

        integer :: ido, jdo, start_idx, end_idx, num_elems, row_count, row_idx, first_row
        integer :: current_chunk, current_row, next_row, last_chunk, i, j
        real(wp) :: coeff

        compress_cache_to_csr_format = .false.

        !Lets not bother if its a DENSE matrix
        if (this % matrix_type == mat_dense) return

        !sort the matrix cache in ascending row order
        call matrix_cache % sort

        ! First pass: count distinct rows (cache is row-sorted) and remember the
        ! first row index.
        current_row = -1
        row_count = 0
        do ido = 1, matrix_cache % get_size()
            call matrix_cache % get_from_cache(ido, i, j, coeff)
            if (ido == 1) first_row = i
            if (i /= current_row) then
                row_count = row_count + 1
                current_row = i
            end if
        end do

        call master_timer % start_timer("CSR conversion")

        num_elems = matrix_cache % get_size()

        if (row_count == 0) then
            call master_timer % stop_timer("CSR conversion")
            return
        end if

        if (allocated(csr_matrix)) deallocate(csr_matrix)

        !allocate the number of local rows
        allocate(csr_matrix(row_count))

        csr_matrix(:) % num_cols = 0

        next_row = -1
        last_chunk = 1
        start_idx = 1
        end_idx = 99
        ido = 1
        row_idx = 1
        current_row = first_row

        !Loop through number of elements
        do while (ido <= num_elems)
            row_count = 0

            !counting phase: find the extent of the current row and count the
            !elements that pass the threshold
            do jdo = ido, num_elems
                call matrix_cache % get_from_cache(jdo, i, j, coeff)

                !Check row and threshold count
                if (i == current_row) then
                    if (abs(coeff) > this % threshold) row_count = row_count + 1
                    end_idx = jdo
                else
                    next_row = i
                    exit
                end if
            end do
            csr_matrix(row_idx) % row = current_row
            csr_matrix(row_idx) % num_cols = row_count

            !Do we have anything to store?
            if (row_count > 0) then
                !allocation phase
                call csr_matrix(row_idx) % construct(row_count)

                !now we loop through and store the elements
                do jdo = 1, row_count
                    !Get the element
                    call matrix_cache % get_from_cache(jdo + start_idx - 1, i, j, coeff)
                    !add it to the list
                    csr_matrix(row_idx) % col(jdo) = j
                    csr_matrix(row_idx) % coeff(jdo) = coeff
                    current_chunk = matrix_cache % get_chunk_idx(jdo + start_idx - 1)
                    !also if we've moved to the next chunk then delete the last one
                    !(elements are consumed in order, so it is no longer needed)
                    if (last_chunk /= current_chunk) then
                        call matrix_cache % matrix_arrays(last_chunk) % destroy
                        last_chunk = current_chunk
                    end if
                end do
            end if

            !If we've hit the last element previously then we can leave
            ido = end_idx + 1
            start_idx = end_idx + 1
            row_idx = row_idx + 1
            current_row = next_row
        end do

        compress_cache_to_csr_format = .true.

        !After this we can delete anything left over
        call matrix_cache % destroy
        call matrix_cache % construct
        call master_timer % stop_timer("CSR conversion")

    end function compress_cache_to_csr_format
557
558
559 !> \brief Inserts an element into the hard storage which is considered the final location before diagonalization
560 !> \authors A Al-Refaie
561 !> \date 2017
562 !>
563 !> It also checks wherther the element exists within the aloowed range and tells us if it was successfully inserted.
564 !>
565 logical function insert_into_hard_cache (this, row, column, coefficient)
566 class(slepcmatrix) :: this
567 integer, intent(in) :: row, column
568 real(wp), intent(in) :: coefficient
569 petscint :: i, j
570 petscerrorcode :: ierr
571
572 !if(row==column) call this%store_diagonal(row,coefficient)
573
574 i = row - 1
575 j = column - 1
576
577 if (abs(coefficient) < this % threshold) return
578
579 !Dense has slightly different rules for this
580 if (this % matrix_type == mat_dense) then
581 if (i >= this % start_row .or. i < this % end_row ) then
582 call matsetvalue(this % A, i, j, coefficient, insert_values, ierr)
583 end if
584 if (j >= this % start_row .or. j < this % end_row) then
585 call matsetvalue(this % A, j, i, coefficient, insert_values, ierr)
586 insert_into_hard_cache = .true.
587 end if
588 return
589 end if
590
591 if (j < this % start_row .or. j >= this % end_row) then
592 insert_into_hard_cache = .false.
593 return
594 end if
595
596 insert_into_hard_cache = .true.
597 !write(stdout,"(2i8,1)") row,column
598 !call this%slepc_cache%insert_into_cache(column-1,row-1,coefficient)
599
600 call matsetvalue(this % A, j, i, coefficient, insert_values, ierr)
601
602 this % n = this % n + 1
603
604 end function insert_into_hard_cache
605
606
    !> \brief Bulk-insert pre-compressed CSR rows into the PETSc matrix.
    !>
    !> Skips rows not owned by this process and empty rows; one MatSetValuesBlocked
    !> call per row. No-op for dense matrices.
    !>
    !> NOTE(review): csr%row is compared against start_row/end_row and passed
    !> straight to PETSc (0-based), while insert_into_hard_cache subtracts 1 from
    !> its 1-based arguments - confirm the index base used when the CSR records
    !> were built.
    subroutine insert_csr_into_hard_cache (this, csr)
        class(slepcmatrix) :: this
        class(csrformat), intent(in) :: csr(:)
        integer :: size_csr
        ! 'one = 1' carries an implicit SAVE; harmless since it is never modified.
        petscint :: row, column, ido, one = 1
        petscerrorcode :: ierr

        if (this % matrix_type == mat_dense) return

        size_csr = size(csr)

        do ido = 1, size_csr
            !call csr(ido)%sort
            row = csr(ido) % row
            ! Only insert rows this process owns; skip empty rows.
            if (row < this % start_row .or. row >= this % end_row) cycle
            if (csr(ido) % num_cols == 0) cycle
            !write(stdout,"('Inserting row ',2i8)") row,csr(ido)%num_cols

            call matsetvaluesblocked(this % A, one, [csr(ido) % row], csr(ido) % num_cols, &
                                     csr(ido) % col, csr(ido) % coeff, insert_values, ierr)

            this % n = this % n + csr(ido) % num_cols
        end do

    end subroutine insert_csr_into_hard_cache
632
633
634 subroutine finalize_matrix_slepc (this)
635 class(slepcmatrix) :: this
636 petscerrorcode :: ierr
637
638 if (.not. this % mat_created) then
639 stop "Finalizing matrix that doesn't exist!!!"
640 else
641 write (stdout, "('Finalizing SLEPC matrix')")
642 call master_timer % start_timer("Matrix assembly")
643 call matassemblybegin(this % A, mat_final_assembly, ierr)
644 call matassemblyend(this % A, mat_final_assembly, ierr)
645 !call MatSetOption(this%A,MAT_SYMMETRIC ,PETSC_TRUE,ierr)
646 !call MatSetOption(this%A,MAT_HERMITIAN ,PETSC_TRUE,ierr)
647 !call MatSetOption(this%A,MAT_SYMMETRY_ETERNAL,PETSC_TRUE,ierr)
648 call master_timer % stop_timer("Matrix assembly")
649 write (stdout, "('Finished assembly')")
650 end if
651
652 end subroutine finalize_matrix_slepc
653
654
655 subroutine print_slepc (this)
656 class(slepcmatrix) :: this
657
658 write (stdout, "('-------TEMP CACHE---------')")
659 call this % temp_cache % print
660
661 end subroutine print_slepc
662
663
664 subroutine clear_slepc (this)
665 class(slepcmatrix) :: this
666
667 this % n = 0
668
669 end subroutine clear_slepc
670
671
672 subroutine destroy_slepc (this)
673 class(slepcmatrix) :: this
674
675 call this % clear
676 call this % destroy_PETSC_mat
677
678 end subroutine destroy_slepc
679
680 recursive subroutine qsortcsr (A, coeff)
681 petscint, intent(inout), dimension(:) :: a
682 petscscalar, intent(inout), dimension(:) :: coeff
683 integer :: iq
684
685 if (size(a) > 1) then
686 call partition(a, coeff, iq)
687 call qsortcsr(a(:iq-1), coeff(:iq-1))
688 call qsortcsr(a(iq:), coeff(iq:))
689 end if
690
691 end subroutine qsortcsr
692
693
694 subroutine partition (A, coeff, marker)
695 petscint, intent(inout), dimension(:) :: a
696 petscscalar, intent(inout), dimension(:) :: coeff
697 integer, intent(out) :: marker
698 integer :: i, j
699 petscint :: temp_a
700 petscscalar :: temp_coeff
701 petscint :: x_a ! pivot point
702
703 x_a = a(1)
704 i = 0
705 j = size(a) + 1
706
707 do
708 j = j - 1
709 do
710 if (a(j) <= x_a) exit
711 j = j - 1
712 end do
713 i = i + 1
714 do
715 if (a(j)>= x_a) exit
716 i = i + 1
717 end do
718 if (i < j) then
719 ! exchange A(i) and A(j)
720 temp_a = a(i); a(i) = a(j); a(j) = temp_a
721 temp_coeff = coeff(i); coeff(i) = coeff(j); coeff(j) = temp_coeff
722 else if (i == j) then
723 marker = i + 1
724 return
725 else
726 marker = i
727 return
728 end if
729 end do
730
731 end subroutine partition
732
733end module slepcmatrix_module
MPI SCATCI Constants module.
integer, parameter mat_sparse
Matrix is sparse.
integer, parameter mat_dense
Matrix is dense.