
!>
!> @file petsc_mod.F90
!>
!> @brief Simple interface to the PETSC parallel iterative solver.
!>
!> @copyright
!> Copyright (©) 2021 EPFL (Ecole Polytechnique Fédérale de Lausanne)
!> SPC (Swiss Plasma Center)
!>
!> SPClibs is free software: you can redistribute it and/or modify it under
!> the terms of the GNU Lesser General Public License as published by the Free
!> Software Foundation, either version 3 of the License, or (at your option)
!> any later version.
!>
!> SPClibs is distributed in the hope that it will be useful, but WITHOUT ANY
!> WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
!> FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
!>
!> You should have received a copy of the GNU Lesser General Public License
!> along with this program. If not, see <https://www.gnu.org/licenses/>.
!>
!> @authors
!> (in alphabetical order)
!> @author Trach-Minh Tran <trach-minh.tran@epfl.ch>
!>
MODULE petsc_bsplines
!
! PETSC_BSPLINES: Simple interface to the parallel iterative
! solver PETSC
!
! T.M. Tran, CRPP-EPFL
! June 2011
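!
! Typical usage (a minimal sketch; n, nterms, i, j, contrib and rhs are
! placeholders for the caller's data, not entities defined in this module):
!
!   TYPE(petsc_mat) :: a
!   CALL init(n, nterms, a, comm=MPI_COMM_WORLD) ! Empty distributed matrix
!   CALL updtmat(a, i, j, contrib)               ! Accumulate element (i,j)
!   CALL to_mat(a)                               ! Linked list -> PETSC format
!   CALL bsolve(a, rhs, rtol_in=1.0d-8)          ! Iterative solve, in place
!   CALL destroy(a)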
!
USE sparse
IMPLICIT NONE
#include "finclude/petsc.h90"
!
TYPE petsc_mat
INTEGER :: rank
INTEGER(8) :: nnz, nnz_loc
INTEGER :: nterms, kmat
INTEGER :: istart, iend
INTEGER, POINTER :: rcounts(:) => NULL()
INTEGER, POINTER :: rdispls(:) => NULL()
INTEGER :: comm
LOGICAL :: nlsym
LOGICAL :: nlforce_zero
TYPE(spmat), POINTER :: mat => NULL()
INTEGER, POINTER :: cols(:) => NULL()
INTEGER, POINTER :: irow(:) => NULL()
DOUBLE PRECISION, POINTER :: val(:) => NULL()
!
Mat :: AMAT
KSP :: SOLVER
END TYPE petsc_mat
!
INTERFACE init
MODULE PROCEDURE init_petsc_mat
END INTERFACE init
!
INTERFACE clear_mat
MODULE PROCEDURE clear_petsc_mat
END INTERFACE clear_mat
!
INTERFACE updtmat
MODULE PROCEDURE updt_petsc_mat
END INTERFACE updtmat
!
INTERFACE putele
MODULE PROCEDURE putele_petsc_mat
END INTERFACE putele
!
INTERFACE getele
MODULE PROCEDURE getele_petsc_mat
END INTERFACE getele
!
INTERFACE putrow
MODULE PROCEDURE putrow_petsc_mat
END INTERFACE putrow
!
INTERFACE getrow
MODULE PROCEDURE getrow_petsc_mat
END INTERFACE getrow
!
INTERFACE putcol
MODULE PROCEDURE putcol_petsc_mat
END INTERFACE putcol
!
INTERFACE getcol
MODULE PROCEDURE getcol_petsc_mat
END INTERFACE getcol
!
INTERFACE get_count
MODULE PROCEDURE get_count_petsc_mat
END INTERFACE get_count
!
INTERFACE to_mat
MODULE PROCEDURE to_petsc_mat
END INTERFACE to_mat
!
INTERFACE save_mat
MODULE PROCEDURE save_petsc_mat
END INTERFACE save_mat
!
INTERFACE load_mat
MODULE PROCEDURE load_petsc_mat
END INTERFACE load_mat
!
INTERFACE bsolve
MODULE PROCEDURE bsolve_petsc_mat1, bsolve_petsc_matn
END INTERFACE bsolve
!
INTERFACE vmx
MODULE PROCEDURE vmx_petsc_mat, vmx_petsc_matn
END INTERFACE vmx
!
INTERFACE destroy
MODULE PROCEDURE destroy_petsc_mat
END INTERFACE destroy
!
INTERFACE mcopy
MODULE PROCEDURE mcopy_petsc_mat
END INTERFACE mcopy
!
INTERFACE maddto
MODULE PROCEDURE maddto_petsc_mat
END INTERFACE maddto
!
CONTAINS
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE init_petsc_mat(n, nterms, mat, kmat, nlsym, &
& nlforce_zero, comm)
!
! Initialize an empty sparse petsc matrix
!
USE pputils2
INCLUDE 'mpif.h'
!
INTEGER, INTENT(in) :: n, nterms
TYPE(petsc_mat) :: mat
INTEGER, OPTIONAL, INTENT(in) :: kmat
LOGICAL, OPTIONAL, INTENT(in) :: nlsym
LOGICAL, OPTIONAL, INTENT(in) :: nlforce_zero
INTEGER, OPTIONAL, INTENT(in) :: comm
!
INTEGER :: me, npes
INTEGER :: i, ierr, nloc
PetscBool :: flg
!!$ PetscTruth :: flg ! Petsc version before 3.2
!
! Prologue
!
!
mat%rank = n
mat%nterms = nterms
mat%nnz = 0
mat%nlsym = .FALSE.
mat%nlforce_zero = .TRUE. ! Do not remove existing nodes with zero val
IF(PRESENT(kmat)) mat%kmat = kmat
IF(PRESENT(nlsym)) mat%nlsym = nlsym
IF(PRESENT(nlforce_zero)) mat%nlforce_zero = nlforce_zero
!
! Initialize the PETSC environment
!
IF(PRESENT(comm)) THEN
PETSC_COMM_WORLD = comm
ELSE ! Single process Petsc
PETSC_COMM_WORLD = MPI_COMM_SELF
END IF
CALL PetscInitialize(PETSC_NULL_CHARACTER, ierr)
mat%comm = PETSC_COMM_WORLD
!
! Query size and rank only after mat%comm is defined: the dummy argument
! comm is OPTIONAL and must not be referenced when it is absent
CALL mpi_comm_size(mat%comm, npes, ierr)
CALL mpi_comm_rank(mat%comm, me, ierr)
!
! Matrix partition
!
CALL dist1d(mat%comm, 1, n, mat%istart, nloc)
mat%iend = mat%istart + nloc - 1
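! dist1d (from pputils2) computes a block distribution of rows 1..n;
! mat%istart/mat%iend are the 1-based inclusive bounds of the local block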
!
IF(ASSOCIATED(mat%rcounts)) DEALLOCATE(mat%rcounts)
IF(ASSOCIATED(mat%rdispls)) DEALLOCATE(mat%rdispls)
ALLOCATE(mat%rcounts(0:npes-1))
ALLOCATE(mat%rdispls(0:npes-1))
CALL mpi_allgather(nloc, 1, MPI_INTEGER, mat%rcounts, 1, MPI_INTEGER, &
& mat%comm, ierr)
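! rdispls is the exclusive prefix sum of rcounts; both are used later by
! mpi_allgatherv in bsolve to collect the global solution on all processes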
mat%rdispls(0) = 0
DO i=1,npes-1
mat%rdispls(i) = mat%rdispls(i-1)+mat%rcounts(i-1)
END DO
!
! Initialize linked list for sparse matrix
!
IF(ASSOCIATED(mat%mat)) THEN
CALL destroy(mat%mat) ! Free the linked list nodes before the descriptor
DEALLOCATE(mat%mat)
END IF
ALLOCATE(mat%mat)
CALL init(n, mat%mat, mat%istart, mat%iend)
!
! Create PETSC matrix
!
CALL MatCreate(mat%comm, mat%AMAT, ierr)
CALL MatSetSizes(mat%AMAT, nloc, nloc, n, n, ierr)
CALL MatSetFromOptions(mat%AMAT, ierr)
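! MatSetFromOptions allows the matrix type to be overridden at run time,
! e.g. with the standard PETSC option -mat_type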
!
! Create PETSC SOLVER
!
CALL KSPCreate(mat%comm, mat%SOLVER, ierr)
!
END SUBROUTINE init_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE clear_petsc_mat(mat)
!
! Clear matrix, keeping its sparse structure unchanged
!
TYPE(petsc_mat) :: mat
INTEGER :: ierr
!
IF(ASSOCIATED(mat%val)) THEN
mat%val = 0.0d0
ELSE
CALL MatZeroEntries(mat%AMAT, ierr)
END IF
END SUBROUTINE clear_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE updt_petsc_mat(mat, i, j, val)
!
! Update element Aij of petsc matrix
!
TYPE(petsc_mat) :: mat
INTEGER, INTENT(in) :: i, j
DOUBLE PRECISION, INTENT(in) :: val
INTEGER :: ierr
!
IF(mat%nlsym) THEN ! Store only upper part for symmetric matrices
IF(i.GT.j) RETURN
END IF
IF(i.LT.mat%istart .OR. i.GT.mat%iend) THEN
WRITE(*,'(a,2i6)') 'UPDT: i, j out of range ', i, j
WRITE(*,'(a,2i6)') ' istart, iend ', mat%istart, mat%iend
STOP '*** Abnormal EXIT in MODULE petsc_mod ***'
END IF
!
IF(ASSOCIATED(mat%mat)) THEN
CALL updtmat(mat%mat, i, j, val)
ELSE
CALL MatSetValue(mat%AMAT, i-1, j-1, val, ADD_VALUES, ierr)
END IF
END SUBROUTINE updt_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE putele_petsc_mat(mat, i, j, val)
!
! Put element (i,j) of matrix
!
TYPE(petsc_mat) :: mat
INTEGER, INTENT(in) :: i, j
DOUBLE PRECISION, INTENT(in) :: val
INTEGER :: iput, jput
INTEGER :: ierr
!
iput = i
jput = j
IF(mat%nlsym) THEN
IF( i.GT.j ) THEN ! Lower triangular part
iput = j
jput = i
END IF
END IF
!
! Do nothing if outside
IF(iput.LT.mat%istart .OR. iput.GT.mat%iend) RETURN
!
IF(ASSOCIATED(mat%mat)) THEN
CALL putele(mat%mat, iput, jput, val, &
& nlforce_zero=mat%nlforce_zero)
ELSE
CALL MatSetValue(mat%AMAT, iput-1, jput-1, val, INSERT_VALUES, ierr)
END IF
END SUBROUTINE putele_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE getele_petsc_mat(mat, i, j, val)
!
! Get element (i,j) of sparse matrix
!
TYPE(petsc_mat) :: mat
INTEGER, INTENT(in) :: i, j
DOUBLE PRECISION, INTENT(out) :: val
INTEGER :: iget, jget
INTEGER :: ierr
!
iget = i
jget = j
IF(mat%nlsym) THEN
IF( i.GT.j ) THEN ! Lower triangular part
iget = j
jget = i
END IF
END IF
!
val = 0.0d0 ! Assume zero val if outside
IF( iget.LT.mat%istart .OR. iget.GT.mat%iend ) RETURN
!
IF(ASSOCIATED(mat%mat)) THEN
CALL getele(mat%mat, iget, jget, val)
ELSE
CALL MatGetValues(mat%AMAT, 1, iget-1, 1, jget-1, val, ierr)
END IF
END SUBROUTINE getele_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE putrow_petsc_mat(amat, i, arr, cols)
!
! Put a row into sparse matrix
!
TYPE(petsc_mat), INTENT(inout) :: amat
INTEGER, INTENT(in) :: i
DOUBLE PRECISION, INTENT(in) :: arr(:)
INTEGER, INTENT(in), OPTIONAL :: cols(:)
INTEGER :: j
!
IF(i.GT.amat%iend .OR. i.LT.amat%istart) RETURN ! Do nothing
!
IF(PRESENT(cols)) THEN
DO j=1,SIZE(cols)
CALL putele(amat, i, cols(j), arr(j))
END DO
ELSE
DO j=1,amat%rank
CALL putele(amat, i, j, arr(j))
END DO
END IF
END SUBROUTINE putrow_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE getrow_petsc_mat(amat, i, arr)
!
! Get a row from sparse matrix
!
TYPE(petsc_mat), INTENT(in) :: amat
INTEGER, INTENT(in) :: i
DOUBLE PRECISION, INTENT(out) :: arr(:)
INTEGER :: j, ierr
INTEGER :: ncols, cols(amat%rank)
DOUBLE PRECISION :: vals(amat%rank)
!
arr = 0.0d0
IF(i.GT.amat%iend .OR. i.LT.amat%istart) RETURN ! return 0 if outside
IF(ASSOCIATED(amat%mat)) THEN
DO j=1,amat%rank
CALL getele(amat%mat, i, j, arr(j))
END DO
ELSE
CALL MatGetRow(amat%AMAT, i-1, ncols, cols, vals, ierr) ! 0-based array
DO j=1,ncols
arr(cols(j)+1) = vals(j)
END DO
CALL MatRestoreRow(amat%AMAT, i-1, ncols, cols, vals, ierr)
END IF
END SUBROUTINE getrow_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE putcol_petsc_mat(amat, j, arr)
!
! Put a column into sparse matrix
!
TYPE(petsc_mat), INTENT(inout) :: amat
INTEGER, INTENT(in) :: j
DOUBLE PRECISION, INTENT(in) :: arr(:)
INTEGER :: i
!
DO i=amat%istart,amat%iend
CALL putele(amat, i, j, arr(i))
END DO
END SUBROUTINE putcol_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE getcol_petsc_mat(amat, j, arr)
!
! Get a column from sparse matrix
!
TYPE(petsc_mat), INTENT(in) :: amat
INTEGER, INTENT(in) :: j
DOUBLE PRECISION, INTENT(out) :: arr(:)
INTEGER :: i
!
arr = 0.0d0
DO i=amat%istart,amat%iend
CALL getele(amat, i, j, arr(i))
END DO
END SUBROUTINE getcol_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
INTEGER FUNCTION get_count_petsc_mat(mat, nnz)
!
! Number of non-zeros in sparse matrix
!
TYPE(petsc_mat) :: mat
INTEGER, INTENT(out), OPTIONAL :: nnz(:)
INTEGER :: i, ierr
DOUBLE PRECISION :: info(MAT_INFO_SIZE)
!
IF(ASSOCIATED(mat%mat)) THEN
get_count_petsc_mat = get_count(mat%mat, nnz)
ELSE
CALL MatGetInfo(mat%AMAT, MAT_LOCAL, info, ierr)
get_count_petsc_mat = info(MAT_INFO_NZ_ALLOCATED)
!!$ IF(PRESENT(nnz)) THEN
!!$ DO i=1,mat%rank
!!$ nnz(i) = mat%irow(i+1)-mat%irow(i)
!!$ END DO
!!$ END IF
END IF
END FUNCTION get_count_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE to_petsc_mat(mat, nlkeep)
!
! Convert the linked list spmat to the PETSC matrix structure
!
TYPE(petsc_mat) :: mat
LOGICAL, INTENT(in), OPTIONAL :: nlkeep
!
INTEGER :: me, i, j, jj, nnz, rank, s, e
INTEGER :: istart, iend
INTEGER :: iloc, k1, k2, ncol
INTEGER :: d_nz, d_nnz(mat%istart:mat%iend)
INTEGER :: o_nz, o_nnz(mat%istart:mat%iend)
INTEGER :: comm, ierr
LOGICAL :: nlclean
!
nlclean = .TRUE.
IF(PRESENT(nlkeep)) THEN
nlclean = .NOT. nlkeep
END IF
!
comm = mat%comm
CALL mpi_comm_rank(comm, me, ierr)
!
! Allocate the Petsc matrix structure
!
rank = mat%rank
mat%nnz_loc = get_count(mat)
istart = mat%istart
iend = mat%iend
CALL mpi_allreduce(mat%nnz_loc, mat%nnz, 1, MPI_INTEGER8, MPI_SUM, comm, ierr)
!
IF(ASSOCIATED(mat%cols)) DEALLOCATE(mat%cols)
IF(ASSOCIATED(mat%irow)) DEALLOCATE(mat%irow)
IF(ASSOCIATED(mat%val)) DEALLOCATE(mat%val)
ALLOCATE(mat%val(mat%nnz_loc))
ALLOCATE(mat%cols(mat%nnz_loc))
ALLOCATE(mat%irow(mat%istart:mat%iend+1))
!
! Get Sparse structure from linked list
!
d_nnz(:) = 0
o_nnz(:) = 0
mat%irow(istart) = 1
DO i=istart,iend
mat%irow(i+1) = mat%irow(i) + mat%mat%row(i)%nnz
s = mat%irow(i)
e = mat%irow(i+1)-1
CALL getrow(mat%mat%row(i), mat%val(s:e), mat%cols(s:e))
d_nnz(i) = COUNT(mat%cols(s:e) .GE. istart .AND. &
& mat%cols(s:e) .LE. iend)
o_nnz(i) = mat%mat%row(i)%nnz - d_nnz(i)
IF(nlclean) CALL destroy(mat%mat%row(i))
END DO
IF(nlclean) DEALLOCATE(mat%mat)
!
! Petsc matrix preallocation
!
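! d_nnz(i) and o_nnz(i) count the non-zeros of local row i inside and
! outside the local column block [istart,iend]: exact preallocation
! avoids repeated allocations during MatSetValues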
CALL MatMPIAIJSetPreallocation(mat%AMAT, PETSC_NULL_INTEGER, &
& d_nnz, PETSC_NULL_INTEGER, o_nnz, ierr)
CALL MatSeqAIJSetPreallocation(mat%AMAT, PETSC_NULL_INTEGER, &
& d_nnz, ierr)
!
! Petsc matrix assembly
!
mat%cols = mat%cols-1 ! Start column index = 0
DO i=istart,iend
iloc = i-istart+1
k1 = mat%irow(i)
k2 = mat%irow(i+1)
ncol = k2-k1
CALL MatSetValues(mat%AMAT, 1, i-1, ncol, mat%cols(k1:k2-1), &
& mat%val(k1:k2-1), INSERT_VALUES, ierr)
END DO
!
CALL MatAssemblyBegin(mat%AMAT, MAT_FINAL_ASSEMBLY, ierr)
CALL MatAssemblyEnd(mat%AMAT, MAT_FINAL_ASSEMBLY, ierr)
!
DEALLOCATE(mat%irow)
DEALLOCATE(mat%cols)
DEALLOCATE(mat%val)
!
END SUBROUTINE to_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE save_petsc_mat(mat, file)
!
! Save matrix in PETSC binary format
!
TYPE(petsc_mat) :: mat
CHARACTER(len=*), INTENT(in) :: file
!
INTEGER :: ierr
PetscViewer :: viewer
!
CALL PetscViewerBinaryOpen(mat%comm, TRIM(file), FILE_MODE_WRITE,&
& viewer, ierr)
CALL MatView(mat%AMAT, viewer, ierr)
CALL PetscViewerDestroy(viewer, ierr)
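!
! The file written here can be read back with load_mat below, or by any
! PETSC application via PetscViewerBinaryOpen/MatLoad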
!
END SUBROUTINE save_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE load_petsc_mat(mat, file)
!
! Load matrix in PETSC binary format
!
TYPE(petsc_mat) :: mat
CHARACTER(len=*), INTENT(in) :: file
!
INTEGER :: nloc, i, npes, ierr
PetscViewer :: viewer
!
CALL mpi_comm_size(mat%comm, npes, ierr)
!
! Clean up unneeded sparse matrix
!
IF(ASSOCIATED(mat%mat)) THEN
CALL destroy(mat%mat)
DEALLOCATE(mat%mat)
END IF
!
! Load matrix from file
!
CALL PetscViewerBinaryOpen(mat%comm, TRIM(file), FILE_MODE_READ,&
& viewer, ierr)
CALL MatLoad(mat%AMAT, viewer, ierr)
CALL PetscViewerDestroy(viewer, ierr)
!
! Some mat info
!
CALL MatGetSize(mat%AMAT, mat%rank, PETSC_NULL_INTEGER, ierr)
mat%nnz_loc = get_count(mat)
CALL mpi_allreduce(mat%nnz_loc, mat%nnz, 1, MPI_INTEGER8, MPI_SUM, &
& mat%comm, ierr)
!
!
! Recompute matrix partition from loaded matrix
!
CALL MatGetOwnershipRange(mat%AMAT, mat%istart, mat%iend, ierr)
mat%istart = mat%istart+1 ! Convert from Petsc definition
nloc = mat%iend - mat%istart + 1
CALL mpi_allgather(nloc, 1, MPI_INTEGER, mat%rcounts, 1, MPI_INTEGER, &
& mat%comm, ierr)
mat%rdispls(0) = 0
DO i=1,npes-1
mat%rdispls(i) = mat%rdispls(i-1)+mat%rcounts(i-1)
END DO
!
END SUBROUTINE load_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE bsolve_petsc_mat1(mat, rhs, sol, rtol_in, nitmax_in, nits)
!
! Backsolve, using Petsc
!
TYPE(petsc_mat) :: mat
DOUBLE PRECISION, INTENT(inout) :: rhs(:)
DOUBLE PRECISION, INTENT(out), OPTIONAL :: sol(:)
DOUBLE PRECISION, INTENT(in), OPTIONAL :: rtol_in
INTEGER, INTENT(in), OPTIONAL :: nitmax_in
INTEGER, INTENT(out), OPTIONAL :: nits
!
DOUBLE PRECISION :: rtol
INTEGER :: nitmax
INTEGER :: i, istart, iend, nrank_loc, nrank
INTEGER :: npes, me, ierr
INTEGER :: idx(mat%istart:mat%iend)
!
Vec :: vec_rhs, vec_sol
PetscScalar :: scal
PetscScalar, POINTER :: psol_loc(:)
KSPConvergedReason :: reason
!
CALL mpi_comm_size(mat%comm, npes, ierr)
CALL mpi_comm_rank(mat%comm, me, ierr)
!
istart = mat%istart
iend = mat%iend
nrank_loc = iend-istart+1
nrank = mat%rank
idx = (/ (i, i=istart,iend) /) - 1 ! 0-based petsc vector
!
! Create Vectors
!
CALL VecCreate(mat%comm, vec_rhs, ierr)
CALL VecSetSizes(vec_rhs, nrank_loc, nrank, ierr)
CALL VecSetFromOptions(vec_rhs, ierr)
CALL VecDuplicate(vec_rhs, vec_sol, ierr)
!
! Set solver
!
rtol = PETSC_DEFAULT_DOUBLE_PRECISION ! Set defaults at run time: an initializer would imply SAVE
nitmax = PETSC_DEFAULT_INTEGER
IF(PRESENT(rtol_in)) rtol = rtol_in
IF(PRESENT(nitmax_in)) nitmax = nitmax_in
!
CALL KSPSetOperators(mat%SOLVER, mat%AMAT, mat%AMAT, SAME_PRECONDITIONER, ierr)
CALL KSPSetTolerances(mat%SOLVER, rtol, PETSC_DEFAULT_DOUBLE_PRECISION,&
& PETSC_DEFAULT_DOUBLE_PRECISION, nitmax, ierr)
CALL KSPSetFromOptions(mat%SOLVER, ierr)
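! KSPSetFromOptions enables run-time selection of the Krylov solver and
! preconditioner, e.g. -ksp_type gmres -pc_type bjacobi -ksp_monitor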
!
! Set RHS
!
CALL VecSetValues(vec_rhs, nrank_loc, idx, rhs(istart), INSERT_VALUES, ierr)
CALL VecAssemblyBegin(vec_rhs, ierr)
CALL VecAssemblyEnd(vec_rhs, ierr)
!
CALL KSPSolve(mat%SOLVER, vec_rhs, vec_sol, ierr)
CALL KSPGetConvergedReason(mat%SOLVER, reason, ierr)
IF(reason .LT. 0) THEN
IF(me.EQ.0) WRITE(*,'(a,i4)') 'BSOLVE: diverges with reason', reason
END IF
IF(PRESENT(nits)) THEN
CALL KSPGetIterationNumber(mat%SOLVER, nits, ierr)
END IF
!
! Get global solutions on all MPI processes
!
CALL VecGetArrayF90(vec_sol, psol_loc, ierr)
!
IF(PRESENT(sol)) THEN
CALL mpi_allgatherv(psol_loc, nrank_loc, MPI_DOUBLE_PRECISION, &
& sol, mat%rcounts, mat%rdispls, MPI_DOUBLE_PRECISION, &
& mat%comm, ierr)
ELSE
CALL mpi_allgatherv(psol_loc, nrank_loc, MPI_DOUBLE_PRECISION, &
& rhs, mat%rcounts, mat%rdispls, MPI_DOUBLE_PRECISION, &
& mat%comm, ierr)
END IF
!
CALL VecRestoreArrayF90(vec_sol, psol_loc, ierr)
END SUBROUTINE bsolve_petsc_mat1
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE bsolve_petsc_matn(mat, rhs, sol, rtol_in, nitmax_in, nits)
!
! Backsolve, using Petsc, multiple RHS
!
TYPE(petsc_mat) :: mat
DOUBLE PRECISION, INTENT(inout) :: rhs(:,:)
DOUBLE PRECISION, INTENT(out), OPTIONAL :: sol(:,:)
DOUBLE PRECISION, INTENT(in), OPTIONAL :: rtol_in
INTEGER, INTENT(in), OPTIONAL :: nitmax_in
INTEGER, INTENT(out), OPTIONAL :: nits(:)
!
DOUBLE PRECISION :: rtol
INTEGER :: nitmax
INTEGER :: j, nrhs
INTEGER :: i, istart, iend, nrank_loc, nrank
INTEGER :: npes, me, ierr
INTEGER :: idx(mat%istart:mat%iend)
!
Vec :: vec_rhs, vec_sol
PetscScalar :: scal
PetscScalar, POINTER :: psol_loc(:)
KSPConvergedReason :: reason
!
CALL mpi_comm_size(mat%comm, npes, ierr)
CALL mpi_comm_rank(mat%comm, me, ierr)
!
nrhs = SIZE(rhs,2)
istart = mat%istart
iend = mat%iend
nrank_loc = iend-istart+1
nrank = mat%rank
idx = (/ (i, i=istart,iend) /) - 1 ! 0-based petsc vector
!
! Create Vectors
!
CALL VecCreate(mat%comm, vec_rhs, ierr)
CALL VecSetSizes(vec_rhs, nrank_loc, nrank, ierr)
CALL VecSetFromOptions(vec_rhs, ierr)
CALL VecDuplicate(vec_rhs, vec_sol, ierr)
!
! Set solver
!
rtol = PETSC_DEFAULT_DOUBLE_PRECISION ! Set defaults at run time: an initializer would imply SAVE
nitmax = PETSC_DEFAULT_INTEGER
IF(PRESENT(rtol_in)) rtol = rtol_in
IF(PRESENT(nitmax_in)) nitmax = nitmax_in
!
CALL KSPSetOperators(mat%SOLVER, mat%AMAT, mat%AMAT, SAME_PRECONDITIONER, ierr)
CALL KSPSetTolerances(mat%SOLVER, rtol, PETSC_DEFAULT_DOUBLE_PRECISION,&
& PETSC_DEFAULT_DOUBLE_PRECISION, nitmax, ierr)
CALL KSPSetFromOptions(mat%SOLVER, ierr)
!
! Set RHS
!
DO j=1,nrhs
CALL VecSetValues(vec_rhs, nrank_loc, idx, rhs(istart,j), INSERT_VALUES, ierr)
CALL VecAssemblyBegin(vec_rhs, ierr)
CALL VecAssemblyEnd(vec_rhs, ierr)
!
CALL KSPSolve(mat%SOLVER, vec_rhs, vec_sol, ierr)
CALL KSPGetConvergedReason(mat%SOLVER, reason, ierr)
IF(reason .LT. 0) THEN
IF(me.EQ.0) THEN
WRITE(*,'(a,i4,a,i8)') 'BSOLVE: diverges with reason', reason, &
& ' for j =', j
END IF
END IF
IF(PRESENT(nits)) THEN
CALL KSPGetIterationNumber(mat%SOLVER, nits(j), ierr)
END IF
!
! Get global solutions on all MPI processes
!
CALL VecGetArrayF90(vec_sol, psol_loc, ierr)
!
IF(PRESENT(sol)) THEN
CALL mpi_allgatherv(psol_loc, nrank_loc, MPI_DOUBLE_PRECISION, &
& sol(1,j), mat%rcounts, mat%rdispls, MPI_DOUBLE_PRECISION, &
& mat%comm, ierr)
ELSE
CALL mpi_allgatherv(psol_loc, nrank_loc, MPI_DOUBLE_PRECISION, &
& rhs(1,j), mat%rcounts, mat%rdispls, MPI_DOUBLE_PRECISION, &
& mat%comm, ierr)
END IF
!
CALL VecRestoreArrayF90(vec_sol, psol_loc, ierr)
END DO
!
END SUBROUTINE bsolve_petsc_matn
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
FUNCTION vmx_petsc_mat(mat, xarr) RESULT(yarr)
!
! Return product mat*x
!
TYPE(petsc_mat) :: mat
DOUBLE PRECISION, INTENT(in) :: xarr(:)
DOUBLE PRECISION :: yarr(SIZE(xarr))
DOUBLE PRECISION :: alpha=1.0d0, beta=0.0d0
CHARACTER(len=6) :: matdescra
INTEGER :: n, i, j
!
n = mat%rank
!
#ifdef MKL
IF(mat%nlsym) THEN
matdescra = 'sun'
ELSE
matdescra = 'g'
END IF
!
CALL mkl_dcsrmv('N', n, n, alpha, matdescra, mat%val, &
& mat%cols, mat%irow(1), mat%irow(2), xarr, &
& beta, yarr)
#else
yarr = 0.0d0
DO i=1,n
DO j=mat%irow(i), mat%irow(i+1)-1
yarr(i) = yarr(i) + mat%val(j)*xarr(mat%cols(j))
END DO
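! Symmetric case: only the upper triangle is stored, so add the transposed
! contributions; the row's first stored element (assumed to be the
! diagonal) is skipped to avoid counting it twice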
IF(mat%nlsym) THEN
DO j=mat%irow(i)+1, mat%irow(i+1)-1
yarr(mat%cols(j)) = yarr(mat%cols(j)) &
& + mat%val(j)*xarr(i)
END DO
END IF
END DO
#endif
!
END FUNCTION vmx_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
FUNCTION vmx_petsc_matn(mat, xarr) RESULT(yarr)
!
! Return product mat*x
!
TYPE(petsc_mat) :: mat
DOUBLE PRECISION, INTENT(in) :: xarr(:,:)
DOUBLE PRECISION :: yarr(SIZE(xarr,1),SIZE(xarr,2))
!
DOUBLE PRECISION :: alpha=1.0d0, beta=0.0d0
INTEGER :: n, nrhs, i, j
CHARACTER(len=6) :: matdescra
!
n = mat%rank
nrhs = SIZE(xarr,2)
!
#ifdef MKL
IF(mat%nlsym) THEN
matdescra = 'sun'
ELSE
matdescra = 'g'
END IF
!
CALL mkl_dcsrmm('N', n, nrhs, n, alpha, matdescra, mat%val,&
& mat%cols, mat%irow(1), mat%irow(2), xarr, &
& n, beta, yarr, n)
#else
yarr = 0.0d0
DO i=1,n
DO j=mat%irow(i), mat%irow(i+1)-1
yarr(i,:) = yarr(i,:) + mat%val(j)*xarr(mat%cols(j),:)
END DO
IF(mat%nlsym) THEN
DO j=mat%irow(i)+1, mat%irow(i+1)-1
yarr(mat%cols(j),:) = yarr(mat%cols(j),:) &
& + mat%val(j)*xarr(i,:)
END DO
END IF
END DO
#endif
!
END FUNCTION vmx_petsc_matn
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE destroy_petsc_mat(mat)
!
! Deallocate the sparse matrix mat
!
TYPE(petsc_mat) :: mat
INTEGER :: ierr
!
IF(ASSOCIATED(mat%mat)) THEN
CALL destroy(mat%mat)
DEALLOCATE(mat%mat)
END IF
!
IF(ASSOCIATED(mat%cols)) DEALLOCATE(mat%cols)
IF(ASSOCIATED(mat%irow)) DEALLOCATE(mat%irow)
IF(ASSOCIATED(mat%val)) DEALLOCATE(mat%val)
!
CALL MatDestroy(mat%AMAT,ierr)
END SUBROUTINE destroy_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE mcopy_petsc_mat(mata, matb)
!
! Matrix copy: B = A
!
TYPE(petsc_mat) :: mata, matb
INTEGER :: ierr
!
! Assume that matb was already initialized by init_petsc_mat.
IF(matb%rank.LE.0) THEN
WRITE(*,'(a)') 'MCOPY: Mat B is not initialized with INIT'
STOP '*** Abnormal EXIT in MODULE petsc_mod ***'
END IF
!
IF(ASSOCIATED(matb%mat)) THEN
CALL destroy(matb%mat)
DEALLOCATE(matb%mat)
END IF
!
matb%rank = mata%rank
matb%nnz = mata%nnz
matb%nnz_loc = mata%nnz_loc
matb%istart = mata%istart
matb%iend = mata%iend
matb%nlsym = mata%nlsym
matb%nlforce_zero = mata%nlforce_zero
!
! Destroy existing PETSC matrix and recreate a new one from scratch
!
CALL MatDestroy(matb%AMAT, ierr)
CALL MatConvert(mata%AMAT, MATSAME, MAT_INITIAL_MATRIX, matb%AMAT, ierr)
END SUBROUTINE mcopy_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
SUBROUTINE maddto_petsc_mat(mata, alpha, matb)
!
! A <- A + alpha*B
!
TYPE(petsc_mat) :: mata, matb
DOUBLE PRECISION :: alpha
INTEGER :: ierr
!
IF(ASSOCIATED(mata%val) .AND. ASSOCIATED(matb%val)) THEN
mata%val = mata%val + alpha*matb%val
ELSE
! val is freed at the end of to_petsc_mat: fall back to the assembled matrices
CALL MatAXPY(mata%AMAT, alpha, matb%AMAT, SAME_NONZERO_PATTERN, ierr)
END IF
END SUBROUTINE maddto_petsc_mat
!++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
!
END MODULE petsc_bsplines
