diff --git a/src/basic_mod.F90 b/src/basic_mod.F90
index 58e1540..ece5f9b 100644
--- a/src/basic_mod.F90
+++ b/src/basic_mod.F90
@@ -1,373 +1,374 @@
MODULE basic
  ! Basic module for time dependent problems
  use, intrinsic :: iso_c_binding
  use prec_const
  IMPLICIT none
  ! INCLUDE 'fftw3-mpi.f03'
  INTEGER  :: nrun = 1          ! Number of time steps to run
  real(dp) :: tmax = 100000.0   ! Maximum simulation time
  real(dp) :: dt   = 1.0        ! Time step
  real(dp) :: time = 0          ! Current simulation time (init from restart file)
  INTEGER :: comm0              ! Default communicator with a topology
  INTEGER :: group0             ! Default group with a topology
  INTEGER :: rank_0             ! Rank in comm0
  ! Communicators for 1-dim cartesian subgrids of comm0
  INTEGER :: comm_p, comm_ky, comm_z
  INTEGER :: rank_p, rank_ky, rank_z ! Ranks
  INTEGER :: comm_pz,  rank_pz       ! 2D comm for N_a(p,j,z) output (mspfile)
  INTEGER :: comm_kyz, rank_kyz      ! 2D comm for N_a(p,j,z) output (mspfile)
  INTEGER :: comm_ky0, rank_ky0      ! comm along ky with p=0
  INTEGER :: comm_z0,  rank_z0       ! comm along z with p=0
  INTEGER :: group_ky0, group_z0
  INTEGER :: jobnum  = 0        ! Job number
  INTEGER :: step    = 0        ! Calculation step of this run
  INTEGER :: cstep   = 0        ! Current step number (init from restart file)
  LOGICAL :: nlend   = .FALSE.  ! Signal end of run
  LOGICAL :: crashed = .FALSE.  ! Signal end of crashed run
  INTEGER :: ierr               ! Flag for MPI error
  INTEGER :: my_id              ! Rank in COMM_WORLD
  INTEGER :: num_procs          ! Number of MPI processes
  INTEGER :: num_procs_p        ! Number of processes in p
  INTEGER :: num_procs_ky       ! Number of processes in ky
  INTEGER :: num_procs_z        ! Number of processes in z
  INTEGER :: num_procs_pz       ! Number of processes in the pz comm
  INTEGER :: num_procs_kyz      ! Number of processes in the kyz comm
  INTEGER :: nbr_L, nbr_R       ! Left and right neighbours (along p)
  INTEGER :: nbr_T, nbr_B       ! Top and bottom neighbours (along kx)
  INTEGER :: nbr_U, nbr_D       ! Upstream and downstream neighbours (along z)
  INTEGER :: iframe0d           ! Number of times 0d datasets have been output (for diagnose)
  INTEGER :: iframe1d           ! Number of times 1d datasets have been output (for diagnose)
  INTEGER :: iframe2d           ! Number of times 2d datasets have been output (for diagnose)
  INTEGER :: iframe3d           ! Number of times 3d datasets have been output (for diagnose)
  INTEGER :: iframe5d           ! Number of times 5d datasets have been output (for diagnose)
  ! List of logical file units
  INTEGER :: lu_in   = 90       ! File duplicated from STDIN
  INTEGER :: lu_stop = 91       ! Stop file, see subroutine TESEND
  ! To measure computation time
  real     :: start, finish
  real(dp) :: t0_rhs, t0_adv_field, t0_poisson, t0_Sapj, t0_diag, t0_checkfield,&
              t0_step, t0_clos, t0_ghost, t0_coll, t0_process
  real(dp) :: t1_rhs, t1_adv_field, t1_poisson, t1_Sapj, t1_diag, t1_checkfield,&
              t1_step, t1_clos, t1_ghost, t1_coll, t1_process
  real(dp) :: tc_rhs, tc_adv_field, tc_poisson, tc_Sapj, tc_diag, tc_checkfield,&
              tc_step, tc_clos, tc_ghost, tc_coll, tc_process
  real(dp) :: maxruntime = 1e9  ! Maximum simulation CPU time
  LOGICAL  :: GATHERV_OUTPUT = .true.
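  ! --------------------------------------------------------------------------
  ! Note (illustrative, not part of this patch): each t0_*/t1_*/tc_* triplet
  ! declared above is a cumulative stopwatch. A region is timed by bracketing
  ! it with two cpu_time calls and accumulating the difference, e.g.
  !     CALL cpu_time(t0_ghost)
  !     ...                       ! work, e.g. the ghost exchanges
  !     CALL cpu_time(t1_ghost)
  !     tc_ghost = tc_ghost + (t1_ghost - t0_ghost)
  ! Every tc_* counter therefore has to be zeroed once at start-up, which is
  ! what the basic_data change further down does.
  ! --------------------------------------------------------------------------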
INTERFACE allocate_array MODULE PROCEDURE allocate_array_dp1,allocate_array_dp2,allocate_array_dp3,allocate_array_dp4, allocate_array_dp5, allocate_array_dp6 MODULE PROCEDURE allocate_array_dc1,allocate_array_dc2,allocate_array_dc3,allocate_array_dc4, allocate_array_dc5, allocate_array_dc6 MODULE PROCEDURE allocate_array_i1,allocate_array_i2,allocate_array_i3,allocate_array_i4 MODULE PROCEDURE allocate_array_l1,allocate_array_l2,allocate_array_l3,allocate_array_l4 END INTERFACE allocate_array CONTAINS !================================================================================ SUBROUTINE basic_data ! Read basic data for input file use prec_const IMPLICIT NONE NAMELIST /BASIC/ nrun, dt, tmax, maxruntime CALL find_input_file READ(lu_in,basic) !Init cumulative timers - tc_rhs = 0.;tc_adv_field = 0.; tc_poisson = 0. - tc_Sapj = 0.; tc_diag = 0.; tc_checkfield = 0. - + tc_rhs = 0.; tc_adv_field = 0.; tc_poisson = 0. + tc_Sapj = 0.; tc_diag = 0.; tc_checkfield = 0. + tc_ghost = 0.; tc_coll = 0.; tc_process = 0. + tc_clos = 0.; tc_step = 0.; END SUBROUTINE basic_data SUBROUTINE basic_outputinputs(fid, str) ! ! Write the input parameters to the results_xx.h5 file ! USE prec_const USE futils, ONLY: attach IMPLICIT NONE INTEGER, INTENT(in) :: fid CHARACTER(len=256), INTENT(in) :: str CALL attach(fid, TRIM(str), "start_iframe0d", iframe0d) CALL attach(fid, TRIM(str), "start_iframe2d", iframe2d) CALL attach(fid, TRIM(str), "start_iframe3d", iframe3d) CALL attach(fid, TRIM(str), "start_iframe5d", iframe5d) CALL attach(fid, TRIM(str), "start_time", time) CALL attach(fid, TRIM(str), "start_cstep", cstep-1) CALL attach(fid, TRIM(str), "dt", dt) CALL attach(fid, TRIM(str), "tmax", tmax) CALL attach(fid, TRIM(str), "nrun", nrun) CALL attach(fid, TRIM(str), "cpu_time", -1) CALL attach(fid, TRIM(str), "Nproc", num_procs) CALL attach(fid, TRIM(str), "Np_p" , num_procs_p) CALL attach(fid, TRIM(str), "Np_kx",num_procs_ky) CALL attach(fid, TRIM(str), "Np_z", num_procs_z) END SUBROUTINE basic_outputinputs !================================================================================ SUBROUTINE find_input_file IMPLICIT NONE CHARACTER(len=32) :: str, input_file INTEGER :: nargs, fileid, l LOGICAL :: mlexist nargs = COMMAND_ARGUMENT_COUNT() IF((nargs .EQ. 1) .OR. (nargs .EQ. 4)) THEN CALL GET_COMMAND_ARGUMENT(nargs, str, l, ierr) READ(str(1:l),'(i3)') fileid WRITE(input_file,'(a,a1,i2.2,a3)') 'fort','_',fileid,'.90' INQUIRE(file=input_file, exist=mlexist) IF( mlexist ) THEN IF(my_id.EQ.0) write(*,*) 'Reading input ', input_file,'...' OPEN(lu_in, file=input_file) ELSE IF(my_id.EQ.0) write(*,*) 'Reading input fort.90...' OPEN(lu_in, file='fort.90') ENDIF ENDIF END SUBROUTINE find_input_file !================================================================================ SUBROUTINE daytim(str) ! Print date and time use prec_const IMPLICIT NONE CHARACTER(len=*) , INTENT(in) :: str CHARACTER(len=16) :: d, t, dat, time !________________________________________________________________________________ ! CALL DATE_AND_TIME(d,t) dat=d(7:8) // '/' // d(5:6) // '/' // d(1:4) time=t(1:2) // ':' // t(3:4) // ':' // t(5:10) WRITE(*,'(a,1x,a,1x,a)') str, dat(1:10), time(1:12) ! END SUBROUTINE daytim !================================================================================ SUBROUTINE display_h_min_s(time) real :: time integer :: days, hours, mins, secs days = FLOOR(time/24./3600.); hours= FLOOR(time/3600.); mins = FLOOR(time/60.); secs = FLOOR(time); IF ( days .GT. 
0 ) THEN !display day h min s hours = (time/3600./24. - days) * 24 mins = (time/3600. - days*24. - hours) * 60 secs = (time/60. - days*24.*60 - hours*60 - mins) * 60 IF (my_id .EQ. 0) WRITE(*,*) 'CPU Time = ', days, '[day]', hours, '[h]', mins, '[min]', secs, '[s]' IF (my_id .EQ. 0) WRITE(*,*) '(',time,'[s])' ELSEIF ( hours .GT. 0 ) THEN !display h min s mins = (time/3600. - hours) * 60 secs = (time/60. - hours*60 - mins) * 60 IF (my_id .EQ. 0) WRITE(*,*) 'CPU Time = ', hours, '[h]', mins, '[min]', secs, '[s]' IF (my_id .EQ. 0) WRITE(*,*) '(',time,'[s])' ELSEIF ( mins .GT. 0 ) THEN !display min s secs = (time/60. - mins) * 60 IF (my_id .EQ. 0) WRITE(*,*) 'CPU Time = ', mins, '[min]', secs, '[s]' IF (my_id .EQ. 0) WRITE(*,*) '(',time,'[s])' ELSE ! display s IF (my_id .EQ. 0) WRITE(*,*) 'CPU Time = ', FLOOR(time), '[s]' ENDIF END SUBROUTINE display_h_min_s !================================================================================ ! To allocate arrays of doubles, integers, etc. at run time SUBROUTINE allocate_array_dp1(a,is1,ie1) IMPLICIT NONE real(dp), DIMENSION(:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1 ALLOCATE(a(is1:ie1)) a=0.0_dp END SUBROUTINE allocate_array_dp1 SUBROUTINE allocate_array_dp2(a,is1,ie1,is2,ie2) IMPLICIT NONE real(dp), DIMENSION(:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2 ALLOCATE(a(is1:ie1,is2:ie2)) a=0.0_dp END SUBROUTINE allocate_array_dp2 SUBROUTINE allocate_array_dp3(a,is1,ie1,is2,ie2,is3,ie3) IMPLICIT NONE real(dp), DIMENSION(:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3)) a=0.0_dp END SUBROUTINE allocate_array_dp3 SUBROUTINE allocate_array_dp4(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4) IMPLICIT NONE real(dp), DIMENSION(:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4)) a=0.0_dp END SUBROUTINE allocate_array_dp4 SUBROUTINE allocate_array_dp5(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5) IMPLICIT NONE real(dp), DIMENSION(:,:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4,is5:ie5)) a=0.0_dp END SUBROUTINE allocate_array_dp5 SUBROUTINE allocate_array_dp6(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5,is6,ie6) IMPLICIT NONE real(dp), DIMENSION(:,:,:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5,is6,ie6 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4,is5:ie5,is6:ie6)) a=0.0_dp END SUBROUTINE allocate_array_dp6 !======================================== SUBROUTINE allocate_array_dc1(a,is1,ie1) IMPLICIT NONE DOUBLE COMPLEX, DIMENSION(:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1 ALLOCATE(a(is1:ie1)) a=CMPLX(0.0_dp,0.0_dp) END SUBROUTINE allocate_array_dc1 SUBROUTINE allocate_array_dc2(a,is1,ie1,is2,ie2) IMPLICIT NONE DOUBLE COMPLEX, DIMENSION(:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2 ALLOCATE(a(is1:ie1,is2:ie2)) a=CMPLX(0.0_dp,0.0_dp) END SUBROUTINE allocate_array_dc2 SUBROUTINE allocate_array_dc3(a,is1,ie1,is2,ie2,is3,ie3) IMPLICIT NONE DOUBLE COMPLEX, DIMENSION(:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3)) a=CMPLX(0.0_dp,0.0_dp) END SUBROUTINE allocate_array_dc3 SUBROUTINE allocate_array_dc4(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4) IMPLICIT NONE DOUBLE COMPLEX, DIMENSION(:,:,:,:), 
ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4)) a=CMPLX(0.0_dp,0.0_dp) END SUBROUTINE allocate_array_dc4 SUBROUTINE allocate_array_dc5(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5) IMPLICIT NONE DOUBLE COMPLEX, DIMENSION(:,:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4,is5:ie5)) a=CMPLX(0.0_dp,0.0_dp) END SUBROUTINE allocate_array_dc5 SUBROUTINE allocate_array_dc6(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5,is6,ie6) IMPLICIT NONE DOUBLE COMPLEX, DIMENSION(:,:,:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5,is6,ie6 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4,is5:ie5,is6:ie6)) a=CMPLX(0.0_dp,0.0_dp) END SUBROUTINE allocate_array_dc6 !======================================== SUBROUTINE allocate_array_i1(a,is1,ie1) IMPLICIT NONE INTEGER, DIMENSION(:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1 ALLOCATE(a(is1:ie1)) a=0 END SUBROUTINE allocate_array_i1 SUBROUTINE allocate_array_i2(a,is1,ie1,is2,ie2) IMPLICIT NONE INTEGER, DIMENSION(:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2 ALLOCATE(a(is1:ie1,is2:ie2)) a=0 END SUBROUTINE allocate_array_i2 SUBROUTINE allocate_array_i3(a,is1,ie1,is2,ie2,is3,ie3) IMPLICIT NONE INTEGER, DIMENSION(:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3)) a=0 END SUBROUTINE allocate_array_i3 SUBROUTINE allocate_array_i4(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4) IMPLICIT NONE INTEGER, DIMENSION(:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4)) a=0 END SUBROUTINE allocate_array_i4 SUBROUTINE allocate_array_i5(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5) IMPLICIT NONE real(dp), DIMENSION(:,:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4,is5:ie5)) a=0 END SUBROUTINE allocate_array_i5 !======================================== SUBROUTINE allocate_array_l1(a,is1,ie1) IMPLICIT NONE LOGICAL, DIMENSION(:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1 ALLOCATE(a(is1:ie1)) a=.false. END SUBROUTINE allocate_array_l1 SUBROUTINE allocate_array_l2(a,is1,ie1,is2,ie2) IMPLICIT NONE LOGICAL, DIMENSION(:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2 ALLOCATE(a(is1:ie1,is2:ie2)) a=.false. END SUBROUTINE allocate_array_l2 SUBROUTINE allocate_array_l3(a,is1,ie1,is2,ie2,is3,ie3) IMPLICIT NONE LOGICAL, DIMENSION(:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3)) a=.false. END SUBROUTINE allocate_array_l3 SUBROUTINE allocate_array_l4(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4) IMPLICIT NONE LOGICAL, DIMENSION(:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4)) a=.false. END SUBROUTINE allocate_array_l4 SUBROUTINE allocate_array_l5(a,is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5) IMPLICIT NONE LOGICAL, DIMENSION(:,:,:,:,:), ALLOCATABLE, INTENT(INOUT) :: a INTEGER, INTENT(IN) :: is1,ie1,is2,ie2,is3,ie3,is4,ie4,is5,ie5 ALLOCATE(a(is1:ie1,is2:ie2,is3:ie3,is4:ie4,is5:ie5)) a=.false. 
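  ! --------------------------------------------------------------------------
  ! Usage sketch (illustrative, not part of this patch): the allocate_array
  ! generic interface declared at the top of this module resolves on the type
  ! and rank of its first argument, so one call site covers all of the
  ! specific routines above, e.g. (array and bound names are hypothetical):
  !     CALL allocate_array(kernel_loc, ijs, ije, ikys, ikye)            ! -> allocate_array_dp2
  !     CALL allocate_array(moments_loc, ips, ipe, ijs, ije, ikys, ikye, &
  !                         ikxs, ikxe, izs, ize, 1, ntimelevels)        ! -> allocate_array_dc6
  ! Each array comes back allocated with the requested (possibly non-unit)
  ! lower bounds and initialised to zero or .false.
  ! --------------------------------------------------------------------------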
END SUBROUTINE allocate_array_l5
END MODULE basic
diff --git a/src/control.F90 b/src/control.F90
index 5c2571d..9420880 100644
--- a/src/control.F90
+++ b/src/control.F90
@@ -1,84 +1,85 @@
SUBROUTINE control
  ! Control the run
  use basic
  use prec_const
  IMPLICIT NONE
  CALL cpu_time(start)
  !________________________________________________________________________________
  ! 1. Prologue
  ! 1.1 Initialize the parallel environment
  IF (my_id .EQ. 0) WRITE(*,'(a/)') 'MPI initialized'
  CALL ppinit
  CALL daytim('Start at ')
  ! 1.2 Define data specific to run
  IF (my_id .EQ. 0) WRITE(*,*) 'Load basic data...'
  CALL basic_data
  ! CALL mpi_barrier(MPI_COMM_WORLD, ierr)
  IF (my_id .EQ. 0) WRITE(*,'(a/)') '...basic data loaded.'
  ! 1.3 Read input parameters from input file
  IF (my_id .EQ. 0) WRITE(*,*) 'Read input parameters...'
  CALL readinputs
  ! CALL mpi_barrier(MPI_COMM_WORLD, ierr)
  IF (my_id .EQ. 0) WRITE(*,'(a/)') '...input parameters read'
  ! 1.4 Set auxiliary values (allocate arrays, set grid, ...)
  IF (my_id .EQ. 0) WRITE(*,*) 'Calculate auxval...'
  CALL auxval
  ! CALL mpi_barrier(MPI_COMM_WORLD, ierr)
  IF (my_id .EQ. 0) WRITE(*,'(a/)') '...auxval calculated'
  ! 1.5 Initial conditions
  IF (my_id .EQ. 0) WRITE(*,*) 'Create initial state...'
  CALL inital
  ! CALL mpi_barrier(MPI_COMM_WORLD, ierr)
  IF (my_id .EQ. 0) WRITE(*,'(a/)') '...initial state created'
  ! 1.6 Initial diagnostics
  IF (my_id .EQ. 0) WRITE(*,*) 'Initial diagnostics...'
  CALL diagnose(0)
  ! CALL mpi_barrier(MPI_COMM_WORLD, ierr)
  IF (my_id .EQ. 0) WRITE(*,'(a/)') '...initial diagnostics done'
  CALL FLUSH(stdout)
  CALL mpi_barrier(MPI_COMM_WORLD, ierr)
  !________________________________________________________________________________
  IF (my_id .EQ. 0) WRITE(*,*) 'Time integration loop..'
  !________________________________________________________________________________
  ! 2. Main loop
  DO
    CALL cpu_time(t0_step) ! Measuring time
    step  = step  + 1
    cstep = cstep + 1
    CALL stepon
    time  = time + dt
    CALL tesend
    IF( nlend ) EXIT ! exit do loop
    CALL diagnose(step)
-   CALL cpu_time(t1_step); tc_step = tc_step + (t1_step - t0_step)
+   CALL cpu_time(t1_step)
+   tc_step = tc_step + (t1_step - t0_step)
  END DO
  IF (my_id .EQ. 0) WRITE(*,'(a/)') '...time integration done'
  !________________________________________________________________________________
  ! 9. Epilogue
  CALL diagnose(-1)
  CALL endrun
  IF (my_id .EQ. 0) CALL daytim('Done at ')
  CALL ppexit
END SUBROUTINE control
diff --git a/src/ghosts_mod.F90 b/src/ghosts_mod.F90
index 8f0f436..bea0f5c 100644
--- a/src/ghosts_mod.F90
+++ b/src/ghosts_mod.F90
@@ -1,365 +1,366 @@
module ghosts
USE basic
USE grid
USE time_integration
USE model,    ONLY : KIN_E, beta
USE geometry, ONLY : SHEARED, ikx_zBC_L, ikx_zBC_R
IMPLICIT NONE
INTEGER :: status(MPI_STATUS_SIZE), source, dest, count, ipg
PUBLIC :: update_ghosts_moments, update_ghosts_EM
CONTAINS

SUBROUTINE update_ghosts_moments
  CALL cpu_time(t0_ghost)
  IF (num_procs_p .GT. 1) THEN ! Do it only if we share the p
    IF(KIN_E)&
    CALL update_ghosts_p_e
    CALL update_ghosts_p_i
  ENDIF
  IF(Nz .GT. 1) THEN
    IF(KIN_E) &
    CALL update_ghosts_z_e
    CALL update_ghosts_z_i
  ENDIF
+ CALL cpu_time(t1_ghost)
  tc_ghost = tc_ghost + (t1_ghost - t0_ghost)
END SUBROUTINE update_ghosts_moments

SUBROUTINE update_ghosts_EM
  CALL cpu_time(t0_ghost)
  IF(Nz .GT. 1) THEN
    CALL update_ghosts_z_phi
    IF(beta .GT. 0._dp) &
    CALL update_ghosts_z_psi
  ENDIF
+ CALL cpu_time(t1_ghost) ! stop the timer here too, otherwise t1_ghost is stale
  tc_ghost = tc_ghost + (t1_ghost - t0_ghost)
END SUBROUTINE update_ghosts_EM

!Communicate p+1, p+2 moments to left neighbour and p-1, p-2 moments to right one
!
[a b|C D|e f] : proc n has moments a to f where a,b,e,f are ghosts ! !proc 0: [0 1 2 3 4|5 6] ! V V ^ ^ !proc 1: [3 4|5 6 7 8|9 10] ! V V ^ ^ !proc 2: [7 8|9 10 11 12|13 14] ! V V ^ ^ !proc 3: [11 12|13 14 15 16|17 18] ! ^ ^ !Closure by zero truncation : 0 0 SUBROUTINE update_ghosts_p_e USE fields, ONLY : moments_e IMPLICIT NONE count = (ijge_e-ijgs_e+1)*(ikye-ikys+1)*(ikxe-ikxs+1)*(izge-izgs+1) !!!!!!!!!!! Send ghost to right neighbour !!!!!!!!!!!!!!!!!!!!!! ! Send the last local moment to fill the -1 neighbour ghost CALL mpi_sendrecv(moments_e(ipe_e ,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 10, & ! Send to right moments_e(ips_e-1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 10, & ! Recieve from left comm0, status, ierr) IF (deltape .EQ. 1) & ! If we have odd Hermite degrees we need a 2nd order stencil CALL mpi_sendrecv(moments_e(ipe_e-1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 11, & ! Send to right moments_e(ips_e-2,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 11, & ! Recieve from left comm0, status, ierr) !!!!!!!!!!! Send ghost to left neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv(moments_e(ips_e ,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 12, & ! Send to left moments_e(ipe_e+1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 12, & ! Recieve from right comm0, status, ierr) IF (deltape .EQ. 1) & ! If we have odd Hermite degrees we need a 2nd order stencil CALL mpi_sendrecv(moments_e(ips_e+1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 13, & ! Send to left moments_e(ipe_e+2,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 13, & ! Recieve from right comm0, status, ierr) END SUBROUTINE update_ghosts_p_e !Communicate p+1, p+2 moments to left neighboor and p-1, p-2 moments to right one SUBROUTINE update_ghosts_p_i USE fields, ONLY : moments_i IMPLICIT NONE count = (ijge_i-ijgs_i+1)*(ikye-ikys+1)*(ikxe-ikxs+1)*(izge-izgs+1) ! Number of elements sent !!!!!!!!!!! Send ghost to right neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv(moments_i(ipe_i ,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 14, & moments_i(ips_i-1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 14, & comm0, status, ierr) IF (deltapi .EQ. 1) & ! If we have odd Hermite degrees we need a 2nd order stencil CALL mpi_sendrecv(moments_i(ipe_i-1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 15, & moments_i(ips_i-2,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 15, & comm0, status, ierr) !!!!!!!!!!! Send ghost to left neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv(moments_i(ips_i ,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 16, & moments_i(ipe_i+1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 16, & comm0, status, ierr) IF (deltapi .EQ. 1) & ! If we have odd Hermite degrees we need a 2nd order stencil CALL mpi_sendrecv(moments_i(ips_i+1,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_L, 17, & moments_i(ipe_i+2,:,:,:,:,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_R, 17, & comm0, status, ierr) END SUBROUTINE update_ghosts_p_i !Communicate z+1, z+2 moments to left neighboor and z-1, z-2 moments to right one ! [a b|C D|e f] : proc n has moments a to f where a,b,e,f are ghosts ! !proc 0: [0 1 2 3 4|5 6] ! V V ^ ^ !proc 1: [3 4|5 6 7 8|9 10] ! V V ^ ^ !proc 2: [7 8|9 10 11 12|13 14] ! V V ^ ^ !proc 3: [11 12|13 14 15 16|17 18] ! 
^ ^ !Periodic: 0 1 SUBROUTINE update_ghosts_z_e USE parallel, ONLY : buff_pjxy_zBC_e USE fields, ONLY : moments_e IMPLICIT NONE INTEGER :: ikxBC_L, ikxBC_R IF(Nz .GT. 1) THEN IF (num_procs_z .GT. 1) THEN CALL MPI_BARRIER(MPI_COMM_WORLD,ierr) count = (ipge_e-ipgs_e+1)*(ijge_e-ijgs_e+1)*(ikye-ikys+1)*(ikxe-ikxs+1) !!!!!!!!!!! Send ghost to up neighbour !!!!!!!!!!!!!!!!!!!!!! ! Send the last local moment to fill the -1 neighbour ghost CALL mpi_sendrecv(moments_e(:,:,:,:,ize ,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_U, 20, & ! Send to Up the last buff_pjxy_zBC_e(:,:,:,:,-1), count, MPI_DOUBLE_COMPLEX, nbr_D, 20, & ! Recieve from Down the first-1 comm0, status, ierr) CALL mpi_sendrecv(moments_e(:,:,:,:,ize-1,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_U, 21, & ! Send to Up the last buff_pjxy_zBC_e(:,:,:,:,-2), count, MPI_DOUBLE_COMPLEX, nbr_D, 21, & ! Recieve from Down the first-1 comm0, status, ierr) !!!!!!!!!!! Send ghost to down neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv(moments_e(:,:,:,:,izs ,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_D, 22, & ! Send to Up the last buff_pjxy_zBC_e(:,:,:,:,+1), count, MPI_DOUBLE_COMPLEX, nbr_U, 22, & ! Recieve from Down the first-1 comm0, status, ierr) CALL mpi_sendrecv(moments_e(:,:,:,:,izs+1,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_D, 23, & ! Send to Up the last buff_pjxy_zBC_e(:,:,:,:,+2), count, MPI_DOUBLE_COMPLEX, nbr_U, 23, & ! Recieve from Down the first-1 comm0, status, ierr) ELSE !No parallel (copy) buff_pjxy_zBC_e(:,:,:,:,-1) = moments_e(:,:,:,:,ize ,updatetlevel) buff_pjxy_zBC_e(:,:,:,:,-2) = moments_e(:,:,:,:,ize-1,updatetlevel) buff_pjxy_zBC_e(:,:,:,:,+1) = moments_e(:,:,:,:,izs ,updatetlevel) buff_pjxy_zBC_e(:,:,:,:,+2) = moments_e(:,:,:,:,izs+1,updatetlevel) ENDIF DO iky = ikys,ikye DO ikx = ikxs,ikxe ikxBC_L = ikx_zBC_L(iky,ikx); IF (ikxBC_L .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! first-1 gets last moments_e(:,:,iky,ikx,izs-1,updatetlevel) = buff_pjxy_zBC_e(:,:,iky,ikxBC_L,-1) ! first-2 gets last-1 moments_e(:,:,iky,ikx,izs-2,updatetlevel) = buff_pjxy_zBC_e(:,:,iky,ikxBC_L,-2) ELSE moments_e(:,:,iky,ikx,izs-1,updatetlevel) = 0._dp moments_e(:,:,iky,ikx,izs-2,updatetlevel) = 0._dp ENDIF ikxBC_R = ikx_zBC_R(iky,ikx); IF (ikxBC_R .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! last+1 gets first moments_e(:,:,iky,ikx,ize+1,updatetlevel) = buff_pjxy_zBC_e(:,:,iky,ikxBC_R,+1) ! last+2 gets first+1 moments_e(:,:,iky,ikx,ize+2,updatetlevel) = buff_pjxy_zBC_e(:,:,iky,ikxBC_R,+2) ELSE moments_e(:,:,iky,ikx,ize+1,updatetlevel) = 0._dp moments_e(:,:,iky,ikx,ize+2,updatetlevel) = 0._dp ENDIF ENDDO ENDDO ENDIF END SUBROUTINE update_ghosts_z_e SUBROUTINE update_ghosts_z_i USE parallel, ONLY : buff_pjxy_zBC_i USE fields, ONLY : moments_i IMPLICIT NONE INTEGER :: ikxBC_L, ikxBC_R IF(Nz .GT. 1) THEN IF (num_procs_z .GT. 1) THEN CALL MPI_BARRIER(MPI_COMM_WORLD,ierr) count = (ipge_i-ipgs_i+1)*(ijge_i-ijgs_i+1)*(ikye-ikys+1)*(ikxe-ikxs+1) !!!!!!!!!!! Send ghost to up neighbour !!!!!!!!!!!!!!!!!!!!!! ! Send the last local moment to fill the -1 neighbour ghost CALL mpi_sendrecv(moments_i(:,:,:,:,ize ,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_U, 24, & ! Send to Up the last buff_pjxy_zBC_i(:,:,:,:,-1), count, MPI_DOUBLE_COMPLEX, nbr_D, 24, & ! Recieve from Down the first-1 comm0, status, ierr) CALL mpi_sendrecv(moments_i(:,:,:,:,ize-1,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_U, 25, & ! Send to Up the last buff_pjxy_zBC_i(:,:,:,:,-2), count, MPI_DOUBLE_COMPLEX, nbr_D, 25, & ! 
Recieve from Down the first-1 comm0, status, ierr) !!!!!!!!!!! Send ghost to down neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv(moments_i(:,:,:,:,izs ,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_D, 26, & ! Send to Up the last buff_pjxy_zBC_i(:,:,:,:,+1), count, MPI_DOUBLE_COMPLEX, nbr_U, 26, & ! Recieve from Down the first-1 comm0, status, ierr) CALL mpi_sendrecv(moments_i(:,:,:,:,izs+1,updatetlevel), count, MPI_DOUBLE_COMPLEX, nbr_D, 27, & ! Send to Up the last buff_pjxy_zBC_i(:,:,:,:,+2), count, MPI_DOUBLE_COMPLEX, nbr_U, 27, & ! Recieve from Down the first-1 comm0, status, ierr) ELSE !No parallel (copy) buff_pjxy_zBC_i(:,:,:,:,-1) = moments_i(:,:,:,:,ize ,updatetlevel) buff_pjxy_zBC_i(:,:,:,:,-2) = moments_i(:,:,:,:,ize-1,updatetlevel) buff_pjxy_zBC_i(:,:,:,:,+1) = moments_i(:,:,:,:,izs ,updatetlevel) buff_pjxy_zBC_i(:,:,:,:,+2) = moments_i(:,:,:,:,izs+1,updatetlevel) ENDIF DO iky = ikys,ikye DO ikx = ikxs,ikxe ikxBC_L = ikx_zBC_L(iky,ikx); IF (ikxBC_L .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! first-1 gets last moments_i(:,:,iky,ikx,izs-1,updatetlevel) = buff_pjxy_zBC_i(:,:,iky,ikxBC_L,-1) ! first-2 gets last-1 moments_i(:,:,iky,ikx,izs-2,updatetlevel) = buff_pjxy_zBC_i(:,:,iky,ikxBC_L,-2) ELSE moments_i(:,:,iky,ikx,izs-1,updatetlevel) = 0._dp moments_i(:,:,iky,ikx,izs-2,updatetlevel) = 0._dp ENDIF ikxBC_R = ikx_zBC_R(iky,ikx); IF (ikxBC_R .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! last+1 gets first moments_i(:,:,iky,ikx,ize+1,updatetlevel) = buff_pjxy_zBC_i(:,:,iky,ikxBC_R,+1) ! last+2 gets first+1 moments_i(:,:,iky,ikx,ize+2,updatetlevel) = buff_pjxy_zBC_i(:,:,iky,ikxBC_R,+2) ELSE moments_i(:,:,iky,ikx,ize+1,updatetlevel) = 0._dp moments_i(:,:,iky,ikx,ize+2,updatetlevel) = 0._dp ENDIF ENDDO ENDDO ENDIF END SUBROUTINE update_ghosts_z_i SUBROUTINE update_ghosts_z_phi USE parallel, ONLY : buff_xy_zBC USE fields, ONLY : phi IMPLICIT NONE INTEGER :: ikxBC_L, ikxBC_R CALL cpu_time(t1_ghost) IF(Nz .GT. 1) THEN IF (num_procs_z .GT. 1) THEN CALL MPI_BARRIER(MPI_COMM_WORLD,ierr) count = (ikye-ikys+1) * (ikxe-ikxs+1) !!!!!!!!!!! Send ghost to up neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv( phi(:,:,ize ), count, MPI_DOUBLE_COMPLEX, nbr_U, 30, & ! Send to Up the last buff_xy_zBC(:,:,-1), count, MPI_DOUBLE_COMPLEX, nbr_D, 30, & ! Receive from Down the first-1 comm0, status, ierr) CALL mpi_sendrecv( phi(:,:,ize-1), count, MPI_DOUBLE_COMPLEX, nbr_U, 31, & ! Send to Up the last buff_xy_zBC(:,:,-2), count, MPI_DOUBLE_COMPLEX, nbr_D, 31, & ! Receive from Down the first-2 comm0, status, ierr) !!!!!!!!!!! Send ghost to down neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv( phi(:,:,izs ), count, MPI_DOUBLE_COMPLEX, nbr_D, 32, & ! Send to Down the first buff_xy_zBC(:,:,+1), count, MPI_DOUBLE_COMPLEX, nbr_U, 32, & ! Recieve from Up the last+1 comm0, status, ierr) CALL mpi_sendrecv( phi(:,:,izs+1), count, MPI_DOUBLE_COMPLEX, nbr_D, 33, & ! Send to Down the first buff_xy_zBC(:,:,+2), count, MPI_DOUBLE_COMPLEX, nbr_U, 33, & ! Recieve from Up the last+2 comm0, status, ierr) ELSE buff_xy_zBC(:,:,-1) = phi(:,:,ize ) buff_xy_zBC(:,:,-2) = phi(:,:,ize-1) buff_xy_zBC(:,:,+1) = phi(:,:,izs ) buff_xy_zBC(:,:,+2) = phi(:,:,izs+1) ENDIF DO iky = ikys,ikye DO ikx = ikxs,ikxe ikxBC_L = ikx_zBC_L(iky,ikx); IF (ikxBC_L .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! first-1 gets last phi(iky,ikx,izs-1) = buff_xy_zBC(iky,ikxBC_L,-1) ! 
first-2 gets last-1 phi(iky,ikx,izs-2) = buff_xy_zBC(iky,ikxBC_L,-2) ELSE phi(iky,ikx,izs-1) = 0._dp phi(iky,ikx,izs-2) = 0._dp ENDIF ikxBC_R = ikx_zBC_R(iky,ikx); IF (ikxBC_R .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! last+1 gets first phi(iky,ikx,ize+1) = buff_xy_zBC(iky,ikxBC_R,+1) ! last+2 gets first+1 phi(iky,ikx,ize+2) = buff_xy_zBC(iky,ikxBC_R,+2) ELSE phi(iky,ikx,ize+1) = 0._dp phi(iky,ikx,ize+2) = 0._dp ENDIF ENDDO ENDDO ENDIF CALL cpu_time(t1_ghost) tc_ghost = tc_ghost + (t1_ghost - t0_ghost) END SUBROUTINE update_ghosts_z_phi SUBROUTINE update_ghosts_z_psi USE parallel, ONLY : buff_xy_zBC USE fields, ONLY : psi IMPLICIT NONE INTEGER :: ikxBC_L, ikxBC_R CALL cpu_time(t1_ghost) IF(Nz .GT. 1) THEN IF (num_procs_z .GT. 1) THEN CALL MPI_BARRIER(MPI_COMM_WORLD,ierr) count = (ikye-ikys+1) * (ikxe-ikxs+1) !!!!!!!!!!! Send ghost to up neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv( psi(:,:,ize ), count, MPI_DOUBLE_COMPLEX, nbr_U, 40, & ! Send to Up the last buff_xy_zBC(:,:,-1), count, MPI_DOUBLE_COMPLEX, nbr_D, 40, & ! Receive from Down the first-1 comm0, status, ierr) CALL mpi_sendrecv( psi(:,:,ize-1), count, MPI_DOUBLE_COMPLEX, nbr_U, 41, & ! Send to Up the last buff_xy_zBC(:,:,-2), count, MPI_DOUBLE_COMPLEX, nbr_D, 41, & ! Receive from Down the first-2 comm0, status, ierr) !!!!!!!!!!! Send ghost to down neighbour !!!!!!!!!!!!!!!!!!!!!! CALL mpi_sendrecv( psi(:,:,izs ), count, MPI_DOUBLE_COMPLEX, nbr_D, 42, & ! Send to Down the first buff_xy_zBC(:,:,+1), count, MPI_DOUBLE_COMPLEX, nbr_U, 42, & ! Recieve from Up the last+1 comm0, status, ierr) CALL mpi_sendrecv( psi(:,:,izs+1), count, MPI_DOUBLE_COMPLEX, nbr_D, 43, & ! Send to Down the first buff_xy_zBC(:,:,+2), count, MPI_DOUBLE_COMPLEX, nbr_U, 43, & ! Recieve from Up the last+2 comm0, status, ierr) ELSE buff_xy_zBC(:,:,-1) = psi(:,:,ize ) buff_xy_zBC(:,:,-2) = psi(:,:,ize-1) buff_xy_zBC(:,:,+1) = psi(:,:,izs ) buff_xy_zBC(:,:,+2) = psi(:,:,izs+1) ENDIF DO iky = ikys,ikye DO ikx = ikxs,ikxe ikxBC_L = ikx_zBC_L(iky,ikx); IF (ikxBC_L .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! first-1 gets last psi(iky,ikx,izs-1) = buff_xy_zBC(iky,ikxBC_L,-1) ! first-2 gets last-1 psi(iky,ikx,izs-2) = buff_xy_zBC(iky,ikxBC_L,-2) ELSE psi(iky,ikx,izs-1) = 0._dp psi(iky,ikx,izs-2) = 0._dp ENDIF ikxBC_R = ikx_zBC_R(iky,ikx); IF (ikxBC_R .NE. -99) THEN ! Exchanging the modes that have a periodic pair (a) ! last+1 gets first psi(iky,ikx,ize+1) = buff_xy_zBC(iky,ikxBC_R,+1) ! last+2 gets first+1 psi(iky,ikx,ize+2) = buff_xy_zBC(iky,ikxBC_R,+2) ELSE psi(iky,ikx,ize+1) = 0._dp psi(iky,ikx,ize+2) = 0._dp ENDIF ENDDO ENDDO ENDIF CALL cpu_time(t1_ghost) tc_ghost = tc_ghost + (t1_ghost - t0_ghost) END SUBROUTINE update_ghosts_z_psi END MODULE ghosts
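! --------------------------------------------------------------------------
! Standalone sketch (illustrative, not part of this patch): the nearest-
! neighbour ghost exchange used throughout this module, reduced to a 1D real
! array. Each rank sends its last interior cell "up" and its first interior
! cell "down" with MPI_SENDRECV, mirroring the moments/phi/psi exchanges
! above. In the real module the received planes are additionally remapped
! through ikx_zBC_L/ikx_zBC_R (sheared boundary condition), and modes whose
! partner index is the -99 sentinel get zeroed ghosts. Rank layout, tags and
! names below are hypothetical.
! --------------------------------------------------------------------------
PROGRAM ghost_exchange_sketch
  USE mpi
  IMPLICIT NONE
  INTEGER, PARAMETER :: nloc = 4         ! interior cells per rank
  REAL    :: f(0:nloc+1)                 ! one ghost cell on each side
  INTEGER :: ierr, rank, nproc, nbr_up, nbr_down, istat(MPI_STATUS_SIZE)
  CALL MPI_INIT(ierr)
  CALL MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)
  CALL MPI_COMM_SIZE(MPI_COMM_WORLD, nproc, ierr)
  nbr_up   = MODULO(rank+1, nproc)       ! periodic ring, like the z direction
  nbr_down = MODULO(rank-1, nproc)
  f(1:nloc) = REAL(rank)                 ! fill the interior with the rank id
  ! send the last interior cell up, receive the lower ghost from below
  CALL MPI_SENDRECV(f(nloc), 1, MPI_REAL, nbr_up,   0, &
                    f(0),    1, MPI_REAL, nbr_down, 0, &
                    MPI_COMM_WORLD, istat, ierr)
  ! send the first interior cell down, receive the upper ghost from above
  CALL MPI_SENDRECV(f(1),      1, MPI_REAL, nbr_down, 1, &
                    f(nloc+1), 1, MPI_REAL, nbr_up,   1, &
                    MPI_COMM_WORLD, istat, ierr)
  PRINT *, 'rank', rank, 'ghosts:', f(0), f(nloc+1)
  CALL MPI_FINALIZE(ierr)
END PROGRAM ghost_exchange_sketch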