Defines MPI derived type for communicating ghostcells.
| Type | Intent | Optional | Attributes | Name | Description |
|---|---|---|---|---|---|
| class(block_obj) | intent(inout) | | | this | A block object |
impure subroutine block_obj_SetupMPITypes(this)
  !> Rebuilds the MPI derived datatypes used to exchange ghostcell slabs.
  !! Any previously committed slab types are freed before new strided
  !! vector types are created for each direction, in both real and
  !! integer flavors.
  implicit none
  class(block_obj), intent(inout) :: this !! A block object

  ! Work variables
  integer :: npts(3)       ! grid extent per direction, ghostcells included
  integer :: blk_count(3)  ! number of blocks in each slab type
  integer :: blk_length(3) ! contiguous elements per block
  integer :: blk_stride(3) ! element stride between consecutive blocks
  integer :: idir
  integer :: ierr

  ! Release any datatypes left over from a previous setup
  do idir = 1, 3
     if (this%gc_slab_r(idir) /= MPI_DATATYPE_NULL) &
        call MPI_TYPE_FREE(this%gc_slab_r(idir), ierr)
     if (this%gc_slab_i(idir) /= MPI_DATATYPE_NULL) &
        call MPI_TYPE_FREE(this%gc_slab_i(idir), ierr)
  end do

  associate (lo => this%lo, hi => this%hi, ngc => this%ngc, &
             parallel => this%parallel)

    ! Number of grid points per direction, including ghostcells
    npts = hi - lo + 1 + 2*ngc

    ! Vector-type layout for the ghost slab normal to each direction
    blk_count  = [npts(2)*npts(3), npts(3),         1                      ]
    blk_length = [ngc,             npts(1)*ngc,     npts(2)*npts(1)*ngc    ]
    blk_stride = [npts(1),         npts(1)*npts(2), npts(1)*npts(2)*npts(3)]

    ! Commit one strided vector type per direction for real and integer data
    do idir = 1, 3
       call MPI_TYPE_VECTOR(blk_count(idir), blk_length(idir), blk_stride(idir), &
                            parallel%REAL_WP, this%gc_slab_r(idir), ierr)
       call MPI_TYPE_COMMIT(this%gc_slab_r(idir), ierr)
       call MPI_TYPE_VECTOR(blk_count(idir), blk_length(idir), blk_stride(idir), &
                            parallel%INTEGER, this%gc_slab_i(idir), ierr)
       call MPI_TYPE_COMMIT(this%gc_slab_i(idir), ierr)
    end do

  end associate

end subroutine block_obj_SetupMPITypes