block_obj_SetupMPITypes Subroutine

private impure subroutine block_obj_SetupMPITypes(this)

Defines the MPI derived datatypes used to communicate ghost cells: one real-valued and one integer-valued slab type per direction.

Type Bound

block_obj

Arguments

Type               Intent    Optional    Attributes    Name
class(block_obj)   inout                               this

A block object


Calls

block_obj%block_obj_SetupMPITypes calls:

  mpi_type_vector
  mpi_type_commit
  mpi_type_free

Called by

block_obj%block_obj_SetupMPITypes is called directly by:

  block_obj%block_obj_Partition
  block_obj%block_obj_Read
  block_obj%block_obj_SetupUniformGrid
  collision_obj%collision_obj_SetupCollisionBlock2

It is also reached indirectly through block_obj%Initialize and the
cdifs_obj%cdifs_obj_PrepareSolver and grans_obj%grans_obj_PrepareSolver
preparation chains.

Source Code

    impure subroutine block_obj_SetupMPITypes(this)
      !> Defines the MPI derived datatypes used to communicate ghost cells.
      implicit none
      class(block_obj), intent(inout) :: this                                  !! A block object
      ! Work variables
      integer :: Ng(3)
      integer :: count(3)
      integer :: length(3)
      integer :: stride(3)
      integer :: dir
      integer :: ierr

      ! Free any previously committed datatypes so this routine
      ! can be called again (e.g., after repartitioning)
      do dir=1,3
        if (this%gc_slab_r(dir).ne.MPI_DATATYPE_NULL) &
          call MPI_TYPE_FREE(this%gc_slab_r(dir),ierr)

        if (this%gc_slab_i(dir).ne.MPI_DATATYPE_NULL) &
          call MPI_TYPE_FREE(this%gc_slab_i(dir),ierr)
      end do

      associate (lo => this%lo, hi => this%hi, ngc => this%ngc, &
        parallel => this%parallel)
        ! Number of grid points in each direction (including ghost cells)
        Ng=hi-lo+1+2*ngc
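
        ! Each slab type selects ngc layers normal to one direction,
        ! expressed as a strided vector over the column-major
        ! (x-fastest) storage:
        !   dir=1: ngc cells in x for each of the Ng(2)*Ng(3) (j,k) pencils
        !   dir=2: ngc full x-lines in y for each of the Ng(3) k-planes
        !   dir=3: one contiguous run of ngc full xy-planes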

        count  = [Ng(2)*Ng(3), Ng(3),        1                ]
        length = [ngc        , Ng(1)*ngc,    Ng(2)*Ng(1)*ngc  ]
        stride = [Ng(1)      , Ng(1)*Ng(2),  Ng(1)*Ng(2)*Ng(3)]
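
        ! MPI_TYPE_VECTOR(count, blocklength, stride, ...) describes
        ! count blocks of blocklength contiguous elements, successive
        ! block starts stride elements apart (stride counted in
        ! elements of the old type, not bytes)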

        do dir=1,3
          call MPI_TYPE_VECTOR(count(dir), length(dir), stride(dir), parallel%REAL_WP, this%gc_slab_r(dir),ierr)
          call MPI_TYPE_COMMIT(this%gc_slab_r(dir),ierr)

          call MPI_TYPE_VECTOR(count(dir), length(dir), stride(dir), parallel%INTEGER, this%gc_slab_i(dir),ierr)
          call MPI_TYPE_COMMIT(this%gc_slab_i(dir),ierr)
        end do
      end associate

      return
    end subroutine block_obj_SetupMPITypes
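
The following standalone program is a minimal sketch (not part of this
library) of the dir=1 construction above: it builds the x-direction slab
type for one rank and copies the first interior x-layer into the low-x
ghost layer with a self MPI_SENDRECV. The array extents, the use of
MPI_DOUBLE_PRECISION in place of parallel%REAL_WP, and the self-exchange
are illustrative assumptions.

    program slab_type_demo
      use mpi
      implicit none
      integer, parameter :: nx = 4, ny = 3, nz = 2, ngc = 1
      integer :: Ng(3)
      integer :: slab_x
      integer :: rank, ierr
      real(8) :: a(1-ngc:nx+ngc, 1-ngc:ny+ngc, 1-ngc:nz+ngc)

      call MPI_INIT(ierr)
      call MPI_COMM_RANK(MPI_COMM_WORLD, rank, ierr)

      ! Grid size including ghost cells, as in the subroutine above
      Ng = [nx, ny, nz] + 2*ngc
      a  = real(rank, 8)

      ! Same construction as dir=1: ngc contiguous cells per (j,k)
      ! pencil, Ng(2)*Ng(3) pencils, pencil starts Ng(1) elements apart
      call MPI_TYPE_VECTOR(Ng(2)*Ng(3), ngc, Ng(1), MPI_DOUBLE_PRECISION, slab_x, ierr)
      call MPI_TYPE_COMMIT(slab_x, ierr)

      ! Self-exchange for illustration: the buffer origin selects which
      ! slab the type reads from (first interior x-layer) and writes to
      ! (low-x ghost layer); with real neighbors, dest/source would be
      ! the adjacent ranks instead of rank
      call MPI_SENDRECV(a(1,     1-ngc, 1-ngc), 1, slab_x, rank, 0, &
                        a(1-ngc, 1-ngc, 1-ngc), 1, slab_x, rank, 0, &
                        MPI_COMM_WORLD, MPI_STATUS_IGNORE, ierr)

      call MPI_TYPE_FREE(slab_x, ierr)
      call MPI_FINALIZE(ierr)
    end program slab_type_demo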