collision_obj_SetupCollisionBlock2 Subroutine

private impure subroutine collision_obj_SetupCollisionBlock2(this, ds, ngc, block)

Uses

  • leapBlock (transitively: leapKinds, leapParallel, leapIO_hdf5, leapUtils, mpi_f08, hdf5, iso_fortran_env)

Initializes cblock to handle collisions. This extra block is expected to be coarser than the simulation block, with cells larger than the maximum object size. It is used to expedite neighbor searches.

With this subroutine, each MPI rank owns a LOCAL view of the collision block.

Type Bound

collision_obj

Arguments

Type                  Intent         Attributes  Name   Description
class(collision_obj)  intent(inout)              this   Collision utility
real(kind=wp)         intent(in)                 ds     Target grid spacing
integer               intent(in)                 ngc    Number of ghost cells for collision block
type(block_obj)       intent(in)     optional    block  Optional block to conform to

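A minimal calling sketch (hedged: the names col and sim_block, the spacing 0.1_wp, and ngc=2 are illustrative placeholders, not taken from this project, and the generic binding is assumed to be SetupCollisionBlock2; only the dummy arguments come from the interface above):

    ! Hypothetical driver: build a collision block that conforms to an
    ! existing simulation block, using 2 ghost-cell layers.
    type(collision_obj) :: col        ! assumed initialized elsewhere
    type(block_obj)     :: sim_block  ! assumed fully set up elsewhere

    call col%SetupCollisionBlock2(ds=0.1_wp, ngc=2, block=sim_block)

    ! With block omitted, the routine conforms to the block attached to
    ! col%RP or col%PP, and stops if neither pointer is associated.
    call col%SetupCollisionBlock2(ds=0.1_wp, ngc=2)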

Calls

  • block_obj%Initialize, block_obj%SetPeriodicity, block_obj%SetupUniformGrid,
    block_obj%GetOwningCellWGC, block_obj%SetConveniencePointers,
    block_obj%UpdateGridGhostCells, block_obj%UpdateMidPoints,
    block_obj%UpdateSpacing, block_obj%SetupMPITypes, and axis_obj%Finalize

Source Code

    impure subroutine collision_obj_SetupCollisionBlock2(this,ds,ngc,block)
      !> Initializes cblock to handle collisions. This
      ! extra block is expected to be coarser than the
      ! simulation block, but larger than the maximum
      ! object size. It is used to expedite neighbor
      ! searches.
      !
      ! With this subroutine, each MPI rank owns a
      ! LOCAL view of the collision block.
      use leapBlock, only : axis_obj
      implicit none
      class(collision_obj), intent(inout) :: this                              !! Collision utility
      real(wp),             intent(in)    :: ds                                !! Target grid spacing
      integer,              intent(in)    :: ngc                               !! Number of ghost cells for collision block
      type(block_obj),      intent(in),    &
                                 optional :: block                             !! Optional block to conform to
      ! work variables
      logical :: periods(3)
      integer :: lo(3)
      integer :: hi(3)
      real(wp):: xhi(3)
      real(wp):: xlo(3)
      real(wp):: local_xhi(3)
      real(wp):: local_xlo(3)
      integer :: Nb(3)
      integer :: dir,i
      integer :: minlo(3)
      integer :: maxhi(3)
      type(axis_obj):: axis_global(3)

      ! Figure out domain extents and periodicity
      if (present(block)) then
        xlo = block%pmin
        xhi = block%pmax
        periods = block%periods
        local_xlo = block%xlo
        local_xhi = block%xhi
      else if (associated(this%RP)) then
        xlo = this%RP%block%pmin
        xhi = this%RP%block%pmax
        local_xlo = this%RP%block%xlo
        local_xhi = this%RP%block%xhi
        periods = this%RP%block%periods
      else if (associated(this%PP)) then
        xlo = this%PP%block%pmin
        xhi = this%PP%block%pmax
        periods = this%PP%block%periods
        local_xlo = this%PP%block%xlo
        local_xhi = this%PP%block%xhi
      else
        call this%parallel%Stop("Cannot setup collision block without associated data")
      end if

      ! Number of grid points
      lo  = [1,1,1]
      hi  = floor((xhi-xlo)/ds)
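      ! Note: because of the floor, the realized spacing (xhi-xlo)/hi is
      ! at least ds, so the collision grid is never finer than requested.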

      ! Initialize the collision block object
      call this%cblock%Initialize(ngc,this%parallel)

      ! Impose same periodicity as main simulation block
      call this%cblock%SetPeriodicity(periods)

      ! Create a uniform grid
      call this%cblock%SetupUniformGrid(xlo,xhi,lo,hi)

      ! Partition to match simulation block as closely as possible

      ! Store global indices
      minlo = lo
      maxhi = hi

      ! Store global axis prior to subdividing
      do dir=1,3
        call axis_global(dir)%Initialize(minlo(dir),maxhi(dir),ngc)
        axis_global(dir)%x = this%cblock%axis(dir)%x
      end do

      ! Switch flag on
      this%cblock%is_partitioned=.true.

      ! Decomposition
      Nb = this%parallel%np

      ! Get bounds of sub-block owned by this MPI rank
      this%cblock%hi = min(max(this%cblock%GetOwningCellWGC(local_xhi),     minlo),maxhi)
      this%cblock%lo = min(max(this%cblock%GetOwningCellWGC(local_xlo) + 1, minlo),maxhi)

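      ! The first rank along each direction also claims the global lower
      ! bound so that no cells are lost to rounding in GetOwningCellWGC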
      do dir=1,3
        if (this%parallel%rank%dir(dir).eq.1) then
          this%cblock%lo(dir) = lo(dir)
        end if
      end do

      ! Redefine hi/lo to be the local ones
      lo = this%cblock%lo
      hi = this%cblock%hi

      ! Reinitialize axes
      do dir=1,3
        call this%cblock%axis(dir)%Finalize()
        call this%cblock%axis(dir)%Initialize(lo(dir),hi(dir),ngc)
      end do

      ! Associate pointers
      call this%cblock%SetConveniencePointers

      ! Get local axes coordinates from global axes
      do dir=1,3
        do i=lo(dir),hi(dir)+1
          this%cblock%axis(dir)%x(i) = axis_global(dir)%x(i)
        end do
      end do

      ! Remove global arrays
      do dir=1,3
        call axis_global(dir)%Finalize
      end do

      ! Update values within ghostcells
      call this%cblock%UpdateGridGhostCells()

      ! Update mid points (xm)
      call this%cblock%UpdateMidPoints()

      ! Update spacing (dxm)
      call this%cblock%UpdateSpacing()

      ! Create MPI type for ghostcell communication
      call this%cblock%SetupMPITypes()

      ! Allocate neighbor list
      if (associated(this%RP)) then
        ! Allocate neighbor list
        allocate(this%RPneighbors(lo(1)-ngc:hi(1)+ngc,lo(2)-ngc:hi(2)+ngc,lo(3)-ngc:hi(3)+ngc))
        allocate(this%RPobjincell(lo(1)-ngc:hi(1)+ngc,lo(2)-ngc:hi(2)+ngc,lo(3)-ngc:hi(3)+ngc))
      end if
      if (associated(this%IB)) then
        ! Allocate neighbor list
        allocate(this%IBneighbors(lo(1)-ngc:hi(1)+ngc,lo(2)-ngc:hi(2)+ngc,lo(3)-ngc:hi(3)+ngc))
        allocate(this%IBobjincell(lo(1)-ngc:hi(1)+ngc,lo(2)-ngc:hi(2)+ngc,lo(3)-ngc:hi(3)+ngc))
      end if
      if (associated(this%PP)) then
        ! Allocate neighbor list
        allocate(this%PPneighbors(lo(1)-ngc:hi(1)+ngc,lo(2)-ngc:hi(2)+ngc,lo(3)-ngc:hi(3)+ngc))
        allocate(this%PPobjincell(lo(1)-ngc:hi(1)+ngc,lo(2)-ngc:hi(2)+ngc,lo(3)-ngc:hi(3)+ngc))
      end if

      return
    end subroutine collision_obj_SetupCollisionBlock2
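
The neighbor and object-in-cell arrays above are dimensioned over the local cell range extended by ngc ghost layers per side. As a hedged sketch (assuming a collision_obj named col whose RP pointer is associated, and that cblock%lo/hi and the RP arrays are accessible from the caller), a traversal of only the owned, non-ghost cells would look like:

    ! Hypothetical loop over owned collision cells (ghost layers excluded);
    ! i, j, k are assumed declared as integers in the calling scope.
    do k = col%cblock%lo(3), col%cblock%hi(3)
      do j = col%cblock%lo(2), col%cblock%hi(2)
        do i = col%cblock%lo(1), col%cblock%hi(1)
          ! ... inspect col%RPneighbors(i,j,k) and col%RPobjincell(i,j,k) ...
        end do
      end do
    end do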