InitializeMPI Subroutine

public subroutine InitializeMPI()

Uses

  • openacc


Arguments

None

Calls

  • acc_get_num_devices
  • acc_set_device_num
  • mpi_bcast
  • mpi_cart_coords
  • mpi_cart_create
  • mpi_cart_shift
  • mpi_comm_rank
  • mpi_comm_set_errhandler
  • mpi_comm_size
  • mpi_comm_split
  • mpi_init

Called by

  • main (program)
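
For context, a minimal caller sketch. The hosting module name mpimod and the explicit finalization step are illustrative assumptions; only the fact that program main calls InitializeMPI is taken from this page.

program main
  use mpi
  use mpimod, only : InitializeMPI   ! hypothetical module hosting this routine
  implicit none
  integer :: ierr

  call InitializeMPI                 ! calls MPI_INIT, builds comm3d, binds a GPU
  ! ... domain-decomposed solver work using comm3d, coords, neighbour ranks ...

  call MPI_FINALIZE(ierr)            ! InitializeMPI never finalizes; the driver must
end program main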

Source Code

subroutine InitializeMPI
  use openacc
  implicit none
  integer::key,color
  integer::np_hyd

! Initialize MPI
  call MPI_INIT( ierr )
  call MPI_COMM_SIZE( MPI_COMM_WORLD, nprocs_w, ierr )
  call MPI_COMM_RANK( MPI_COMM_WORLD, myid_w  , ierr )
  
  if(myid_w == 0) then
     print *, "MPI process=",nprocs_w
     print *, "decomposition=",ntiles(1),ntiles(2),ntiles(3)
  endif

  call MPI_BCAST(ntiles,3,MPI_INTEGER,0,MPI_COMM_WORLD,ierr)
  call MPI_BCAST(periodic,3,MPI_LOGICAL,0,MPI_COMM_WORLD,ierr)

! Making the 3D structure: split MPI_COMM_WORLD into hydro groups
! of ntiles(1)*ntiles(2)*ntiles(3) ranks each
  np_hyd = ntiles(1)*ntiles(2)*ntiles(3)
  color = int(myid_w/np_hyd)
  key   = myid_w   
  call MPI_COMM_SPLIT(MPI_COMM_WORLD,color,key,mpi_comm_hyd,ierr)
  call MPI_COMM_SIZE( mpi_comm_hyd, nprocs_hyd, ierr )
  call MPI_COMM_RANK( mpi_comm_hyd, myid_hyd , ierr )     
  
! Create a virtual Cartesian topology for the domain decomposition.
!
  call MPI_CART_CREATE( mpi_comm_hyd, 3, ntiles, periodic &
       &                    , reorder, comm3d, ierr )
  call MPI_COMM_RANK( comm3d, myid,     ierr )
  call MPI_COMM_SIZE( comm3d, nprocs,   ierr )
!
! Find the ranks of my neighbors; find my virtual Cartesian coords.
!
  call MPI_CART_SHIFT( comm3d, 0, 1, n1m, n1p, ierr )
  call MPI_CART_SHIFT( comm3d, 1, 1, n2m, n2p, ierr )
  call MPI_CART_SHIFT( comm3d, 2, 1, n3m, n3p, ierr )
  !
  call MPI_CART_COORDS( comm3d, myid, 3, coords, ierr )

!> debug: let MPI return error codes on comm3d instead of aborting
  call MPI_Comm_set_errhandler(comm3d, MPI_ERRORS_RETURN, ierr)
  
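! Bind each rank to an OpenACC NVIDIA device, round-robin over the GPUs found
! (gpuid stays -1 when no device is available)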
  ngpus = acc_get_num_devices(acc_device_nvidia)
  if(myid_w == 0) then
     print *, "num of GPUs = ", ngpus
  end if

  if(ngpus == 0) then
     gpuid = -1
  else
     gpuid = mod(myid_w, ngpus)
  endif
  
  if(gpuid >= 0) then
     call acc_set_device_num(gpuid, acc_device_nvidia)
  end if
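! Refresh the device copy of myid_w now that the rank is known
! (myid_w is assumed to be declared for the device elsewhere)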
!$acc update device (myid_w)
  return
end subroutine InitializeMPI
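
The communicator, coordinates, and neighbour ranks produced by this routine are what a domain-decomposed solver needs for halo exchanges. Below is a minimal sketch under assumed names: the field layout u(0:n1+1, n2, n3), the helper ExchangeHalo1, and passing n1m/n1p/comm3d as arguments are illustrative choices, not part of this code base.

subroutine ExchangeHalo1(u, n1, n2, n3, n1m, n1p, comm3d)
  use mpi
  implicit none
  integer, intent(in) :: n1, n2, n3, n1m, n1p, comm3d
  real(8), intent(inout) :: u(0:n1+1, n2, n3)
  integer :: ierr, stat(MPI_STATUS_SIZE), ncell

  ncell = n2*n3

! Shift "up": send the top interior plane to n1p, receive the lower ghost
! plane from n1m (an MPI_PROC_NULL neighbour makes either half a no-op).
  call MPI_SENDRECV( u(n1,:,:), ncell, MPI_DOUBLE_PRECISION, n1p, 0 &
       &           , u(0 ,:,:), ncell, MPI_DOUBLE_PRECISION, n1m, 0 &
       &           , comm3d, stat, ierr )
! Shift "down": send the bottom interior plane to n1m, receive the upper
! ghost plane from n1p.
  call MPI_SENDRECV( u(1   ,:,:), ncell, MPI_DOUBLE_PRECISION, n1m, 1 &
       &           , u(n1+1,:,:), ncell, MPI_DOUBLE_PRECISION, n1p, 1 &
       &           , comm3d, stat, ierr )
end subroutine ExchangeHalo1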