Program test

#include "petsc/finclude/petscdmda.h"
  use petscdmda
  implicit none

  DM :: daGlobal, daSub
  Vec :: vtmpParent, transVec, vtmp
  AO :: aoParent, aoSub
  IS :: from, to
  VecScatter :: parentToSub
  PetscInt, allocatable :: ind1(:), ind2(:)
  PetscErrorCode :: ierr, ioerr, alloc_err
  PetscScalar, pointer :: ptr_v(:,:,:,:)
  PetscScalar, pointer :: ptr_x1(:)
  PetscReal :: rval
  PetscInt :: nx, ny, nz
  PetscInt :: i, j, k, nis, iComp
  PetscInt :: M, mstart, mend, mlocal
  PetscInt :: xs, ys, zs, xm, ym, zm, xe, ye, ze
  PetscInt :: gxs, gys, gzs, gxm, gym, gzm, gxe, gye, gze
  PetscInt :: xs_s, ys_s, zs_s, xm_s, ym_s, zm_s, xe_s, ye_s, ze_s
  PetscInt :: gxs_s, gys_s, gzs_s, gxm_s, gym_s, gzm_s, gxe_s, gye_s, gze_s
  PetscBool :: flg
  PetscMPIInt :: size, rank, subrank, subsize, nproc_per_sub, nhostname, nsubs, sub
  character(len=200) :: cstring
  character(len=MPI_MAX_PROCESSOR_NAME) :: hostname
  MPI_Comm :: subcomm, comm_parent
  PetscMPIInt, parameter :: i0=0
  PetscInt, parameter :: i1=1
  PetscInt, parameter :: i2=2

  call PetscInitialize(PETSC_NULL_CHARACTER,ierr)

  ! Input some parameters. The defaults below are arbitrary fallbacks so the
  ! program is well defined when an option is not given on the command line.
  nx = 8
  ny = 8
  nz = 8
  nsubs = 2
  call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nx',nx,flg,ierr)
  call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-ny',ny,flg,ierr)
  call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nz',nz,flg,ierr)
  call PetscOptionsGetInt(PETSC_NULL_OPTIONS,PETSC_NULL_CHARACTER,'-nsubs',nsubs,flg,ierr)

  !=========================================================================================
  ! Create subcomms
  call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)
  call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
  call MPI_Get_processor_name(hostname,nhostname,ierr)

  if (mod(size,nsubs) /= 0) then
    cstring = 'The total number of MPI processes is not a multiple of -nsubs\n\n'
    call PetscPrintf(PETSC_COMM_WORLD,TRIM(cstring),ierr)
    ioerr = 1
    goto 999
  end if

  ! Initiate sub-communicators
  nsubs = min(size,nsubs)
  nproc_per_sub = size/nsubs
  sub = min(rank/nproc_per_sub, nsubs-1)

  ! Create subcommunicator and get team ranks
  call MPI_Comm_split(PETSC_COMM_WORLD,sub,i0,subcomm,ierr)
  call MPI_Comm_rank(subcomm,subrank,ierr)
  call MPI_Comm_size(subcomm,subsize,ierr)

  !=========================================================================================
  ! Set up DM on the entire communicator
  call DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, &
       DMDA_STENCIL_BOX,nx,ny,nz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,i2,i2, &
       PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,daGlobal,ierr)
  call DMSetUp(daGlobal,ierr)

  ! Get the global DMDA grid info for each processor and convert the zero-based
  ! corner indices to one-based inclusive ranges
  call DMDAGetCorners(daGlobal,xs,ys,zs,xm,ym,zm,ierr)
  call DMDAGetGhostCorners(daGlobal,gxs,gys,gzs,gxm,gym,gzm,ierr)
  xs = xs+1
  ys = ys+1
  zs = zs+1
  gxs = gxs+1
  gys = gys+1
  gzs = gzs+1
  xe = xs+xm-1
  ye = ys+ym-1
  ze = zs+zm-1
  gxe = gxs+gxm-1
  gye = gys+gym-1
  gze = gzs+gzm-1

  ! Set AO type here
  call DMDASetAOType(daGlobal,AOMEMORYSCALABLE,ierr)

  ! Set up DM on the subcommunicator
  call DMDACreate3d(subcomm,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE, &
       DMDA_STENCIL_BOX,nx,ny,nz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,i2,i2, &
       PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,PETSC_NULL_INTEGER,daSub,ierr)
  call DMSetUp(daSub,ierr)
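  ! Note: daSub describes the same nx x ny x nz grid as daGlobal, but every
  ! subcommunicator owns a complete copy of it. The application orderings (AO)
  ! of the two DMDAs are used below to map between the natural grid numbering
  ! and each distribution's PETSc numbering when building the scatter index sets.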
  ! Get the sub DMDA grid info for each processor and convert the zero-based
  ! corner indices to one-based inclusive ranges
  call DMDAGetCorners(daSub,xs_s,ys_s,zs_s,xm_s,ym_s,zm_s,ierr)
  call DMDAGetGhostCorners(daSub,gxs_s,gys_s,gzs_s,gxm_s,gym_s,gzm_s,ierr)
  xs_s = xs_s+1
  ys_s = ys_s+1
  zs_s = zs_s+1
  gxs_s = gxs_s+1
  gys_s = gys_s+1
  gzs_s = gzs_s+1
  xe_s = xs_s+xm_s-1
  ye_s = ys_s+ym_s-1
  ze_s = zs_s+zm_s-1
  gxe_s = gxs_s+gxm_s-1
  gye_s = gys_s+gym_s-1
  gze_s = gzs_s+gzm_s-1

  ! Set AO type here
  call DMDASetAOType(daSub,AOMEMORYSCALABLE,ierr)

  !=========================================================================================
  ! Create vector scatter from parent to subcomm
  !=========================================================================================
  ! Get communicator for parent DA
  call PetscObjectGetComm(daGlobal,comm_parent,ierr)

  ! Get application orderings
  call DMDAGetAO(daGlobal,aoParent,ierr)
  call DMDAGetAO(daSub,aoSub,ierr)

  ! Get vector on global comm and its size
  call DMGetGlobalVector(daGlobal,vtmpParent,ierr)
  call VecGetSize(vtmpParent,M,ierr)
  call VecGetOwnershipRange(vtmpParent,mstart,mend,ierr)
  mlocal = mend-mstart

  ! Create index sets for the scatter: every locally owned entry of the parent
  ! vector is sent to all nsubs copies, so the index sets have mlocal*nsubs entries
  nis = mlocal*nsubs
  allocate (ind1(0:nis-1), ind2(0:nis-1), STAT=alloc_err)
  call MPI_Allreduce(MPI_IN_PLACE,alloc_err,1,MPI_INTEGER,MPI_SUM,comm_parent,ierr)
  if (alloc_err /= 0) goto 999
  ind1 = 0
  ind2 = 0

  ! Fill ind1/ind2 with the natural (application) indices in this rank's ownership
  ! range, repeated once per subcommunicator
  j = 0
  do k = 0, nsubs-1
    do i = mstart, mend-1
      ind1(j) = i
      j = j+1
    end do
  end do
  ind2 = ind1

  ! Translate the natural indices to the PETSc ordering of each DMDA: ind1 becomes
  ! the source indices in the parent vector, ind2 the destination indices within one
  ! sub copy; then offset ind2 by M per subcommunicator so it addresses the
  ! concatenated target vector of global size M*nsubs
  call AOApplicationToPetsc(aoParent,nis,ind1,ierr)
  call AOApplicationToPetsc(aoSub,nis,ind2,ierr)
  j = 0
  do k = 0, nsubs-1
    do i = mstart, mend-1
      ind2(j) = ind2(j) + M*k
      j = j+1
    end do
  end do
  call ISCreateGeneral(comm_parent,nis,ind1,PETSC_COPY_VALUES,from,ierr)
  call ISCreateGeneral(comm_parent,nis,ind2,PETSC_COPY_VALUES,to,ierr)
  deallocate (ind1, ind2)

  ! Create an empty global vector on the parent communicator to describe the target
  ! of the scatter: the concatenation of one sub global vector per subcommunicator
  call DMGetGlobalVector(daSub,vtmp,ierr)
  call VecGetLocalSize(vtmp,mlocal,ierr)
  call VecCreateMPIWithArray(comm_parent,i1,mlocal,PETSC_DECIDE,PETSC_NULL_SCALAR,transVec,ierr)

  ! Create vector scatter
  call VecScatterCreate(vtmpParent,from,transVec,to,parentToSub,ierr)
  cstring = 'Successful VecScatter create\n'
  call PetscPrintf(PETSC_COMM_WORLD,TRIM(cstring),ierr)

  ! Release memory
  call ISDestroy(from,ierr)
  call ISDestroy(to,ierr)
  call VecDestroy(transVec,ierr)
  call DMRestoreGlobalVector(daSub,vtmp,ierr)

  !=========================================================================================
  ! Now do the vector scatter from parent to subcomm
  !=========================================================================================
  ! Set some elements in the global vector of the global DMDA; every entry encodes its
  ! component and natural (i,j,k) position as 1000*iComp + 100*i + 10*j + k
  call DMDAVecGetArrayF90(daGlobal,vtmpParent,ptr_v,ierr)
  do k = zs, ze
    do j = ys, ye
      do i = xs, xe
        do iComp = 1, i2
          ptr_v(iComp-1,i-1,j-1,k-1) = 1000.0*real(iComp) + 100.0*real(i) + 10.0*real(j) + real(k)
        end do
      end do
    end do
  end do
  call DMDAVecRestoreArrayF90(daGlobal,vtmpParent,ptr_v,ierr)

  ! Wrap each subcommunicator's global vector in a parent-communicator vector that
  ! shares its storage, then scatter the parent vector into it
  call DMGetGlobalVector(daSub,vtmp,ierr)
  call VecGetLocalSize(vtmp,mlocal,ierr)
  call VecGetArrayF90(vtmp,ptr_x1,ierr)
  call VecCreateMPIWithArray(comm_parent,i1,mlocal,PETSC_DECIDE,ptr_x1,transVec,ierr)
  call VecScatterBegin(parentToSub,vtmpParent,transVec,INSERT_VALUES,SCATTER_FORWARD,ierr)
  call VecScatterEnd(parentToSub,vtmpParent,transVec,INSERT_VALUES,SCATTER_FORWARD,ierr)
  call VecDestroy(transVec,ierr)
  call VecRestoreArrayF90(vtmp,ptr_x1,ierr)
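  ! After the scatter, each subcommunicator's vtmp should hold a full copy of the
  ! parent vector, so every entry should again equal 1000*iComp + 100*i + 10*j + k
  ! at its natural (i,j,k) position on the sub DMDA.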
  ! Check that the scatter was correct
  call DMDAVecGetArrayF90(daSub,vtmp,ptr_v,ierr)
  do k = zs_s, ze_s
    do j = ys_s, ye_s
      do i = xs_s, xe_s
        do iComp = 1, i2
          rval = 1000.0*real(iComp) + 100.0*real(i) + 10.0*real(j) + real(k)
          !print*,sub,subrank,i,j,k,rval,ptr_v(iComp-1,i-1,j-1,k-1)
          if (rval /= ptr_v(iComp-1,i-1,j-1,k-1)) then
            print*, 'problem ',rank,i,j,k,rval,ptr_v(iComp-1,i-1,j-1,k-1)
          end if
        end do
      end do
    end do
  end do
  call DMDAVecRestoreArrayF90(daSub,vtmp,ptr_v,ierr)

  ! Release memory
  call DMRestoreGlobalVector(daGlobal,vtmpParent,ierr)
  call DMRestoreGlobalVector(daSub,vtmp,ierr)
  call VecScatterDestroy(parentToSub,ierr)
  call DMDestroy(daSub,ierr)
  call DMDestroy(daGlobal,ierr)
  call MPI_Comm_free(subcomm,ierr)

999 continue
  call PetscFinalize(ierr)

end program test
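! Usage sketch (the executable name "./test" and the launcher "mpiexec" are assumptions,
! not part of the original code; any rank count that is a multiple of -nsubs works):
!   mpiexec -n 8 ./test -nx 16 -ny 16 -nz 16 -nsubs 2
! This runs two 4-rank subcommunicators, each receiving a full copy of the 16x16x16,
! 2-component parent vector.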