[PETSC #17275] Rank of the neighboring processors

Mehdi Bostandoost mbostandoust at yahoo.com
Thu Feb 7 16:36:38 CST 2008


Hi,
   
  I am working on the Navier-Stokes equations and Lagrangian particles using PETSc. I am done with the main code. I had the same problem that you described, so for this code I already wrote a couple of subroutines that take care of this issue for DA objects, but it would be a really good addition to PETSc if they could add that.
   
  Here is the subroutine (I don't have this subroutine in my code exactly like this; I just copied and pasted different parts of it from different parts of my code to make an example).
   
  subroutine findneighbours
  implicit none
! PETSc include files for Fortran (needed here for PETSC_COMM_WORLD,
! PETSC_DECIDE, DA_NONPERIODIC, DA_STENCIL_BOX and the DA type; the
! paths may differ between PETSc versions)
#include "include/finclude/petsc.h"
#include "include/finclude/petscda.h"
  
  DA daa                       ! the distributed array
  integer :: status            ! allocate/deallocate status
  integer, parameter :: logfile = 6  ! error output unit (stdout here; a log file in my code)
  external programabort        ! abort routine from my own code
  
  integer :: domainsize        ! number of processes
  integer :: rank              ! rank of this process
  
  integer :: xs,xe,xm          ! owned range in x: start, end, width
  integer :: gxs,gxe,gxm       ! ghosted range in x: start, end, width
  integer :: ys,ye,ym          ! owned range in y
  integer :: gys,gye,gym       ! ghosted range in y
  integer :: zs,ze,zm          ! owned range in z
  integer :: gzs,gze,gzm       ! ghosted range in z
  integer :: xsl,ysl,zsl ! local index beginning
  integer :: xel,yel,zel ! local index ending
  integer,allocatable:: xsdomains(:)
  integer,allocatable:: xedomains(:)
  integer,allocatable:: ysdomains(:)
  integer,allocatable:: yedomains(:)
  integer,allocatable:: zsdomains(:)
  integer,allocatable:: zedomains(:)
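  ! ranks of the neighbouring subdomains on each side
  ! (-10 = no neighbour, i.e. physical boundary)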
  integer::xleftdomainrank
  integer::yleftdomainrank
  integer::zleftdomainrank
  integer::xrightdomainrank
  integer::yrightdomainrank
  integer::zrightdomainrank
  integer::nprocx
  integer::nprocy
  integer::nprocz 
  
  integer::nx=25               ! global grid points in x
  integer::ny=35               ! global grid points in y
  integer::nz=45               ! global grid points in z
  
  integer::dof1=3              ! degrees of freedom per grid point
  
  integer::s=4                 ! stencil width of the DA
  integer :: ierr
  integer :: i
  
  call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
  CHKERRQ(ierr) 
  !------------------------------------- 
  call MPI_Comm_size(PETSC_COMM_WORLD,domainsize,ierr)
  CHKERRQ(ierr)
  call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
  CHKERRQ(ierr) 
  
  !------------------------------------- 
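  ! one entry per process: start and end indices of every rank's
  ! subdomain in each direction, filled by MPI_ALLGATHER further below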
  ALLOCATE(xsdomains(domainsize),STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory allocation problem xsdomain'
  call programabort
  endif
  xsdomains(:)=-10
  !-------------------------------------
  ALLOCATE(xedomains(domainsize),STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory allocation problem xedomain'
  call programabort
  endif
  xedomains(:)=-10
  !-------------------------------------
  ALLOCATE(ysdomains(domainsize),STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory allocation problem ysdomain'
  call programabort
  endif
  ysdomains(:)=-10
  !-------------------------------------
  ALLOCATE(yedomains(domainsize),STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory allocation problem yedomain'
  call programabort
  endif
  yedomains(:)=-10
  !-------------------------------------
  ALLOCATE(zsdomains(domainsize),STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory allocation problem zsdomain'
  call programabort
  endif
  zsdomains(:)=-10
  !-------------------------------------
  ALLOCATE(zedomains(domainsize),STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory allocation problem zedomain'
  call programabort
  endif
  zedomains(:)=-10
  !------------------------------------- 
  
  call DACreate3d(PETSC_COMM_WORLD,DA_NONPERIODIC,DA_STENCIL_BOX,
  & nx,ny,nz,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,
  & dof1,s, 
  & PETSC_NULL_INTEGER,PETSC_NULL_INTEGER, 
  & PETSC_NULL_INTEGER,daa,ierr) 
  CHKERRQ(ierr) 
  call DAGetCorners(daa,xs,ys,zs,xm,ym,zm,ierr)
  CHKERRQ(ierr) 
  call DAGetGhostCorners(daa,gxs,gys,gzs,gxm,gym,gzm,ierr)
  CHKERRQ(ierr) 
  ! DAGetCorners() returns the zero-based starting indices and the widths
  ! of the owned subdomain; convert them to the 1-based start/end ranges
  ! (xs..xe, ys..ye, zs..ze) that the neighbour search below relies on
  xs=xs+1
  ys=ys+1
  zs=zs+1
  xe=xs+xm-1
  ye=ys+ym-1
  ze=zs+zm-1
  !------------------------------------- 
  
  !**************************************** 
  ! finding the neighbors
  !**************************************** 
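  ! Strategy: every rank gathers the owned index ranges of all ranks;
  ! the neighbour in a given direction is then the rank whose range ends
  ! (or starts) directly next to ours in that direction and matches ours
  ! exactly in the other two directions.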
  
  call MPI_ALLGATHER(xs,1,MPI_INTEGER, 
  & xsdomains ,1,MPI_INTEGER, 
  & PETSC_COMM_WORLD,ierr)
  CHKERRQ(ierr) 
  call MPI_ALLGATHER(xe,1,MPI_INTEGER, 
  & xedomains ,1,MPI_INTEGER, 
  & PETSC_COMM_WORLD,ierr)
  CHKERRQ(ierr) 
  call MPI_ALLGATHER(ys,1,MPI_INTEGER, 
  & ysdomains ,1,MPI_INTEGER, 
  & PETSC_COMM_WORLD,ierr)
  CHKERRQ(ierr) 
  call MPI_ALLGATHER(ye,1,MPI_INTEGER, 
  & yedomains ,1,MPI_INTEGER, 
  & PETSC_COMM_WORLD,ierr)
  CHKERRQ(ierr) 
  call MPI_ALLGATHER(zs,1,MPI_INTEGER, 
  & zsdomains ,1,MPI_INTEGER, 
  & PETSC_COMM_WORLD,ierr)
  CHKERRQ(ierr) 
  call MPI_ALLGATHER(ze,1,MPI_INTEGER, 
  & zedomains ,1,MPI_INTEGER, 
  & PETSC_COMM_WORLD,ierr)
  CHKERRQ(ierr)
   
  ! -10 means "no neighbour on that side"; a subdomain that touches the
  ! physical boundary (xs==1, xe==nx, and so on) simply keeps this value,
  ! because the searches below will not find a matching rank there
  xleftdomainrank=-10
  yleftdomainrank=-10
  zleftdomainrank=-10
  xrightdomainrank=-10
  yrightdomainrank=-10
  zrightdomainrank=-10
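  ! neighbours on the low-index side in x, y and z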
   
  do i=1,domainsize
  if((xs-1)==xedomains(i) .and. 
  & ys==ysdomains(i) .and. ye==yedomains(i) .and.
  & zs==zsdomains(i) .and. ze==zedomains(i)
  & )then
  xleftdomainrank=i-1
  endif
  if((ys-1)==yedomains(i) .and. 
  & xs==xsdomains(i) .and. xe==xedomains(i) .and.
  & zs==zsdomains(i) .and. ze==zedomains(i)
  & )then
  yleftdomainrank=i-1
  endif
  if((zs-1)==zedomains(i) .and. 
  & xs==xsdomains(i) .and. xe==xedomains(i) .and.
  & ys==ysdomains(i) .and. ye==yedomains(i)
  & )then
  zleftdomainrank=i-1
  endif
  enddo
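  ! the same search for the neighbours on the high-index side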
  do i=1,domainsize
  if((xe+1)==xsdomains(i) .and.
  & ys==ysdomains(i) .and. ye==yedomains(i) .and.
  & zs==zsdomains(i) .and. ze==zedomains(i)
  & )then
  xrightdomainrank=i-1
  endif
  if((ye+1)==ysdomains(i) .and.
  & xs==xsdomains(i) .and. xe==xedomains(i) .and.
  & zs==zsdomains(i) .and. ze==zedomains(i)
  & )then
  yrightdomainrank=i-1
  endif
  if((ze+1)==zsdomains(i) .and.
  & xs==xsdomains(i) .and. xe==xedomains(i) .and.
  & ys==ysdomains(i) .and. ye==yedomains(i)
  & )then
  zrightdomainrank=i-1
  endif
  enddo
  !**************************************** 
  ! end of finding the neighbors
  !**************************************** 
   
  !-------------------------------------
  DEALLOCATE(xsdomains,STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory deallocation problem'
  call programabort
  endif
  !-------------------------------------
  DEALLOCATE(xedomains,STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory deallocation problem'
  call programabort
  endif
  !-------------------------------------
  DEALLOCATE(ysdomains,STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory deallocation problem'
  call programabort
  endif
  !-------------------------------------
  DEALLOCATE(yedomains,STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory deallocation problem'
  call programabort
  endif
  !-------------------------------------
  DEALLOCATE(zsdomains,STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory deallocation problem'
  call programabort
  endif
  !-------------------------------------
  DEALLOCATE(zedomains,STAT=status)
  if(status/=0)then
  write(logfile,*) 'memory deallocation problem'
  call programabort
  endif
  !-------------------------------------
  call DADestroy(daa,ierr)
  call PetscFinalize(ierr)
  return
  end 
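   
  Once the neighbour ranks are known, handing a particle over is ordinary MPI. The following is only a rough sketch of the sending side (it is not taken from my code; the routine name, the particle array layout and the message tags are made up for this example):
   
  subroutine sendparticles(neighbourrank,np,pdata)
  implicit none
  include 'mpif.h'
  integer :: neighbourrank        ! e.g. xrightdomainrank from findneighbours
  integer :: np                   ! number of particles leaving on that side
  double precision :: pdata(6,*)  ! made-up layout: 3 coordinates + 3 velocities
  integer :: ierr
  
  ! -10 means there is no neighbour on that side (physical boundary)
  if(neighbourrank<0) return
  
  ! first tell the neighbour how many particles are coming,
  ! then send the particle data itself
  call MPI_Send(np,1,MPI_INTEGER,neighbourrank,0,
  & MPI_COMM_WORLD,ierr)
  call MPI_Send(pdata,6*np,MPI_DOUBLE_PRECISION,neighbourrank,1,
  & MPI_COMM_WORLD,ierr)
  
  return
  end
   
  The receiving rank posts the matching MPI_Recv (or MPI_Irecv) with the same tags and then takes over as the "master" of those particles.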
   
  I hope it will be helpful. If you have questions, come back to me.
   
  Best regards,
   
  Mehdi
   
  

Barry Smith <bsmith at mcs.anl.gov> wrote:
  
On Feb 6, 2008, at 6:38 AM, Tobias Kempe wrote:

> Hi Barry,
>
> I have to propose an additional PETSc feature that could be
> interesting for some users. In our Lagrangian particle tracking it is
> necessary to send the particle data to some specific processor - the
> new particle "master processor", which performs the evaluation of the
> equations of motion for this particle.
>
> In general we can do this with the MPI_Isend, MPI_Irecv routines.
>
> In the PETSc environment this may be realized by an MPI_vector

Do you mean a PETSc vector of type VECMPI, that is a parallel Vec?

> with a different
> number of entries on each processor. The entries of this MPI_vector
> may be addressed by their index and the rank of the processor where
> you wish to set the value.

Do you mean something like

VecSetValues(vec,n,ranks,localindices,values)

where ranks, localindices, and values are arrays of length n? Much like
VecSetValues(), with the "global indices" replaced by rank/local-index pairs?

Or do you want something more like a VecScatter operation, where the
"global indices" are replaced by rank/local-index pairs?
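
For reference, this is roughly how the existing VecSetValues() is used from
Fortran, with entries addressed purely by their global indices (a minimal
sketch; the include paths may need adjusting for your PETSc version):

  subroutine setexample(x)
  implicit none
#include "include/finclude/petsc.h"
#include "include/finclude/petscvec.h"
  Vec x
  PetscInt n
  PetscInt ix(2)
  PetscScalar vals(2)
  PetscErrorCode ierr

  ! global indices of the entries to set, regardless of which process owns them
  n = 2
  ix(1) = 0
  ix(2) = 5
  vals(1) = 1.0
  vals(2) = 2.0
  call VecSetValues(x,n,ix,vals,INSERT_VALUES,ierr)
  ! any off-process values are communicated during assembly
  call VecAssemblyBegin(x,ierr)
  call VecAssemblyEnd(x,ierr)

  return
  end

The variant you describe would keep this call shape but address each entry
by a rank and a local index instead of a global index.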

Barry

>
>
> Perhaps it is possible to realize this feature in a future PETSc
> release.
>
> Best regards,
>
> Tobias
>
>
>
>
> On Saturday, 2 February 2008, you wrote:
>> Tobias,
>>
>> I have added DAGetNeighbors(). You will need to use petsc-dev;
>> see the developers page link from www.mcs.anl.gov/petsc
>>
>> Please let us know if you have any troubles.
>>
>> Barry
>>
>> On Feb 1, 2008, at 3:00 AM, Tobias Kempe wrote:
>>> Hi PETSc team,
>>>
>>> we are using distributed arrays for the communication between the
>>> processors in our domain decomposition. Normally the user does not
>>> have to worry about the rank of the neighboring processors. Now we
>>> want to compute large numbers of particles in viscous fluids. The
>>> problem is that when a particle leaves the physical domain, another
>>> processor becomes the "master" of this particle. Against the
>>> background of this Lagrangian particle tracking it becomes necessary
>>> to determine the ranks of the neighboring processors in order to
>>> send particle-specific data to these processors.
>>>
>>> Is there some PETSc routine to determine the rank of the neighboring
>>> processors in the east, west, front ..... and so on?
>>>
>>> Thank you very much.
>>>
>>> Best regards
>>>
>>> Tobias Kempe
>>>
>>>
>>>
>>>
>>> *****************************************************
>>> Dipl.-Ing. Tobias Kempe
>>> Technische Universität Dresden
>>> Institut für Strömungsmechanik
>>> Tel.: 0351-46336651
>>> Fax: 0351-46335246
>>> E-Mail: tobias.kempe at ism.mw.tu-dresden.de
>>> *****************************************************
>
>
>
> -- 
> *****************************************************
> Dipl.-Ing. Tobias Kempe
> Technische Universität Dresden
> Institut für Strömungsmechanik
> Tel.: 0351-46336651
> Fax: 0351-46335246
> E-Mail: tobias.kempe at ism.mw.tu-dresden.de
> *****************************************************
>



       