[petsc-users] Constructing a MATNEST with blocks defined in different procs

Diogo FERREIRA SABINO diogo.ferreira-sabino at univ-tlse3.fr
Tue Apr 9 09:36:37 CDT 2019


Hi Mark,
Thank you for the quick answer.

So, I defined each block of the Nest matrix as a MATMPIAIJ on the World communicator, and I set the local sizes so that A00 and A11 live on processors 0 and 1, respectively.
However, I am now unable to set the off-diagonal blocks of the Nest matrix. For example, if I try to set block (0,1) of the nest to A00, the following error occurs:

[0]PETSC ERROR: Arguments are incompatible
[0]PETSC ERROR: Local sizes (2,2) of nested submatrix (0,1) do not agree with space defined by index sets (2,0)

I'm using the following lines to generate the Nest matrix:

  AfullNESTpointer[0]=Ablocks[0];
  AfullNESTpointer[1]=Ablocks[0];   //Not working
  // AfullNESTpointer[1]=NULL;       //It works
  AfullNESTpointer[2]=NULL;
  AfullNESTpointer[3]=Ablocks[1];

  MatCreate(PETSC_COMM_WORLD,&AfullNEST);
  MatSetSizes(AfullNEST,PETSC_DECIDE,PETSC_DECIDE,4,4);
  MatCreateNest(PETSC_COMM_WORLD,2,NULL,2,NULL,AfullNESTpointer,&AfullNEST);
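
If I understand the message correctly, the (0,1) slot must combine the local row layout of block row 0 (2 rows on proc 0, inherited from A00) with the local column layout of block column 1 (0 columns on proc 0, inherited from A11), so A00 itself can never fit in that slot. My untested guess, where A01 is an extra matrix created just for that slot (reusing MPIrank from the test below), would be:

  Mat A01;
  MatCreate(PETSC_COMM_WORLD,&A01);
  // rank 0: 2 local rows, 0 local cols; rank 1: 0 local rows, 2 local cols
  MatSetSizes(A01,(MPIrank==0)?2:0,(MPIrank==1)?2:0,PETSC_DECIDE,PETSC_DECIDE);
  MatSetType(A01,MATMPIAIJ);
  MatSetUp(A01);
  MatAssemblyBegin(A01,MAT_FINAL_ASSEMBLY);  MatAssemblyEnd(A01,MAT_FINAL_ASSEMBLY);
  AfullNESTpointer[1]=A01;

Is that the intended way to fill an off-diagonal block?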

A simple test is given below; launch it with: mpirun -n 2 ./Main_petsc.exe

Thanks in advance,
Diogo

static char help[]     = "Create MPI Nest";
#include <petscmat.h>
#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **argv)
{
  PetscInitialize(&argc,&argv,(char*)0,help);
  //////////////////////////////////////////////////////////////////////////////
  PetscErrorCode  ierr;
  PetscInt        i,PetscIntTEMP;
  //////////////////////////////////////////////////////////////////////////////
  PetscMPIInt MPIrank,MPIsize;
  MPI_Comm_rank(PETSC_COMM_WORLD,&MPIrank);
  MPI_Comm_size(PETSC_COMM_WORLD,&MPIsize);
  ///////////////////////////////////////////////////////////////   Create Mats:
  Mat Ablocks[MPIsize];

  for(i=0;i<MPIsize;i++){
    MatCreate(PETSC_COMM_WORLD,&Ablocks[i]);

    PetscIntTEMP = (i==MPIrank) ? 2 : 0;
    MatSetSizes(Ablocks[i],PetscIntTEMP,PetscIntTEMP,PETSC_DECIDE,PETSC_DECIDE);
    MatSetType(Ablocks[i],MATMPIAIJ);
    MatMPIAIJSetPreallocation(Ablocks[i],PetscIntTEMP,NULL,PetscIntTEMP,NULL);

    PetscInt ISstart,ISend;
    MatGetOwnershipRange(Ablocks[i],&ISstart,&ISend); //Check info
    PetscSynchronizedPrintf(PETSC_COMM_WORLD,"proc(%d): Ablocks[%D]:(%D to %D) \n",MPIrank,i,ISstart,ISend);
  }
  PetscSynchronizedFlush(PETSC_COMM_WORLD,PETSC_STDOUT);

  // Each rank inserts values only into its own diagonal block (the other blocks have no local rows on this rank):
  MatSetValue(Ablocks[MPIrank],0,0,5+MPIrank,INSERT_VALUES);
  MatSetValue(Ablocks[MPIrank],0,1,10+MPIrank,INSERT_VALUES);
  MatSetValue(Ablocks[MPIrank],1,0,15+MPIrank,INSERT_VALUES);
  MatSetValue(Ablocks[MPIrank],1,1,20+MPIrank,INSERT_VALUES);

  for(i=0;i<MPIsize;i++){
    MatAssemblyBegin(Ablocks[i],MAT_FINAL_ASSEMBLY);     MatAssemblyEnd(Ablocks[i],MAT_FINAL_ASSEMBLY);
    // MatView(Ablocks[i],PETSC_VIEWER_STDOUT_WORLD);
  }
  ///////////////////////////////////////////////////////////////   Create Nest:
  Mat       AfullNEST, *AfullNESTpointer;
  Mat       AfullConverted;

  PetscMalloc1(4,&AfullNESTpointer);
  AfullNESTpointer[0]=Ablocks[0];
  AfullNESTpointer[1]=Ablocks[0];   //Not working
  // AfullNESTpointer[1]=NULL;       //It works
  AfullNESTpointer[2]=NULL;
  AfullNESTpointer[3]=Ablocks[1];

  MatCreate(PETSC_COMM_WORLD,&AfullNEST);
  // MatSetSizes(AfullNEST,2,2,PETSC_DECIDE,PETSC_DECIDE);
  MatSetSizes(AfullNEST,PETSC_DECIDE,PETSC_DECIDE,4,4);
  // Note: MatCreateNest creates a brand-new Mat, so the handle from MatCreate above is overwritten (and leaked).
  MatCreateNest(PETSC_COMM_WORLD,2,NULL,2,NULL,AfullNESTpointer,&AfullNEST);

  MatView(AfullNEST,PETSC_VIEWER_STDOUT_WORLD);
  MatConvert(AfullNEST,MATMPIAIJ,MAT_INITIAL_MATRIX,&AfullConverted);
  MatView(AfullConverted,PETSC_VIEWER_STDOUT_WORLD);

  ierr = PetscFinalize(); CHKERRQ(ierr);  return 0;
}

On April 5, 2019, at 2:38:46 PM CEST, Mark Adams <mfadams at lbl.gov> wrote:

On Fri, Apr 5, 2019 at 7:19 AM Diogo FERREIRA SABINO via petsc-users <petsc-users at mcs.anl.gov> wrote:

Hi,
I'm new to PETSc and I'm trying to construct a MATNEST on two procs, by setting each block of the nested matrix with a MATMPIAIJ matrix defined on each proc.
I'm trying to use MatCreateNest or MatNestSetSubMats, but I have not been able to make it work.
Using MatNestSetSubMats, I try to construct the MATNEST by passing a pointer to the correct matrices depending on the MPI rank of each proc.
I get the following error message for the line: MatNestSetSubMats(AfullNEST,1,&IS_ROW,2,&IS_COL,AfullNESTpointer);
[0]PETSC ERROR: Invalid argument
[0]PETSC ERROR: Wrong type of object: Parameter # 5

Is there a way of doing it, or do all the blocks of the MATNEST have to exist in the same communicator as the MATNEST matrix?

Yes, they must all have the same communicator. A matrix can be empty on a process, so you just create them with the global communicator and set the local sizes that you want (e.g., 0 on some procs).
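
For example, an untested sketch (rank being this process's rank in PETSC_COMM_WORLD):

  Mat A00;
  MatCreate(PETSC_COMM_WORLD,&A00);  /* world communicator, not PETSC_COMM_SELF */
  MatSetSizes(A00,rank==0?2:0,rank==0?2:0,PETSC_DECIDE,PETSC_DECIDE);  /* empty everywhere but rank 0 */
  MatSetType(A00,MATMPIAIJ);
  MatSetUp(A00);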
 
A simple test is given below; launch it with: mpirun -n 2 ./Main_petsc.exe

static char help[]     = "Create MPI Nest";
#include <petscmat.h>

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **argv)
{
  PetscInitialize(&argc,&argv,(char*)0,help);
  //////////////////////////////////////////////////////////////////////////////
  PetscErrorCode  ierr;
  PetscMPIInt MPIrank,MPIsize;
  MPI_Comm_rank(PETSC_COMM_WORLD,&MPIrank);
  MPI_Comm_size(PETSC_COMM_WORLD,&MPIsize);

  ////////////////////////////////////////////////////////   Create Each Matrix:
  Mat Adiag;

  //Create a Adiag different on each proc:
  ierr = MatCreate(PETSC_COMM_SELF,&Adiag);                     CHKERRQ(ierr);
  ierr = MatSetSizes(Adiag,2,2,PETSC_DECIDE,PETSC_DECIDE);      CHKERRQ(ierr);
  ierr = MatSetType(Adiag,MATMPIAIJ);                           CHKERRQ(ierr);
  ierr = MatSetFromOptions(Adiag);                              CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(Adiag,2,NULL,2,NULL);        CHKERRQ(ierr);

  MatSetValue(Adiag,0,0,(MPIrank+5),INSERT_VALUES);
  MatSetValue(Adiag,0,1,(MPIrank+10),INSERT_VALUES);
  MatSetValue(Adiag,1,0,(MPIrank+15),INSERT_VALUES);
  MatSetValue(Adiag,1,1,(MPIrank+20),INSERT_VALUES);
  MatAssemblyBegin(Adiag,MAT_FINAL_ASSEMBLY);     MatAssemblyEnd(Adiag,MAT_FINAL_ASSEMBLY);

  ///////////////////////////////////////////////////////////////   Create Nest:
  MPI_Barrier(PETSC_COMM_WORLD);
  Mat       AfullNEST, *AfullNESTpointer;

  PetscMalloc1(2,&AfullNESTpointer);
  AfullNESTpointer[0]=NULL;
  AfullNESTpointer[1]=NULL;
  AfullNESTpointer[MPIrank]=Adiag;
  // Rank=0 --> AfullNESTpointer[0]=Adiag; AfullNESTpointer[1]=NULL;
  // Rank=1 --> AfullNESTpointer[0]=NULL;  AfullNESTpointer[1]=Adiag;

  IS        IS_ROW,IS_COL;
  ISCreateStride(PETSC_COMM_SELF,1,MPIrank,0,&IS_ROW);
  ISCreateStride(PETSC_COMM_SELF,2,0,1,&IS_COL);
  // Rank=0 --> IS_ROW= [ 0 ] ; IS_COL= [ 0, 1 ] ;
  // Rank=1 --> IS_ROW= [ 1 ] ; IS_COL= [ 0, 1 ] ;

  MatCreate(PETSC_COMM_WORLD,&AfullNEST);
  MatSetSizes(AfullNEST,2,2,PETSC_DECIDE,PETSC_DECIDE);
  // MatSetSizes(AfullNEST,PETSC_DECIDE,PETSC_DECIDE,4,4);
  // MatCreateNest(PETSC_COMM_WORLD,1,&IS_ROW,1,&IS_COL,AfullNESTpointer,&AfullNEST);
    ierr = MatNestSetSubMats(AfullNEST,1,&IS_ROW,2,&IS_COL,AfullNESTpointer); CHKERRQ(ierr);

  ierr = PetscFinalize(); CHKERRQ(ierr);
  return 0;
}