Multilevel solver

Amit.Itagi at seagate.com Amit.Itagi at seagate.com
Thu Apr 24 12:01:44 CDT 2008


Matt,

So, putting everything together, I wrote this simple 4x4 example (to be run
on two processes).


=========================================================================================

#include <iostream>
#include <complex>
#include <stdlib.h>
#include "petsc.h"
#include "petscmat.h"
#include "petscvec.h"
#include "petscksp.h"

using namespace std;

int main( int argc, char *argv[] ) {

  int rank, size;
  Mat A;
  PetscErrorCode ierr;
  int nrow, ncol, loc;
  Vec x, b;
  KSP solver;
  PC prec;
  IS is1, is2;

  PetscScalar val;

  // Matrix dimensions
  nrow=4;
  ncol=4;

  // Per-row preallocation: number of non-zeros in the diagonal and off-diagonal blocks
  int d_nnz1[2], d_nnz2[2], o_nnz1[2],o_nnz2[2];

  d_nnz1[0]=2;
  o_nnz1[0]=2;
  d_nnz1[1]=2;
  o_nnz1[1]=2;

  d_nnz2[0]=2;
  o_nnz2[0]=2;
  d_nnz2[1]=2;
  o_nnz2[1]=2;


  ierr=PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL); CHKERRQ(ierr);
  ierr=MPI_Comm_size(PETSC_COMM_WORLD,&size); CHKERRQ(ierr);
  ierr=MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);


  // Matrix assembly
  if(rank==0) {
    ierr=MatCreateMPIAIJ(PETSC_COMM_WORLD,2,2,4,4,0,d_nnz1,0,o_nnz1,&A); CHKERRQ(ierr);
    val=complex<double>(2.0,3.0);
    ierr=MatSetValue(A,0,0,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(5.0,-1.0);
    ierr=MatSetValue(A,0,1,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,2.0);
    ierr=MatSetValue(A,0,2,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,-1.0);
    ierr=MatSetValue(A,0,3,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(5.0,-1.0);
    ierr=MatSetValue(A,1,0,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(2.0,0.0);
    ierr=MatSetValue(A,1,1,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(3.0,0.0);
    ierr=MatSetValue(A,1,2,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,0.0);
    ierr=MatSetValue(A,1,3,val,INSERT_VALUES);CHKERRQ(ierr);
  }
  else if(rank==1) {
    ierr=MatCreateMPIAIJ(PETSC_COMM_WORLD,2,2,4,4,0,d_nnz2,0,o_nnz2,&A); CHKERRQ(ierr);
    val=complex<double>(1.0,2.0);
    ierr=MatSetValue(A,2,0,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(3.0,0.0);
    ierr=MatSetValue(A,2,1,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(0.0,2.0);
    ierr=MatSetValue(A,2,2,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,0.0);
    ierr=MatSetValue(A,2,3,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,-1.0);
    ierr=MatSetValue(A,3,0,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,0.0);
    ierr=MatSetValue(A,3,1,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,0.0);
    ierr=MatSetValue(A,3,2,val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(2.0,0.0);
    ierr=MatSetValue(A,3,3,val,INSERT_VALUES);CHKERRQ(ierr);
  }
  else {
    ierr=MatCreateMPIAIJ(PETSC_COMM_WORLD,0,0,4,4,0,PETSC_NULL,0,PETSC_NULL,&A); CHKERRQ(ierr);
  }


  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);


  ierr=PetscSynchronizedPrintf(PETSC_COMM_WORLD,"Defined matrix\n"); CHKERRQ(ierr);
  ierr=PetscSynchronizedFlush(PETSC_COMM_WORLD); CHKERRQ(ierr);
  ierr=MatView(A,PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);


  // Vector assembly

  // Allocate memory for the vectors
  if(rank==0) {
    ierr=VecCreateMPI(PETSC_COMM_WORLD,2,4,&x); CHKERRQ(ierr);
    val=complex<double>(1.0,0.0);
    loc=0;
    ierr=VecSetValues(x,1,&loc,&val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(-1.0,0.0);
    loc=1;
    ierr=VecSetValues(x,1,&loc,&val,INSERT_VALUES);CHKERRQ(ierr);
  }
  else if(rank==1) {
    ierr=VecCreateMPI(PETSC_COMM_WORLD,2,4,&x); CHKERRQ(ierr);
    val=complex<double>(1.0,1.0);
    loc=2;
    ierr=VecSetValues(x,1,&loc,&val,INSERT_VALUES);CHKERRQ(ierr);
    val=complex<double>(1.0,0.0);
    loc=3;
    ierr=VecSetValues(x,1,&loc,&val,INSERT_VALUES);CHKERRQ(ierr);
  }
  else {
    ierr=VecCreateMPI(PETSC_COMM_WORLD,0,4,&x); CHKERRQ(ierr);
  }
  ierr=VecAssemblyBegin(x); CHKERRQ(ierr);
  ierr=VecAssemblyEnd(x); CHKERRQ(ierr);


  ierr=PetscSynchronizedPrintf(PETSC_COMM_WORLD,"Defined vector\n"); CHKERRQ(ierr);
  ierr=PetscSynchronizedFlush(PETSC_COMM_WORLD); CHKERRQ(ierr);
  ierr=VecView(x,PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);

  // Storage for the solution
  ierr=VecDuplicate(x,&b); CHKERRQ(ierr);

  // Create the Field Split index sets
  PetscInt idxA[2], idxB[2];

  idxA[0]=0;
  idxA[1]=1;
  idxB[0]=2;
  idxB[1]=3;

  ierr=ISCreateGeneral(PETSC_COMM_WORLD,2,idxA,&is1); CHKERRQ(ierr);
  ierr=ISCreateGeneral(PETSC_COMM_WORLD,2,idxB,&is2); CHKERRQ(ierr);


  // Krylov Solver

  ierr=KSPCreate(PETSC_COMM_WORLD,&solver); CHKERRQ(ierr);
  ierr=KSPSetOperators(solver,A,A,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
  ierr=KSPSetType(solver,KSPGMRES); CHKERRQ(ierr);

  // Pre-conditioner
  ierr=KSPGetPC(solver,&prec); CHKERRQ(ierr);

  ierr=PCSetType(prec,PCFIELDSPLIT); CHKERRQ(ierr);

  ierr=PCFieldSplitSetIS(prec,is1); CHKERRQ(ierr);
  ierr=PCFieldSplitSetIS(prec,is2); CHKERRQ(ierr);
  ierr=PCFieldSplitSetType(prec,PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE); CHKERRQ(ierr);

  ierr=PCSetFromOptions(prec); CHKERRQ(ierr);
  ierr=KSPSetFromOptions(solver); CHKERRQ(ierr);

  ierr=PetscSynchronizedPrintf(PETSC_COMM_WORLD,"Set KSP/PC options\n"); CHKERRQ(ierr);
  ierr=PetscSynchronizedFlush(PETSC_COMM_WORLD); CHKERRQ(ierr);


  ierr=PetscSynchronizedPrintf(PETSC_COMM_WORLD,"Solving the equation\n"); CHKERRQ(ierr);
  ierr=PetscSynchronizedFlush(PETSC_COMM_WORLD); CHKERRQ(ierr);

  ierr=KSPSolve(solver,x,b); CHKERRQ(ierr);

  ierr=PetscSynchronizedPrintf(PETSC_COMM_WORLD,"Solving over\n"); CHKERRQ(ierr);
  ierr=PetscSynchronizedFlush(PETSC_COMM_WORLD); CHKERRQ(ierr);


  ierr=PetscSynchronizedPrintf(PETSC_COMM_WORLD,"\nThe solution\n"); CHKERRQ(ierr);
  ierr=PetscSynchronizedFlush(PETSC_COMM_WORLD); CHKERRQ(ierr);
  ierr=VecView(b,PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);

  // Clean up
  ierr=VecDestroy(x); CHKERRQ(ierr);
  ierr=VecDestroy(b); CHKERRQ(ierr);
  ierr=MatDestroy(A); CHKERRQ(ierr);
  ierr=KSPDestroy(solver); CHKERRQ(ierr);
  ierr=ISDestroy(is1); CHKERRQ(ierr);
  ierr=ISDestroy(is2); CHKERRQ(ierr);

  // Finalize
  ierr=PetscFinalize(); CHKERRQ(ierr);


  return 0;

}


==============================================================================================

I run the program with

mpiexec -np 2 ./main -fieldsplit_0_pc_type sor -fieldsplit_0_ksp_type preonly -fieldsplit_1_pc_type sor -fieldsplit_1_ksp_type preonly >& err

In the KSPSolve step, I get an error. Here is the output of my run.

=================================================================================================

Defined matrix
Defined matrix
row 0: (0, 2 + 3 i) (1, 5 - 1 i) (2, 1 + 2 i) (3, 1 - 1 i)
row 1: (0, 5 - 1 i) (1, 2)  (2, 3)  (3, 1)
row 2: (0, 1 + 2 i) (1, 3)  (2, 0 + 2 i) (3, 1)
row 3: (0, 1 - 1 i) (1, 1)  (2, 1)  (3, 2)
Defined vector
Defined vector
Process [0]
1
-1
Process [1]
1 + 1 i
1
Set KSP/PC options
Set KSP/PC options
Solving the equation
Solving the equation
[1]PETSC ERROR: --------------------- Error Message ------------------------------------
[1]PETSC ERROR: Nonconforming object sizes!
[1]PETSC ERROR: Local column sizes 0 do not add up to total number of columns 4!
[1]PETSC ERROR: ------------------------------------------------------------------------
[1]PETSC ERROR: Petsc Development Version 2.3.3, Patch 12, unknown HG revision: unknown
[1]PETSC ERROR: See docs/changes/index.html for recent updates.
[1]PETSC ERROR: See docs/faq.html for hints about trouble shooting.
[1]PETSC ERROR: See docs/index.html for manual pages.
[1]PETSC ERROR: ------------------------------------------------------------------------
[1]PETSC ERROR: ./main on a linux-gnu named tabla by amit Thu Apr 24 13:04:57 2008
[1]PETSC ERROR: Libraries linked from /home/amit/programs/ParEM/petsc-dev/lib
[1]PETSC ERROR: Configure run at Wed Apr 23 22:02:21 2008
[1]PETSC ERROR: Configure options --with-scalar-type=complex --with-debugging=yes --with-clanguage=cxx --with-mpi=1 --download-mpich=1 --with-metis=1
--download-metis=1 --with-parmetis=1 --download-parmetis=1 --with-superlu_dist=1 --download-superlu_dist=1 --with-mumps=1 --download-blacs=1
--download-scalapack=1 --download-mumps=1 --with-shared=0
[1]PETSC ERROR: ------------------------------------------------------------------------
[1]PETSC ERROR: MatGetSubMatrix_MPIAIJ() line 2974 in src/mat/impls/aij/mpi/mpiaij.c
[1]PETSC ERROR: MatGetSubMatrix() line 5956 in src/mat/interface/matrix.c
[1]PETSC ERROR: PCSetUp_FieldSplit() line 177 in src/ksp/pc/impls/fieldsplit/fieldsplit.c
[1]PETSC ERROR: PCSetUp() line 788 in src/ksp/pc/interface/precon.c
[1]PETSC ERROR: KSPSetUp() line 234 in src/ksp/ksp/interface/itfunc.c
[1]PETSC ERROR: KSPSolve() line 350 in src/ksp/ksp/interface/itfunc.c
[1]PETSC ERROR: User provided function() line 175 in main.cpp
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: Caught signal number 13 Broken Pipe: Likely while reading or writing to a socket
[0]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
[0]PETSC ERROR: or see http://www.mcs.anl.gov/petsc/petsc-as/documentation/troubleshooting.html#Signal
[0]PETSC ERROR: or try http://valgrind.org on linux or man libgmalloc on Apple to find memory corruption errors
[0]PETSC ERROR: likely location of problem given in stack below
[0]PETSC ERROR: ---------------------  Stack Frames ------------------------------------
[0]PETSC ERROR: Note: The EXACT line numbers in the stack are not available,
[0]PETSC ERROR:       INSTEAD the line number of the start of the function
[0]PETSC ERROR:       is given.
[0]PETSC ERROR: [0] MatSetType line 45 src/mat/interface/matreg.c
[0]PETSC ERROR: [0] PCSetUp line 765 src/ksp/pc/interface/precon.c
[0]PETSC ERROR: [0] KSPSetUp line 183 src/ksp/ksp/interface/itfunc.c
[0]PETSC ERROR: [0] KSPSolve line 305 src/ksp/ksp/interface/itfunc.c
[0]PETSC ERROR: --------------------- Error Message ------------------------------------
[0]PETSC ERROR: Signal received!
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: Petsc Development Version 2.3.3, Patch 12, unknown HG revision: unknown
[0]PETSC ERROR: See docs/changes/index.html for recent updates.
[0]PETSC ERROR: See docs/faq.html for hints about trouble shooting.
[0]PETSC ERROR: See docs/index.html for manual pages.
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: ./main on a linux-gnu named tabla by amit Thu Apr 24 13:04:57 2008
[0]PETSC ERROR: Libraries linked from /home/amit/programs/ParEM/petsc-dev/lib
[0]PETSC ERROR: Configure run at Wed Apr 23 22:02:21 2008
[0]PETSC ERROR: Configure options --with-scalar-type=complex --with-debugging=yes --with-clanguage=cxx --with-mpi=1 --download-mpich=1 --with-metis=1
--download-metis=1 --with-parmetis=1 --download-parmetis=1 --with-superlu_dist=1 --download-superlu_dist=1 --with-mumps=1 --download-blacs=1
--download-scalapack=1 --download-mumps=1 --with-shared=0
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: User provided function() line 0 in unknown directory unknown file
application called MPI_Abort(MPI_COMM_WORLD, 59) - process 0[cli_0]: aborting job:
application called MPI_Abort(MPI_COMM_WORLD, 59) - process 0


What am I doing wrong?

Thanks

Rgds,
Amit



                                                                           
             "Matthew Knepley"                                             
             <knepley at gmail.co                                             
             m>                                                         To 
             Sent by:                  petsc-users at mcs.anl.gov             
             owner-petsc-users                                          cc 
             @mcs.anl.gov                                                  
             No Phone Info                                         Subject 
             Available                 Re: Multilevel solver               
                                                                           
                                                                           
             04/24/2008 12:28                                              
             PM                                                            
                                                                           
                                                                           
             Please respond to                                             
             petsc-users at mcs.a                                             
                  nl.gov                                                   
                                                                           
                                                                           




On Thu, Apr 24, 2008 at 11:07 AM,  <Amit.Itagi at seagate.com> wrote:
>
>              "Matthew Knepley"
>              <knepley at gmail.co
>
>              m>
To
>              Sent by:                  petsc-users at mcs.anl.gov
>              owner-petsc-users
cc
>              @mcs.anl.gov
>              No Phone Info
Subject
>              Available                 Re: Multilevel solver
>
>
>              04/24/2008 10:19
>              AM
>
>
>
>              Please respond to
>              petsc-users at mcs.a
>                   nl.gov
>
>
>
>
>
>
>
>
>
>
>  >  3) Since I want to set PC type to lu for field 0, and I want to use MUMPS
>  >  for parallel LU, where do I set the submatrix type to MATAIJMUMPS ? In this
>  >  case, will a second copy of the submatrix be generated - one of type MUMPS
>  >  for the PC and the other of the original MATAIJ type for the KSP ?
>
>  I will have to check. However if we are consistent, then it should be
>
>   -field_split_0_mat_type aijmumps
>
>
>  Can these options be set inside the code, instead of on the command line?

Yes, however it becomes more complicated. You must pull these objects out of
the FieldSplitPC (not that hard), but you must also be careful to set the
values after options are read in, but before higher level things are
initialized (like the outer solver), so it can be somewhat delicate. I would
not advise hardcoding things which you are likely to change based upon
architecture, problem, etc. However, if you want to, the easiest way is to
use PetscSetOption().
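
For example (a minimal sketch, assuming this means pre-loading the options
database with PetscOptionsSetValue() in its two-argument form before
KSPSetFromOptions() is called; it reuses the "solver" object from the listing
above, and the option names are the ones Barry gives further below):

  ierr=PetscOptionsSetValue("-fieldsplit_0_pc_type","lu"); CHKERRQ(ierr);
  ierr=PetscOptionsSetValue("-fieldsplit_0_ksp_type","preonly"); CHKERRQ(ierr);
  ierr=PetscOptionsSetValue("-fieldsplit_1_pc_type","sor"); CHKERRQ(ierr);
  ierr=PetscOptionsSetValue("-fieldsplit_1_ksp_type","preonly"); CHKERRQ(ierr);
  ierr=KSPSetFromOptions(solver); CHKERRQ(ierr);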

  Matt

>  >  4) How is the PC applied when I do
>  >  PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE ?
>
>  It is just the composition of the preconditioners, which is what you
>  want here.
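
(For concreteness, paraphrasing the three steps from the original post quoted
further below, one symmetric multiplicative sweep over the two splits is
roughly

    e1 = inv(A11) * b1
    e2 = inv(A22) * (b2 - A21*e1)
    e1 = inv(A11) * (b1 - A12*e2)

i.e. a forward-then-backward block Gauss-Seidel pass applied as the
preconditioner.)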
>
>   Matt
>
>  >  Thanks
>  >
>  >  Rgds,
>  >  Amit
>  >
>  >
>  >
>  >
>  >  On 04/22/2008 10:08 PM, Barry Smith <bsmith at mcs.anl.gov> wrote to petsc-users at mcs.anl.gov (Subject: Re: Multilevel solver):
>  >
>  >   Amit,
>  >
>  >      Using a PCSHELL should be fine (it can be used with GMRES),
>  >  my guess is there is a memory corruption error somewhere that is
>  >  causing the crash. This could be tracked down with www.valgrind.com
>  >
>  >     Another way to you could implement this is with some very recent
>  >  additions I made to PCFIELDSPLIT that are in petsc-dev
>  >  (http://www-unix.mcs.anl.gov/petsc/petsc-as/developers/index.html)
>  >  With this you would choose
>  >  PCSetType(pc,PCFIELDSPLIT)
>  >  PCFieldSplitSetIS(pc,is1)
>  >  PCFieldSplitSetIS(pc,is2)
>  >  PCFieldSplitSetType(pc,PC_COMPOSITE_SYMMETRIC_MULTIPLICATIVE)
>  >  To use LU on A11, use the command line options
>  >  -fieldsplit_0_pc_type lu -fieldsplit_0_ksp_type preonly
>  >  and SOR on A22
>  >  -fieldsplit_1_pc_type sor -fieldsplit_1_ksp_type preonly
>  >  -fieldsplit_1_pc_sor_lits <lits>
>  >  where <lits> is the number of iterations you want to use on block A22.
>  >
>  >  is1 is the IS that contains the indices for all the vector entries in
>  >  the 1 block, while is2 is all indices in the vector for the 2 block.
>  >  You can use ISCreateGeneral() to create these.
>  >
>  >    Probably it is easiest just to try this out.
>  >
>  >    Barry
>  >
>  >
>  >  On Apr 22, 2008, at 8:45 PM, Amit.Itagi at seagate.com wrote:
>  >
>  >  >
>  >  > Hi,
>  >  >
>  >  > I am trying to implement a multilevel method for an EM problem. The
>  >  > reference is : "Comparison of hierarchical basis functions for
>  >  > efficient
>  >  > multilevel solvers", P. Ingelstrom, V. Hill and R. Dyczij-Edlinger,
>  >  > IET
>  >  > Sci. Meas. Technol. 2007, 1(1), pp 48-52.
>  >  >
>  >  > Here is the summary:
>  >  >
>  >  > The matrix equation Ax=b is solved using GMRES with a multilevel
>  >  > pre-conditioner. A has a block structure.
>  >  >
>  >  > A11    A12       *         x1  =  b1
>  >  > A21    A22                  x2       b2
>  >  >
>  >  > A11 is mxm and A22 is nxn, where m is not equal to n.
>  >  >
>  >  > Step 1:        Solve  A11 * e1 = b1             (parallel LU using SuperLU or MUMPS)
>  >  >
>  >  > Step 2:        Solve  A22 * e2 = b2 - A21*e1    (might use either a SOR solver or a parallel LU)
>  >  >
>  >  > Step 3:        Solve  A11 * e1 = b1 - A12*e2    (parallel LU)
>  >  >
>  >  > This gives the approximate solution to
>  >  >
>  >  > A11     A12     *      e1   =  b1
>  >  > A21     A22             e2       b2
>  >  >
>  >  > and is used as the pre-conditioner for the GMRES.
>  >  >
>  >  >
>  >  > Which PETSc method can implement this pre-conditioner ? I tried a
>  >  > PCSHELL type PC. With Hong's help, I also got the parallel LU to work
>  >  > with SuperLU/MUMPS. My program runs successfully on multiple processes
>  >  > on a single machine. But when I submit the program over multiple
>  >  > machines, I get a crash in the PCApply routine after several GMRES
>  >  > iterations. I think this has to do with using PCSHELL with GMRES
>  >  > (which is not a good idea). Is there a different way to implement
>  >  > this ? Does this resemble the usage pattern of one of the AMG
>  >  > preconditioners ?
>  >  >
>  >  >
>  >  > Thanks
>  >  >
>  >  > Rgds,
>  >  > Amit
>  >  >
>  >
>  >
>  >
>  >
>
>
>
>  --
>  What most experimenters take for granted before they begin their
>  experiments is infinitely more interesting than any results to which
>  their experiments lead.
>  -- Norbert Wiener
>
>
>
>



--
What most experimenters take for granted before they begin their
experiments is infinitely more interesting than any results to which
their experiments lead.
-- Norbert Wiener





