Direct LU solver

Matthew Knepley knepley at gmail.com
Thu Feb 28 14:45:48 CST 2008


I would recommend using KSP ex10 and then customizing with options.
That way we know it should work. For SuperLU_dist,

  -ksp_type preonly -pc_type lu -mat_type superlu_dist
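
For example, to read a matrix from a PETSc binary file and solve on 2
processes (a sketch; the file name is a placeholder, and -f0 is the
option ex10 uses for its input matrix):

  mpirun -np 2 ./ex10 -f0 mymatrix.bin -ksp_type preonly -pc_type lu -mat_type superlu_dist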

   Matt

On Thu, Feb 28, 2008 at 1:07 PM,  <Amit.Itagi at seagate.com> wrote:
> Hi,
>
>  I need to do direct LU solves (repeatedly, with the same matrix) in one of
>  my MPI applications. I am having trouble implementing the solver. To
>  identify the problem, I wrote a short toy code to run with 2 processes. It
>  can be run with either the spooles parallel matrix type or the
>  superlu_dist matrix type. I am using C++ and complex matrices.
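>
>  The reuse pattern I am after is simply repeated KSPSolve calls with the
>  same operator: the LU factorization computed during the first solve
>  should then be reused for later right-hand sides. A minimal sketch
>  (assuming the KSP is set up as in the listing below, with assembled
>  Vecs b and y and nrhs right-hand sides):
>
>   for(int k=0; k<nrhs; k++) {
>     // fill b with the k-th right-hand side here ...
>     ierr=KSPSolve(solver,b,y); CHKERRQ(ierr); // first call factors, later calls reuse
>   }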
>
>  Here is the code listing:
>
>  #include <iostream>
>  #include <complex>
>  #include <stdlib.h>
>  #include "petsc.h"
>  #include "petscmat.h"
>  #include "petscvec.h"
>  #include "petscksp.h"
>
>  using namespace std;
>
>  int main( int argc, char *argv[] ) {
>
>   int rank, size;
>   Mat A;
>   PetscErrorCode ierr;
>   PetscInt loc;
>   PetscScalar val;
>   Vec x, y;
>   KSP solver;
>   PC prec;
>   MPI_Comm comm;
>
>   // Number of non-zeros per row in the diagonal (d_nnz) and
>   // off-diagonal (o_nnz) blocks of the locally owned rows
>   int d_nnz=1, o_nnz=1;
>
>   // Initialization (including MPI)
>   ierr=PetscInitialize(&argc,&argv,PETSC_NULL,PETSC_NULL); CHKERRQ(ierr);
>
>   comm=PETSC_COMM_WORLD;
>
>   ierr=MPI_Comm_size(PETSC_COMM_WORLD,&size); CHKERRQ(ierr);
>   ierr=MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
>
>   // Assemble matrix A
>
>   if(rank==0) {
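>     // each process owns 1 row and 1 column of the global 2 x 2 matrix;
>     // d_nnz and o_nnz preallocate the diagonal and off-diagonal blocks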
>     ierr=MatCreateMPIAIJ(comm,1,1,2,2,0,&d_nnz,0,&o_nnz,&A); CHKERRQ(ierr);
>     val=complex<double>(1.0,0.0);
>     ierr=MatSetValue(A,0,0,val,INSERT_VALUES);CHKERRQ(ierr);
>     val=complex<double>(0.0,1.0);
>     ierr=MatSetValue(A,0,1,val,INSERT_VALUES);CHKERRQ(ierr);
>   }
>   else {
>     ierr=MatCreateMPIAIJ(PETSC_COMM_WORLD,1,1,2,2,0,&d_nnz,0,&o_nnz,&A); CHKERRQ(ierr);
>     val=complex<double>(1.0,1.0);
>     ierr=MatSetValue(A,1,1,val,INSERT_VALUES);CHKERRQ(ierr);
>     val=complex<double>(0.0,-1.0);
>     ierr=MatSetValue(A,1,0,val,INSERT_VALUES);CHKERRQ(ierr);
>   }
>
>   ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
>   ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
>
>   cout << "============  Mat A  ==================" << endl;
>   ierr=MatView(A,PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
>   cout << "======================================" << endl;
>
>   // For spooles
>   //ierr=MatConvert(A,MATMPIAIJSPOOLES,MAT_REUSE_MATRIX,&A); CHKERRQ(ierr);
>
>   // For superlu_dist
>   ierr=MatConvert(A,MATSUPERLU_DIST,MAT_REUSE_MATRIX,&A); CHKERRQ(ierr);
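>   // Note: in this PETSc version the external direct solver is selected
>   // through the matrix type, either by MatConvert as above or, when
>   // MatSetFromOptions is used, by -mat_type on the command line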
>
>   // Direct LU solver
>   ierr=KSPCreate(comm,&solver); CHKERRQ(ierr);
>   ierr=KSPSetType(solver,KSPPREONLY); CHKERRQ(ierr);
>   ierr=KSPSetOperators(solver,A,A,SAME_NONZERO_PATTERN); CHKERRQ(ierr);
>   ierr=KSPGetPC(solver,&prec); CHKERRQ(ierr);
>   ierr=PCSetType(prec,PCLU); CHKERRQ(ierr);
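>   // KSPSetFromOptions comes last so that runtime options such as
>   // -ksp_type and -pc_type can still override the settings above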
>   ierr=KSPSetFromOptions(solver); CHKERRQ(ierr);
>
>   //============  Vector assembly ========================
>
>   if(rank==0) {
>     ierr=VecCreateMPI(PETSC_COMM_WORLD,1,2,&x); CHKERRQ(ierr);
>     val=complex<double>(1.0,0.0);
>     loc=0;
>     ierr=VecSetValues(x,1,&loc,&val,ADD_VALUES);CHKERRQ(ierr);
>   }
>   else {
>     ierr=VecCreateMPI(PETSC_COMM_WORLD,1,2,&x); CHKERRQ(ierr);
>     val=complex<double>(-1.0,0.0);
>     loc=1;
>     ierr=VecSetValues(x,1,&loc,&val,ADD_VALUES);CHKERRQ(ierr);
>   }
>
>   ierr=VecAssemblyBegin(x); CHKERRQ(ierr);
>   ierr=VecAssemblyEnd(x); CHKERRQ(ierr);
>
>   cout << "============== Vec x ==================" << endl;
>   ierr=VecView(x,PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
>   cout << "======================================" << endl;
>
>   ierr=VecDuplicate(x,&y); CHKERRQ(ierr);  // y gets the same layout as x
>
>   // Solve the matrix equation
>   ierr=KSPSolve(solver,x,y); CHKERRQ(ierr);
>
>   cout << "============== Vec y =================" << endl;
>   ierr=VecView(y,PETSC_VIEWER_STDOUT_WORLD); CHKERRQ(ierr);
>   cout << "======================================" << endl;
>
>
>   // Destructors
>   ierr=KSPDestroy(solver); CHKERRQ(ierr);
>   ierr=VecDestroy(x); CHKERRQ(ierr);
>   ierr=VecDestroy(y); CHKERRQ(ierr);
>   ierr=MatDestroy(A); CHKERRQ(ierr);
>
>   // Finalize
>   ierr=PetscFinalize(); CHKERRQ(ierr);
>
>
>   return 0;
>
>  }
>
>
>  When I run the program with spooles, I get the following output.
>
>
>  ============  Mat A  ==================
>  ============  Mat A  ==================
>  ======================================
>  row 0: (0, 1)  (1, 0 + 1 i)
>  row 1: (0, 0 - 1 i) (1, 1 + 1 i)
>  ======================================
>  ============== Vec x ==================
>  ============== Vec x ==================
>  Process [0]
>  1
>  ======================================
>  Process [1]
>  -1
>  ======================================
>
>   fatal error in InpMtx_MPI_split()
>   firsttag = 0, tagbound = -1
>
>   fatal error in InpMtx_MPI_split()
>   firsttag = 0, tagbound = -1
>  -----------------------------------------------------------------------------
>  One of the processes started by mpirun has exited with a nonzero exit
>  code.  This typically indicates that the process finished in error.
>  If your process did not finish in error, be sure to include a "return
>  0" or "exit(0)" in your C code before exiting the application.
>
>  PID 22881 failed on node n0 (127.0.0.1) with exit status 255.
>  -----------------------------------------------------------------------------
>  mpirun failed with exit status 255
>
>
>  When run in the debugger, there is no stack trace. The error is
>
>  Program exited with code 0377
>
>  (0377 is octal for 255, matching the mpirun exit status above).
>
>
>  With superlu_dist, the output is
>
>  ============  Mat A  ==================
>  ============  Mat A  ==================
>  row 0: (0, 1)  (1, 0 + 1 i)
>  row 1: (0, 0 - 1 i) (1, 1 + 1 i)
>  ======================================
>  ======================================
>  ============== Vec x ==================
>  ============== Vec x ==================
>  Process [0]
>  1
>  Process [1]
>  -1
>  ======================================
>  ======================================
>  [0]PETSC ERROR:
>  ------------------------------------------------------------------------
>  [0]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation,
>  probably memory access out of range
>  [0]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
>  [0]PETSC ERROR: or see
>  http://www.mcs.anl.gov/petsc/petsc-as/documentation/troubleshooting.html#Signal
>  [0]PETSC ERROR: or try http://valgrind.org on linux or man libgmalloc on
>  Apple to find memory corruption errors
>  [0]PETSC ERROR: configure using --with-debugging=yes, recompile, link, and
>  run
>  [0]PETSC ERROR: to get more information on the crash.
>  [0]PETSC ERROR: --------------------- Error Message
>  ------------------------------------
>  [0]PETSC ERROR: Signal received!
>  [0]PETSC ERROR:
>  ------------------------------------------------------------------------
>  [0]PETSC ERROR: Petsc Release Version 2.3.3, Patch 8, Fri Nov 16 17:03:40
>  CST 2007 HG revision: 414581156e67e55c761739b0deb119f7590d0f4b
>  [0]PETSC ERROR: See docs/changes/index.html for recent updates.
>  [0]PETSC ERROR: See docs/faq.html for hints about trouble shooting.
>  [0]PETSC ERROR: See docs/index.html for manual pages.
>  [0]PETSC ERROR:
>  ------------------------------------------------------------------------
>  [0]PETSC ERROR: lu on a linux-gnu named tabla by amit Thu Feb 28 13:48:54
>  2008
>  [0]PETSC ERROR: Libraries linked from
>  /home/amit/programs/ParEM/petsc-2.3.3-p8/lib/linux-gnu-c-debug
>  [0]PETSC ERROR: Configure run at Thu Feb 28 12:19:39 2008
>  [0]PETSC ERROR: Configure options --with-scalar-type=complex
>  --with-debugging=no --with-fortran-kernels=generic --with-clanguage=cxx
>  --with-metis=1 --download-metis=1 --with-parmetis=1 --download-parmetis=1
>  --with-superlu_dist=1 --download-superlu_dist=1 --with-spooles=1
>  --with-spooles-dir=/home/amit/programs/ParEM/spooles-2.2 COPTFLAGS="-O3
>  -march=p4 -mtune=p4 -ffast-math -malign-double -funroll-loops -pipe
>  -fomit-frame-pointer -finline-functions -msse2" CXXOPTFLAGS="-O3 -march=p4
>  -mtune=p4 -ffast-math -malign-double -funroll-loops -pipe
>  -fomit-frame-pointer -finline-functions -msse2" FOPTS="-O3 -qarch=p4
>  -qtune=p4" --with-shared=0
>  [0]PETSC ERROR:
>  ------------------------------------------------------------------------
>  [0]PETSC ERROR: User provided function() line 0 in unknown directory
>  unknown file
>  -----------------------------------------------------------------------------
>  One of the processes started by mpirun has exited with a nonzero exit
>  code.  This typically indicates that the process finished in error.
>  If your process did not finish in error, be sure to include a "return
>  0" or "exit(0)" in your C code before exiting the application.
>
>  PID 22998 failed on node n0 (127.0.0.1) with exit status 1.
>  -----------------------------------------------------------------------------
>  mpirun failed with exit status 1
>
>
>  The debugger tracks the segmentation violation to
>
>
>  main -> KSPSolve -> KSPSetUp -> PCSetUp -> PCSetUp_LU -> MatLUFactorNumeric
>  -> MatLUFactorNumeric_SuperLU_DIST -> pzgssvx
>
>
>  Could someone kindly point out what I am missing?
>
>
>  Thanks
>
>  Rgds,
>  Amit
>
>



-- 
What most experimenters take for granted before they begin their
experiments is infinitely more interesting than any results to which
their experiments lead.
-- Norbert Wiener



