[petsc-users] MUMPS Error 'INFOG(1)=-3 INFO(2)=3' (SPARSE MATRIX INVERSE)

maitri ksh maitri.ksh at gmail.com
Thu Jul 27 08:50:33 CDT 2023


I am using MatMumpsGetInverse() to compute the inverse of a sparse matrix, following parts of ex214.c
<https://gitlab.com/petsc/petsc/-/blob/8372926a7ae049f36f382da6b36410caecb0415f/src/mat/examples/tests/ex214.c>,
but I get an error that appears to come from the MUMPS library. Any suggestions?
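
For reference, the core call sequence in the attached MatInv_MUMPS.c (setup and error checking trimmed) is:

  /* factor A with MUMPS LU */
  ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
  ierr = MatLUFactorSymbolic(F,A,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = MatLUFactorNumeric(F,A,NULL);CHKERRQ(ierr);

  /* spRHS = spRHST^T, where spRHST holds the identity pattern centralized on rank 0 */
  ierr = MatCreateTranspose(spRHST,&spRHS);CHKERRQ(ierr);

  /* request the entries of inv(A) selected by the pattern of spRHS */
  ierr = MatMumpsGetInverse(F,spRHS);CHKERRQ(ierr);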

ERROR:
[0]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[0]PETSC ERROR: Error in external library
[0]PETSC ERROR: Error reported by MUMPS in solve phase: INFOG(1)=-3 INFO(2)=3
[0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.19.3, unknown
[0]PETSC ERROR: ./MatInv_MUMPS on a arch-linux-c-debug named LAPTOP-0CP4FI1T by maitri Thu Jul 27 16:35:02 2023
[0]PETSC ERROR: Configure options --with-cc=gcc --with-cxx=g++ --with-fc=gfortran --download-mpich --download-fblaslapack --with-matlab --with-matlab-dir=/usr/local/MATLAB/R2022a --download-hdf5 --with-hdf5=1 --download-mumps --download-scalapack --download-parmetis --download-metis --download-ptscotch --download-bison --download-cmake
[0]PETSC ERROR: #1 MatMumpsGetInverse_MUMPS() at /home/maitri/petsc/src/mat/impls/aij/mpi/mumps/mumps.c:2720
[0]PETSC ERROR: #2 MatMumpsGetInverse() at /home/maitri/petsc/src/mat/impls/aij/mpi/mumps/mumps.c:2753
[0]PETSC ERROR: #3 main() at MatInv_MUMPS.c:74
[0]PETSC ERROR: No PETSc Option Table entries
[0]PETSC ERROR: ----------------End of Error Message -------send entire error message to petsc-maint at mcs.anl.gov----------
application called MPI_Abort(MPI_COMM_SELF, 76) - process 0
[unset]: PMIU_write error; fd=-1 buf=:cmd=abort exitcode=76 message=application called MPI_Abort(MPI_COMM_SELF, 76) - process 0
:
system msg for write_line failure : Bad file descriptor


Maitri
-------------- next part --------------
maitri@LAPTOP-0CP4FI1T:~/my_executables$ ./MatInv_MUMPS
using LU factorization
-------------- next part --------------
#include <petscmat.h>
#include <petscviewer.h>

int main(int argc,char **args)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;
  Mat            A,F,X,spRHST;
  PetscInt       m,n,nrhs,M,N,i;
  PetscScalar    v;
  PetscRandom    rand;
  PetscBool      displ=PETSC_FALSE;
 
  ierr = PetscInitialize(&argc, &args, NULL, NULL);if (ierr) return ierr;
  ierr = MPI_Comm_size(PETSC_COMM_WORLD,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscOptionsGetBool(NULL,NULL,"-displ",&displ,NULL);CHKERRQ(ierr);

  /* Load matrix A */
  PetscViewer viewerA;
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD, "A1.petsc", FILE_MODE_READ, &viewerA);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD, &A);CHKERRQ(ierr);
  ierr = MatLoad(A, viewerA);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&viewerA);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,&n);CHKERRQ(ierr);
  ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
  if (m != n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"The matrix is not square (%" PetscInt_FMT ", %" PetscInt_FMT ")",m,n);

  /* Create a random dense matrix X (carried over from ex214.c; not used by the inverse computation below) */
  nrhs = N;
  ierr = PetscOptionsGetInt(NULL,NULL,"-nrhs",&nrhs,NULL);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD, &X);CHKERRQ(ierr);
  ierr = MatSetSizes(X, m, PETSC_DECIDE, PETSC_DECIDE, nrhs);CHKERRQ(ierr);
  ierr = MatSetType(X, MATDENSE);CHKERRQ(ierr);
  ierr = MatSetFromOptions(X);CHKERRQ(ierr);
  ierr = MatSetUp(X);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_WORLD,&rand);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rand);CHKERRQ(ierr);
  ierr = MatSetRandom(X,rand);CHKERRQ(ierr);

  /* Factor A using MUMPS LU factorization */
  ierr = PetscPrintf(PETSC_COMM_WORLD,"using LU factorization\n");CHKERRQ(ierr);
  ierr = MatGetFactor(A,MATSOLVERMUMPS,MAT_FACTOR_LU,&F);CHKERRQ(ierr);
  ierr = MatLUFactorSymbolic(F,A,NULL,NULL,NULL);CHKERRQ(ierr);
  ierr = MatLUFactorNumeric(F,A,NULL);CHKERRQ(ierr);

  /* Create spRHST: PETSc does not support the compressed-column format required by MUMPS for a sparse RHS matrix,
     so the user must create spRHST = spRHS^T */
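  /* Since spRHST is given a unit diagonal below, spRHS = spRHST^T has the pattern of the N x N identity,
     i.e. every entry of inv(A) is requested */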
  ierr = MatCreate(PETSC_COMM_WORLD,&spRHST);CHKERRQ(ierr);
  if (!rank) {
      /* MUMPS requires RHS be centralized on the host! */
      ierr = MatSetSizes(spRHST,nrhs,M,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  } else {
      ierr = MatSetSizes(spRHST,0,0,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr);
  }
  ierr = MatSetType(spRHST,MATAIJ);CHKERRQ(ierr);
  ierr = MatSetFromOptions(spRHST);CHKERRQ(ierr);
  ierr = MatSetUp(spRHST);CHKERRQ(ierr);
  if (!rank) {
      v = 1.0;
      for (i=0; i<nrhs; i++) {
        ierr = MatSetValues(spRHST,1,&i,1,&i,&v,INSERT_VALUES);CHKERRQ(ierr);
      }
  }
  ierr = MatAssemblyBegin(spRHST,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(spRHST,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  
  /* Create spRHS = spRHST^T. Two matrices share internal matrix data structure */
  Mat spRHS = NULL;
  ierr = MatCreateTranspose(spRHST,&spRHS);CHKERRQ(ierr);
  
  /* get inv(A) using MatMumpsGetInverse() & sparse RHS: */
  ierr = MatMumpsGetInverse(F,spRHS);CHKERRQ(ierr);
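  /* On return, spRHS holds the requested entries of inv(A); the values live in spRHST, which shares its data with spRHS */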
  ierr = MatDestroy(&spRHS);CHKERRQ(ierr);

  /* Free data structures */
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&spRHST);CHKERRQ(ierr);
  ierr = MatDestroy(&X);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(&rand);CHKERRQ(ierr);
  ierr = PetscFinalize();
  return ierr;
 
}


