[petsc-users] segv in VecSum when using nested vec
Klaij, Christiaan
C.Klaij at marin.nl
Tue Mar 6 02:13:00 CST 2012
In a larger code I'm getting a segmentation fault when setting a null space
with KSPSetNullSpace. After some experimenting I can reproduce the problem with
the small program below: the segfault occurs when calling VecSum on a nested vec.
/* test program to find VecSum segmentation fault */
#include <petscksp.h>
class vecsumsegv {
public:
  PetscErrorCode ierr;
  PetscInt nx, ny;
  Vec x, subx[2];
  IS isg[2];
  vecsumsegv(PetscInt m, PetscInt n);
private:
  PetscErrorCode setupVectors();
  PetscErrorCode setupIndexSets();
  PetscErrorCode setupVecBlock0();
  PetscErrorCode setupVecBlock1();
};
vecsumsegv::vecsumsegv(PetscInt m, PetscInt n) {
  nx=m;
  ny=n;
  ierr = setupVectors();
  ierr = setupIndexSets();
  ierr = VecCreateNest(PETSC_COMM_WORLD,2,isg,subx,&x); // nested vec
  ierr = VecView(x,PETSC_VIEWER_STDOUT_WORLD);
  // this causes the segv:
  PetscScalar val;
  ierr = VecSum(x,&val);
}
PetscErrorCode vecsumsegv::setupVectors() {
  // the two vectors
  ierr = setupVecBlock0(); CHKERRQ(ierr);
  ierr = setupVecBlock1(); CHKERRQ(ierr);
  return 0;
}
PetscErrorCode vecsumsegv::setupVecBlock0() {
  // 2N vector corresponding to block 0
  ierr = VecCreate(PETSC_COMM_WORLD,&subx[0]); CHKERRQ(ierr);
  ierr = VecSetSizes(subx[0],PETSC_DECIDE,2*nx*ny); CHKERRQ(ierr);
  ierr = VecSetType(subx[0],VECMPI); CHKERRQ(ierr);
  return 0;
}
PetscErrorCode vecsumsegv::setupVecBlock1() {
  // N vector corresponding to block 1
  ierr = VecCreate(PETSC_COMM_WORLD,&subx[1]); CHKERRQ(ierr);
  ierr = VecSetSizes(subx[1],PETSC_DECIDE,nx*ny); CHKERRQ(ierr);
  ierr = VecSetType(subx[1],VECMPI); CHKERRQ(ierr);
  return 0;
}
PetscErrorCode vecsumsegv::setupIndexSets() {
  PetscMPIInt rank;
  PetscInt m0,m1;
  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank); CHKERRQ(ierr);
  ierr = VecGetLocalSize(subx[0],&m0); CHKERRQ(ierr);
  ierr = VecGetLocalSize(subx[1],&m1); CHKERRQ(ierr);
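  // Intended global layout (my reading of the index sets below, assuming
  // PETSC_DECIDE gives every rank the same local sizes m0 and m1): each
  // rank owns a contiguous range of m0+m1 entries, with its part of
  // block 0 first, followed directly by its part of block 1.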
  // first index set
  PetscInt *idx0=new PetscInt[m0];
  for (int i=0; i<m0; i++) { idx0[i]=i+rank*(m0+m1); }
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,m0,idx0,PETSC_COPY_VALUES,&isg[0]); CHKERRQ(ierr);
  delete[] idx0;
  // ISView(isg[0],PETSC_VIEWER_STDOUT_WORLD);
  // second index set
  PetscInt *idx1=new PetscInt[m1];
  for (int i=0; i<m1; i++) { idx1[i]=i+(rank+1)*m0+rank*m1; }
  ierr = ISCreateGeneral(PETSC_COMM_WORLD,m1,idx1,PETSC_COPY_VALUES,&isg[1]); CHKERRQ(ierr);
  delete[] idx1;
  // ISView(isg[1],PETSC_VIEWER_STDOUT_WORLD);
  return 0;
}
int main(int argc, char **argv) {
  PetscErrorCode ierr;
  ierr = PetscInitialize(&argc, &argv, PETSC_NULL, PETSC_NULL); CHKERRQ(ierr);
  vecsumsegv tryme(3,4);
  ierr = PetscFinalize(); CHKERRQ(ierr);
  return 0;
}
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation, probably memory access out of range
[0]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
[0]PETSC ERROR: or see http://www.mcs.anl.gov/petsc/petsc-as/documentation/faq.html#valgrind
[0]PETSC ERROR: or try http://valgrind.org on GNU/linux and Apple Mac OS X to find memory corruption errors
[0]PETSC ERROR: likely location of problem given in stack below
[0]PETSC ERROR: --------------------- Stack Frames ------------------------------------
[0]PETSC ERROR: Note: The EXACT line numbers in the stack are not available,
[0]PETSC ERROR: INSTEAD the line number of the start of the function
[0]PETSC ERROR: is given.
[0]PETSC ERROR: [0] VecSum line 1301 /home/CKlaij/ReFRESCO/Libraries/build/petsc-3.2-p5/src/vec/vec/utils/vinv.c
[0]PETSC ERROR: --------------------- Error Message ------------------------------------
[0]PETSC ERROR: Signal received!
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: Petsc Release Version 3.2.0, Patch 5, Sat Oct 29 13:45:54 CDT 2011
[0]PETSC ERROR: See docs/changes/index.html for recent updates.
[0]PETSC ERROR: See docs/faq.html for hints about trouble shooting.
[0]PETSC ERROR: See docs/index.html for manual pages.
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: ./vecsumsegv on a linux_64b named lin0133 by cklaij Tue Mar 6 08:55:25 2012
[0]PETSC ERROR: Libraries linked from /home/CKlaij/ReFRESCO/Libraries/build/petsc-3.2-p5/linux_64bit_debug/lib
[0]PETSC ERROR: Configure run at Thu Mar 1 10:59:35 2012
[0]PETSC ERROR: Configure options --with-mpi-dir=/opt/refresco/64bit_intelv11.1_openmpi/openmpi-1.4.5 --with-clanguage=c++ --with-x=1 --with-debugging=1 --with-hypre-include=/opt/refresco/64bit_intelv11.1_openmpi/hypre-2.7.0b/include --with-hypre-lib=/opt/refresco/64bit_intelv11.1_openmpi/hypre-2.7.0b/lib/libHYPRE.a --with-ml-include=/opt/refresco/64bit_intelv11.1_openmpi/ml-6.2/include --with-ml-lib=/opt/refresco/64bit_intelv11.1_openmpi/ml-6.2/lib/libml.a --with-blas-lapack-dir=/opt/intel/mkl
[0]PETSC ERROR: ------------------------------------------------------------------------
[0]PETSC ERROR: User provided function() line 0 in unknown directory unknown file
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 0 in communicator MPI_COMM_WORLD
with errorcode 59.
NOTE: invoking MPI_ABORT causes Open MPI to kill all MPI processes.
You may or may not see output from other processes, depending on
exactly when Open MPI kills them.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
mpiexec has exited due to process rank 0 with PID 565 on
node lin0133 exiting without calling "finalize". This may
have caused other processes in the application to be
terminated by signals sent by mpiexec (as reported here).
--------------------------------------------------------------------------
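For now I could work around this in the larger code by summing the blocks of the
nested vector one by one. This is just a sketch of what I have in mind (assuming
VecNestGetSubVecs in 3.2-p5 gives access to the sub-vectors as documented; the
helper name nestedVecSum is mine):

PetscErrorCode nestedVecSum(Vec x, PetscScalar *sum) {
  PetscErrorCode ierr;
  PetscInt nb;
  Vec *subs;
  PetscScalar blocksum;
  *sum = 0.0;
  // access to the sub-vectors of the nest; these are borrowed references
  ierr = VecNestGetSubVecs(x,&nb,&subs); CHKERRQ(ierr);
  for (PetscInt i=0; i<nb; i++) {
    // the blocks are plain MPI vecs, so VecSum works on them
    ierr = VecSum(subs[i],&blocksum); CHKERRQ(ierr);
    *sum += blocksum;
  }
  return 0;
}

But of course I'd rather have VecSum (and hence KSPSetNullSpace) work directly
on the nested vec.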
dr. ir. Christiaan Klaij
CFD Researcher
Research & Development
E mailto:C.Klaij at marin.nl
T +31 317 49 33 44
MARIN
2, Haagsteeg, P.O. Box 28, 6700 AA Wageningen, The Netherlands
T +31 317 49 39 11, F +31 317 49 32 45, I www.marin.nl