[petsc-users] PCSETUP_FAILED due to FACTOR_NUMERIC_ZEROPIVOT

Barry Smith bsmith at mcs.anl.gov
Sat Feb 18 11:07:53 CST 2017


   Because of the zeros on the diagonal, the sparse (I)LU factorization failed (unlike dense factorizations, sparse (I)LU factorizations generally don't use partial pivoting and so can fail even on nonsingular matrices). Did you run with -pc_type lu, or with the defaults? It is always useful to run with -ksp_view to see which solver options are actually being used.
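
   For completeness, here is a minimal sketch (just an illustration, not a drop-in patch for your program) of selecting LU directly in the code; note that the posted code never calls KSPSetFromOptions(), so command-line options such as -pc_type lu and -ksp_view would not be picked up:

       KSP solver;
       PC  pc;

       KSPCreate ( PETSC_COMM_WORLD, &solver );
       KSPSetOperators ( solver, A, A );
       KSPGetPC ( solver, &pc );        /* get the preconditioner context of the KSP */
       PCSetType ( pc, PCLU );          /* request a direct LU factorization */
       KSPSetFromOptions ( solver );    /* let -pc_type, -ksp_view etc. take effect */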

   Where did this matrix come from? Usually PETSc users have a specific class of problems they are solving and so don't hit upon these matrices.

   SuperLU (not superlu_dist) does do some partial pivoting and so will usually factor such matrices. You can use ./configure --download-superlu and then run the example with -pc_type lu -pc_factor_mat_solver_package superlu. But for large matrices SuperLU is pretty slow.
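
   A typical run (the executable name here is just a placeholder) would then look like

       ./ex_solve -pc_type lu -pc_factor_mat_solver_package superlu -ksp_view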

   Barry




> On Feb 18, 2017, at 5:06 AM, lixin chu <lixin_chu at yahoo.com> wrote:
> 
> Hello,
> 
> I am trying my first PETSc program; some matrices work fine, but this one shows an error:
> 
> Linear solve did not converge due to DIVERGED_PCSETUP_FAILED iterations 0
>                PCSETUP_FAILED due to FACTOR_NUMERIC_ZEROPIVOT
> 
> Not sure what else I can try (I tried CG and GMRES, neither worked)?
> 
> 
> /*
>  * Matrix A
>  *   1  0  0  1  1  1
>  *   0  1  1  1  0  1
>  *   1  0  0  0  1  1
>  *   0  0 -1  0  1  1
>  *   1  1  0  1  1  1
>  *   0  1  1  1  0  0
>  *
>  * Expected inverse result
>  *	 0  0  0  -1  1  -1
>  *	-1  0  0   0  1   0
>  *	 0  0  1   0 -1   1
>  *	 1  0 -1   0  0   0
>  *	 0 -1  1   1 -1   2
>  *	 0  1  0   0  0  -1
>  *
>  */
> 
> Here is my code:
> 
> 	Vec            x, b;
> 	Mat            A;
> 	KSP            solver;
> 	PC             pc;
> 
> 	PetscMPIInt    size;
> 
> 	int mpi_rank;
> 	PetscBool is_mpi_root = PETSC_FALSE;
> 
> 	/*
> 	 *  initialize PETSc and MPI
> 	 */
> 	PetscInitialize ( &argc, &argv, (char*)0, USAGE);
> 
> 
> 	int col_index [6][6] = {
> 			{0, 1, 2, 3, 4, 5},
> 			{0, 1, 2, 3, 4, 5},
> 			{0, 1, 2, 3, 4, 5},
> 			{0, 1, 2, 3, 4, 5},
> 			{0, 1, 2, 3, 4, 5},
> 			{0, 1, 2, 3, 4, 5}
> 	};
> 
> 	double v [6][6] = {
> 			{1., 0.,  0., 1., 1., 1.},
> 			{0., 1.,  1., 1., 0., 1.},
> 			{1., 0.,  0., 0., 1., 1.},
> 			{0., 0., -1., 0., 1., 1.},
> 			{1., 1.,  0., 1., 1., 1.},
> 			{0., 1.,  1., 1., 0., 0.}
> 	};
> 
> 	int nz_rows [] = {6, 6, 6, 6, 6, 6};
> 
> 	int N = 6;
> 	int row, col;
> 	double value = 1.;
> 
> 	int m, n;
> 
> 	int b_start, b_end;
> 
> 	// create x
> 	VecCreate ( PETSC_COMM_WORLD, &x );
> 	VecSetSizes ( x, PETSC_DECIDE, N );
> 	VecSetType ( x, VECMPI );
> 
> 
> 	// create b
> 	VecCreate ( PETSC_COMM_WORLD, &b );
> 	VecSetSizes ( b, PETSC_DECIDE, N );
> 	VecSetType ( b, VECMPI );
> 
> 
> 	// create A
> 	MatCreate ( PETSC_COMM_WORLD, &A );
> 	MatSetSizes ( A, PETSC_DECIDE, PETSC_DECIDE, N, N );
> 
> 	MatSetUp ( A );
> 
> 	for ( row = 0; row < N; row++ ) {
> 		MatSetValues ( A, 1, &row, nz_rows[row], col_index[row], v[row], INSERT_VALUES );
> 	}
> 	MatAssemblyBegin ( A, MAT_FINAL_ASSEMBLY ) ;
> 	MatAssemblyEnd ( A, MAT_FINAL_ASSEMBLY ) ;
> 
> 	MatView ( A, PETSC_VIEWER_STDOUT_WORLD );
> 
> 	/*
> 	 * setting up KSP
> 	 */
> 	KSPCreate ( PETSC_COMM_WORLD, &solver );
> 	KSPSetOperators ( solver, A, A );
> 	// KSPSetType ( solver, KSPCG );
> 	KSPSetType ( solver, KSPGMRES );
> 
> 	for ( col = 0; col < N; col++ ) {
> 
> 		// set b in root process
> 		VecGetOwnershipRange ( b , &b_start , &b_end ) ;
> 
> 		// clear all values
> 		VecSet ( b, 0 );
> 		// set 1 in the correct process.
> 		// each process owns a block of b : [b_start, b_end)
> 		// index starts from 0
> 
> 		if ( col >= b_start && col < b_end )
> 			VecSetValues ( b, 1, &col, &value, INSERT_VALUES );
> 
> 		// distribute b
> 		VecAssemblyBegin ( b ) ;
> 		VecAssemblyEnd ( b ) ;
> 
> 		// must be called from all processes
> 		//VecView ( b, PETSC_VIEWER_STDOUT_WORLD );
> 
> 		KSPSolve ( solver, b, x );
> 
> 		VecView ( x, PETSC_VIEWER_STDOUT_WORLD );
> 	}
> 
> 	/*
> 	 * clean up: destroy the vectors, the matrix and the KSP
> 	 */
> 	VecDestroy ( &b );
> 	VecDestroy ( &x );
> 	MatDestroy ( &A );
> 	KSPDestroy ( &solver );
> 
> 	/*
> 	 * finalize PETSc
> 	 */
> 	PetscFinalize();
> 
> 	return 0;
> 
> I am running this with one MPI process, and the output seems to indicate that the A matrix is set correctly:
> 
> Mat Object: 1 MPI processes
>   type: seqaij
> row 0: (0, 1.)  (1, 0.)  (2, 0.)  (3, 1.)  (4, 1.)  (5, 1.) 
> row 1: (0, 0.)  (1, 1.)  (2, 1.)  (3, 1.)  (4, 0.)  (5, 1.) 
> row 2: (0, 1.)  (1, 0.)  (2, 0.)  (3, 0.)  (4, 1.)  (5, 1.) 
> row 3: (0, 0.)  (1, 0.)  (2, -1.)  (3, 0.)  (4, 1.)  (5, 1.) 
> row 4: (0, 1.)  (1, 1.)  (2, 0.)  (3, 1.)  (4, 1.)  (5, 1.) 
> row 5: (0, 0.)  (1, 1.)  (2, 1.)  (3, 1.)  (4, 0.)  (5, 0.) 
> 
> 
> Thanks for any help !
>  
> rgds
> LX


