[petsc-users] Multigrid and interpolation matrices

Torquil Macdonald Sørensen torquil at gmail.com
Tue Nov 12 13:16:15 CST 2013


Hi!

I'm having some problems getting PCMG to work when I populate the
interpolation matrix manually with MatSetValue, instead of obtaining it
from DMCreateInterpolation as in an example I have seen. I have to use
MatSetValue to populate my PETSc matrices, because I obtain them in a
different format from a FEM library.

I have reduced my program to a minimal test case (without involving the
FEM library): the system matrix is simply an identity matrix, there are
two multigrid levels with 8 and 4 degrees of freedom, and the
interpolation matrix is very simple, as can be seen in the attached code.
I'm not including a KSPSolve call because the problem occurs before that,
on line 56 of the code, where I call KSPSetUp().
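
For reference, with n = 8 the loop in the attached code builds the 8x4
interpolation matrix with a 1 in entry (i, i/2) of each row i, i.e.

1 0 0 0
1 0 0 0
0 1 0 0
0 1 0 0
0 0 1 0
0 0 1 0
0 0 0 1
0 0 0 1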

I'm actually not certain how to create the matrix that is passed to
PCMGSetInterpolation(). At the moment I'm only running sequentially, but
how should this non-square matrix be distributed across the MPI processes
when running in parallel, i.e. what values should I pass to MatSetSizes()
for that matrix?
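
To make the question concrete, here is a hypothetical sketch of what I
imagine the parallel sizing might look like, with the local row count
taken from the fine-grid layout and the local column count from the
coarse-grid layout (nlocal_fine and nlocal_coarse are made-up names for
values I would get from VecGetLocalSize() on fine- and coarse-level
vectors); I don't know whether this is what PCMGSetInterpolation()
expects:

    /* Hypothetical parallel sizing of the interpolation matrix:
       local rows follow the fine-grid layout, local columns the
       coarse-grid layout. nlocal_fine and nlocal_coarse are assumed
       to come from VecGetLocalSize() on fine and coarse vectors. */
    Mat interp;
    ierr = MatCreate(PETSC_COMM_WORLD, &interp); CHKERRQ(ierr);
    ierr = MatSetSizes(interp, nlocal_fine, nlocal_coarse, n, n/2); CHKERRQ(ierr);
    ierr = MatSetFromOptions(interp); CHKERRQ(ierr);
    ierr = MatSetUp(interp); CHKERRQ(ierr);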

In any case, the error message I'm getting (running it with no command line
options) is:

[0]PETSC ERROR: --------------------- Error Message
------------------------------------
[0]PETSC ERROR: Object is in wrong state!
[0]PETSC ERROR: Mat object's type is not set: Argument # 1!
[0]PETSC ERROR:
------------------------------------------------------------------------
[0]PETSC ERROR: Petsc Release Version 3.4.3, Oct, 15, 2013
[0]PETSC ERROR: See docs/changes/index.html for recent updates.
[0]PETSC ERROR: See docs/faq.html for hints about trouble shooting.
[0]PETSC ERROR: See docs/index.html for manual pages.
[0]PETSC ERROR:
------------------------------------------------------------------------
[0]PETSC ERROR: ./multigrid2.elf on a arch-linux2-cxx-debug named
enlil.uio.no by tmac Tue Nov 12 19:57:59 2013
[0]PETSC ERROR: Libraries linked from
/mn/anatu/cma-u3/tmac/usr/stow/petsc_complex_dbg/lib
[0]PETSC ERROR: Configure run at Tue Nov 12 16:27:32 2013
[0]PETSC ERROR: Configure options
CPPFLAGS=-I/mn/anatu/cma-u3/tmac/usr/include
LDFLAGS=-L/mn/anatu/cma-u3/tmac/usr/lib -L/mn/anatu/cma-u3/tmac/usr/lib64
--prefix=/mn/anatu/cma-u3/tmac/usr/stow/petsc_complex_dbg
--with-clanguage=C++ --with-scalar-type=complex --with-shared-libraries=1
--with-debugging=1 --with-superlu=1
--with-superlu-lib=/mn/anatu/cma-u3/tmac/usr/lib/libsuperlu.so
--with-superlu-include=/mn/anatu/cma-u3/tmac/usr/include/superlu/
[0]PETSC ERROR:
------------------------------------------------------------------------
[0]PETSC ERROR: MatGetVecs() line 8131 in
/tmp/petsc-3.4.3/src/mat/interface/matrix.c
[0]PETSC ERROR: KSPGetVecs() line 929 in
/tmp/petsc-3.4.3/src/ksp/ksp/interface/iterativ.c
[0]PETSC ERROR: PCSetUp_MG() line 691 in
/tmp/petsc-3.4.3/src/ksp/pc/impls/mg/mg.c
[0]PETSC ERROR: PCSetUp() line 890 in
/tmp/petsc-3.4.3/src/ksp/pc/interface/precon.c
[0]PETSC ERROR: KSPSetUp() line 278 in
/tmp/petsc-3.4.3/src/ksp/ksp/interface/itfunc.c
[0]PETSC ERROR: main() line 56 in
"unknowndirectory/"/mn/anatu/cma-u3/tmac/programming/fem/getfem/source/multigrid2.cpp
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 0 in communicator MPI_COMM_WORLD
with errorcode 73.

NOTE: invoking MPI_ABORT causes Open MPI to kill all MPI processes.
You may or may not see output from other processes, depending on
exactly when Open MPI kills them.
--------------------------------------------------------------------------

The full code is:

#include "petscksp.h"

int main(int argc, char ** argv)
{
    PetscErrorCode ierr = PetscInitialize(&argc, &argv, 0, 0); CHKERRQ(ierr);

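    /* Vector x is used only to obtain a parallel layout (rstart, rend,
       nlocal) for sizing and filling the system matrix A */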
    Vec x;
    Mat A;
    PetscInt rstart, rend, nlocal;

    PetscInt const n = 8;

    ierr = VecCreate(PETSC_COMM_WORLD, &x); CHKERRQ(ierr);
    ierr = VecSetSizes(x, PETSC_DECIDE, n); CHKERRQ(ierr);
    ierr = VecSetFromOptions(x); CHKERRQ(ierr);
    ierr = VecGetOwnershipRange(x, &rstart, &rend); CHKERRQ(ierr);
    ierr = VecGetLocalSize(x, &nlocal); CHKERRQ(ierr);

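    /* System matrix A: an n x n identity, filled row by row over this
       process's ownership range */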
    ierr = MatCreate(PETSC_COMM_WORLD, &A); CHKERRQ(ierr);
    ierr = MatSetSizes(A, nlocal, nlocal, n, n); CHKERRQ(ierr);
    ierr = MatSetFromOptions(A); CHKERRQ(ierr);
    ierr = MatSetUp(A); CHKERRQ(ierr);
    for(PetscInt i = rstart; i < rend; ++i) {
        ierr = MatSetValue(A, i, i, 1.0, INSERT_VALUES); CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);

    KSP ksp;
    ierr = KSPCreate(PETSC_COMM_WORLD, &ksp); CHKERRQ(ierr);
    ierr = KSPSetOperators(ksp, A, A, SAME_NONZERO_PATTERN); CHKERRQ(ierr);
    ierr = KSPSetType(ksp, KSPFGMRES); CHKERRQ(ierr);

    PC pc;
    ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
    ierr = PCSetType(pc, PCMG); CHKERRQ(ierr);
    ierr = PCMGSetLevels(pc, 2, 0); CHKERRQ(ierr);

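    /* Interpolation from the coarse level (n/2 DOFs) to the fine level
       (n DOFs): each fine row i gets a single 1 in coarse column i/2 */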
    Mat interp;
    ierr = MatCreate(PETSC_COMM_WORLD, &interp); CHKERRQ(ierr);
    ierr = MatSetSizes(interp, PETSC_DECIDE, PETSC_DECIDE, n, n/2); CHKERRQ(ierr);
    ierr = MatSetFromOptions(interp); CHKERRQ(ierr);
    ierr = MatSetUp(interp); CHKERRQ(ierr);
    ierr = MatGetOwnershipRange(interp, &rstart, &rend); CHKERRQ(ierr);
    for(PetscInt i = rstart; i < rend; ++i) {
        ierr = MatSetValue(interp, i, i/2, 1.0, INSERT_VALUES); CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(interp, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
    ierr = MatAssemblyEnd(interp, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);

    ierr = PCMGSetInterpolation(pc, 1, interp); CHKERRQ(ierr);
    ierr = MatDestroy(&interp); CHKERRQ(ierr);

    ierr = KSPSetFromOptions(ksp); CHKERRQ(ierr);
    ierr = KSPSetUp(ksp); CHKERRQ(ierr);

    ierr = VecDestroy(&x); CHKERRQ(ierr);
    ierr = MatDestroy(&A); CHKERRQ(ierr);
    ierr = KSPDestroy(&ksp); CHKERRQ(ierr);
    ierr = PetscFinalize(); CHKERRQ(ierr);

    return 0;
}