[petsc-users] Problem with TS and PCMG
Torquil Macdonald Sørensen
torquil at gmail.com
Tue Apr 1 17:00:51 CDT 2014
On 19/12/13 00:50, Jed Brown wrote:
>> I'm using PCMG. My minimal testcase program has everything
>> hardcoded. I'm not providing any runtime options other than
>> -start_in_debugger, so everything can be seen from the code. I've
>> since made a couple of small changes that might influence line number
>> statements from GDB, so I'm including the newest version in this
>> message.
>>
>> Here is the GDB output of "bt full" and the corresponding program code:
>>
>> (gdb) bt full
>> #1 0x00007ffdc080d172 in DMCoarsen (dm=0x1fa7df0, comm=0x7ffdbffdd9c0
>> <ompi_mpi_comm_null>, dmc=0x2048f00) at dm.c:1811
>> ierr = 0
>> link = 0x7ffdc1af1640
>> __func__ = "DMCoarsen"
>> #2 0x00007ffdc0c41c1e in PCSetUp_MG (pc=0x1ff7570) at mg.c:593
>> kdm = 0x1d5b370
>> p = 0xb453
>> rscale = 0x1ff7570
>> lu = PETSC_FALSE
>> svd = (PETSC_TRUE | unknown: 32764)
>> dB = 0x0
>> __func__ = "PCSetUp_MG"
>> tvec = 0x0
>> viewer = 0x0
>> mglevels = 0x2006980
>> ierr = 0
>> preonly = PETSC_FALSE
>> cholesky = (unknown: 3246112504)
>> mg = 0x20060c0
>> i = 0
>> n = 2
>> redundant = (unknown: 1211234)
>> use_amat = PETSC_TRUE
>> dms = 0x2048f00
>> cpc = 0x200f620
>> dump = PETSC_FALSE
>> opsset = PETSC_FALSE
>> dA = 0x7ffdc1af1640
>> uflag = DIFFERENT_NONZERO_PATTERN
> Okay, this is the relevant bit of code.
>
>   /* Skipping this for galerkin==2 (externally managed hierarchy such as ML and GAMG). Cleaner logic here would be great. Wrap ML/GAMG as DMs? */
>   if (pc->dm && mg->galerkin != 2 && !pc->setupcalled) {
>     /* construct the interpolation from the DMs */
>     Mat p;
>     Vec rscale;
>     ierr = PetscMalloc1(n,&dms);CHKERRQ(ierr);
>     dms[n-1] = pc->dm;
>     for (i=n-2; i>-1; i--) {
>       DMKSP kdm;
>       ierr = DMCoarsen(dms[i+1],MPI_COMM_NULL,&dms[i]);CHKERRQ(ierr);
>       ierr = KSPSetDM(mglevels[i]->smoothd,dms[i]);CHKERRQ(ierr);
>       if (mg->galerkin) {ierr = KSPSetDMActive(mglevels[i]->smoothd,PETSC_FALSE);CHKERRQ(ierr);}
>       ierr = DMGetDMKSPWrite(dms[i],&kdm);CHKERRQ(ierr);
>       /* Ugly hack so that the next KSPSetUp() will use the RHS that we set. A better fix is to change dmActive to take
>        * a bitwise OR of computing the matrix, RHS, and initial iterate. */
>       kdm->ops->computerhs = NULL;
>       kdm->rhsctx = NULL;
>       if (!mglevels[i+1]->interpolate) {
>         ierr = DMCreateInterpolation(dms[i],dms[i+1],&p,&rscale);CHKERRQ(ierr);
>
> I'm inclined to implement DMCoarsen_Shell() so that it carries through
> to the levels. It won't be used for interpolation because of the final
> conditional (the user has already set mglevels[i+1]->interpolate). This
> is slightly annoying because it's one more function that users have to
> implement. I think the patch below will also work, though I'm nervous
> about DMShellSetCreateMatrix().
>
> diff --git i/src/dm/interface/dm.c w/src/dm/interface/dm.c
> index ec60763..c987200 100644
> --- i/src/dm/interface/dm.c
> +++ w/src/dm/interface/dm.c
> @@ -1943,7 +1943,11 @@ PetscErrorCode DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
>
>    PetscFunctionBegin;
>    PetscValidHeaderSpecific(dm,DM_CLASSID,1);
> -  ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
> +  if (dm->ops->coarsen) {
> +    ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
> +  } else {
> +    ierr = DMShellCreate(comm,dmc);CHKERRQ(ierr);
> +  }
>    (*dmc)->ops->creatematrix = dm->ops->creatematrix;
>    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
>    (*dmc)->ctx = dm->ctx;
>
I tried this patch against 3.4.4, and got the following error when
running my test program:
tmac@lenovo:/tmp/build$ ./ts_multigrid.exe
[0]PETSC ERROR: PetscCommDuplicate() line 148 in /tmp/petsc-3.4.4/src/sys/objects/tagm.c
[0]PETSC ERROR: PetscHeaderCreate_Private() line 55 in /tmp/petsc-3.4.4/src/sys/objects/inherit.c
[0]PETSC ERROR: DMCreate() line 82 in /tmp/petsc-3.4.4/src/dm/interface/dm.c
[0]PETSC ERROR: DMShellCreate() line 589 in /tmp/petsc-3.4.4/src/dm/impls/shell/dmshell.c
[0]PETSC ERROR: DMCoarsen() line 1815 in /tmp/petsc-3.4.4/src/dm/interface/dm.c
[0]PETSC ERROR: PCSetUp_MG() line 593 in /tmp/petsc-3.4.4/src/ksp/pc/impls/mg/mg.c
[0]PETSC ERROR: PCSetUp() line 890 in /tmp/petsc-3.4.4/src/ksp/pc/interface/precon.c
[0]PETSC ERROR: KSPSetUp() line 278 in /tmp/petsc-3.4.4/src/ksp/ksp/interface/itfunc.c
[0]PETSC ERROR: KSPSolve() line 399 in /tmp/petsc-3.4.4/src/ksp/ksp/interface/itfunc.c
[0]PETSC ERROR: SNESSolve_KSPONLY() line 44 in /tmp/petsc-3.4.4/src/snes/impls/ksponly/ksponly.c
[0]PETSC ERROR: SNESSolve() line 3636 in /tmp/petsc-3.4.4/src/snes/interface/snes.c
[0]PETSC ERROR: TSStep_Theta() line 183 in /tmp/petsc-3.4.4/src/ts/impls/implicit/theta/theta.c
[0]PETSC ERROR: TSStep() line 2507 in /tmp/petsc-3.4.4/src/ts/interface/ts.c
[0]PETSC ERROR: main() line 57 in "unknowndirectory/"/home/tmac/programming/petsc/source/ts_multigrid.cpp
--------------------------------------------------------------------------
MPI_ABORT was invoked on rank 0 in communicator MPI_COMM_WORLD
with errorcode 1.
NOTE: invoking MPI_ABORT causes Open MPI to kill all MPI processes.
You may or may not see output from other processes, depending on
exactly when Open MPI kills them.
--------------------------------------------------------------------------
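Editorial note: in the backtrace quoted earlier, DMCoarsen() is entered with comm equal to MPI_COMM_NULL (PCSetUp_MG passes MPI_COMM_NULL), so the fallback DMShellCreate(comm,dmc) added by the patch would receive a null communicator, which is consistent with the PetscCommDuplicate() failure at the top of this trace. A DMCoarsen_Shell() along the lines Jed mentions would presumably have to substitute the fine DM's communicator. The following is purely an illustrative sketch under that assumption, not actual PETSc code; it presumes the function would live in dmshell.c, where the private DM struct is visible, and uses the private header name of the petsc-3.4.x tree:

/* Hypothetical sketch only -- not the PETSc implementation. */
#include <petscdmshell.h>
#include <petsc-private/dmimpl.h>   /* private DM struct; header path as in petsc-3.4.x */

static PetscErrorCode DMCoarsen_Shell(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* PCSetUp_MG() passes MPI_COMM_NULL, so fall back to the fine DM's communicator */
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  }
  ierr = DMShellCreate(comm,dmc);CHKERRQ(ierr);
  /* Carry the matrix hook and user context to the coarse level,
   * as the patch above does inside DMCoarsen() itself */
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  (*dmc)->ctx               = dm->ctx;
  PetscFunctionReturn(0);
}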
The test program ts_multigrid.cpp is:
#include "petscts.h"
int main(int argc, char ** argv)
{
PetscErrorCode ierr = PetscInitialize(&argc, &argv, 0, 0);
CHKERRQ(ierr);
int const nb_dof_fine = 5;
int const nb_dof_coarse = 3;
Vec x; // A vector to hold the solution on the fine grid
ierr = VecCreateSeq(PETSC_COMM_SELF, nb_dof_fine, &x); CHKERRQ(ierr);
Mat Id; // Identity matrix on the vector space of the fine grid
ierr = MatCreateSeqAIJ(PETSC_COMM_SELF, nb_dof_fine, nb_dof_fine, 1,
0, &Id); CHKERRQ(ierr);
for(int i = 0; i != nb_dof_fine; ++i) {
MatSetValue(Id, i, i, 1.0, INSERT_VALUES);
}
ierr = MatAssemblyBegin(Id, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
ierr = MatAssemblyEnd(Id, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
Mat interpolation; // Multigrid interpolation matrix
ierr = MatCreateSeqAIJ(PETSC_COMM_SELF, nb_dof_fine, nb_dof_coarse,
2, 0, &interpolation); CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 0, 0, 1.0, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 1, 0, 0.5, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 1, 1, 0.5, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 2, 1, 1.0, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 3, 1, 0.5, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 3, 2, 0.5, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatSetValue(interpolation, 4, 2, 1.0, INSERT_VALUES);
CHKERRQ(ierr);
ierr = MatAssemblyBegin(interpolation, MAT_FINAL_ASSEMBLY);
CHKERRQ(ierr);
ierr = MatAssemblyEnd(interpolation, MAT_FINAL_ASSEMBLY); CHKERRQ(ierr);
TS ts;
ierr = TSCreate(PETSC_COMM_SELF, &ts); CHKERRQ(ierr);
ierr = TSSetProblemType(ts, TS_LINEAR); CHKERRQ(ierr);
ierr = TSSetType(ts, TSBEULER);
ierr = TSSetInitialTimeStep(ts, 0.0, 0.1); CHKERRQ(ierr);
ierr = TSSetSolution(ts, x); CHKERRQ(ierr);
ierr = TSSetIFunction(ts, 0, TSComputeIFunctionLinear, 0);
CHKERRQ(ierr);
ierr = TSSetIJacobian(ts, Id, Id, TSComputeIJacobianConstant, 0);
CHKERRQ(ierr);
KSP ksp;
ierr = TSGetKSP(ts, &ksp); CHKERRQ(ierr);
PC pc;
ierr = KSPGetPC(ksp, &pc); CHKERRQ(ierr);
ierr = PCSetType(pc, PCMG); CHKERRQ(ierr);
ierr = PCMGSetLevels(pc, 2, 0); CHKERRQ(ierr);
ierr = PCMGSetInterpolation(pc, 1, interpolation); CHKERRQ(ierr);
ierr = PCMGSetGalerkin(pc, PETSC_TRUE); CHKERRQ(ierr);
ierr = TSSetFromOptions(ts); CHKERRQ(ierr);
ierr = TSSetUp(ts); CHKERRQ(ierr);
ierr = TSStep(ts); CHKERRQ(ierr);
ierr = TSDestroy(&ts); CHKERRQ(ierr);
ierr = MatDestroy(&interpolation); CHKERRQ(ierr);
ierr = VecDestroy(&x); CHKERRQ(ierr);
ierr = MatDestroy(&Id); CHKERRQ(ierr);
ierr = PetscFinalize(); CHKERRQ(ierr);
return 0;
}
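For reference, the MatSetValue calls above assemble the following 5-by-3 interpolation operator (injection at the coincident fine/coarse points, averaging of the two neighbouring coarse values at the fine points in between):

    [ 1.0  0    0   ]
    [ 0.5  0.5  0   ]
    [ 0    1.0  0   ]
    [ 0    0.5  0.5 ]
    [ 0    0    1.0 ]

If it is useful to confirm what was assembled, adding a MatView() right after the two assembly calls prints the operator (a trivial addition, not part of the program as posted):

    ierr = MatView(interpolation, PETSC_VIEWER_STDOUT_SELF); CHKERRQ(ierr);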