[petsc-users] DMView and DMLoad
Ataollah Mesgarnejad
amesga1 at tigers.lsu.edu
Mon Mar 9 11:28:51 CDT 2015
Dear all,
I'm trying to save and load a distributed DMPlex in HDF5 format with DMView
and DMLoad. The problem is that, even though loading reports no errors, the
DM read back from the file is not equal to the DM that was saved. Also, when
loading the DM, if DMSetType is called before DMLoad (as on line 316
<https://bitbucket.org/petsc/petsc/src/236f5a4d227fe9d0affddb8701edb9509ad39525/src/snes/examples/tutorials/ex12.c?at=master#cl-316>
of src/snes/examples/tutorials/ex12.c), DMLoad produces errors.
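To make the ordering concrete, here is a minimal sketch of the load path
(LoadDistDM is a hypothetical helper name, and I assume distDM.h5 was
previously written by DMView, as in the full code below); moving the
DMSetType call in front of DMLoad is what triggers the errors for me:

#include <petscdmplex.h>
#include <petscviewerhdf5.h>

/* Hypothetical helper: load a DMPlex that DMView previously saved to distDM.h5. */
static PetscErrorCode LoadDistDM(MPI_Comm comm, DM *newdm)
{
  PetscViewer    viewer;
  PetscErrorCode ierr;

  PetscFunctionBeginUser;
  ierr = PetscViewerHDF5Open(comm, "distDM.h5", FILE_MODE_READ, &viewer);CHKERRQ(ierr);
  ierr = DMCreate(comm, newdm);CHKERRQ(ierr);
  /* Calling DMSetType(*newdm, DMPLEX) here, before DMLoad, makes DMLoad error out. */
  ierr = DMLoad(*newdm, viewer);CHKERRQ(ierr);
  ierr = DMSetType(*newdm, DMPLEX);CHKERRQ(ierr); /* after DMLoad this goes through */
  ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}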
I'm including a small code to replicate the problem here plus an input mesh
file.
Best,
Ata
/**
* @file
*
* Created by Ataollah Mesgarnejad on 3/6/15.
*
* This is a test to read and write a distributed DM.
*
*/
static char help[] = "An example of the usage of PetscSF to scatter data back and forth \
between a serial DM and its parallel counterpart.\n";
#include <petscdmplex.h>
#include <petscsnes.h>
#include <petscsf.h>
#include <exodusII.h>
#include <petsc-private/dmimpl.h> /*I "petscdm.h" I*/
#include <petscviewerhdf5.h>
#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **argv)
{
PetscErrorCode ierr;
DM             dm, distDM, distDMold;
char           ifilename[PETSC_MAX_PATH_LEN];
PetscBool      flg;
int            CPU_word_size = 0, IO_word_size = 0, exoid;
float          version;
PetscMPIInt    numproc, rank;
PetscViewer    stdoutViewer, DMPlexHDF5View;
PetscScalar    *VArray;
PetscInt       dim;
PetscInt       numFields   = 2;
PetscInt       numComp[2]  = {1, 1};
PetscInt       numDof[6]   = {1, 0, 0,
                              0, 0, 1}; /* {Vertex, Edge, Cell} per field */
PetscInt       bcFields[1] = {0}, numBC = 0;
//PetscInt     *remoteOffsets;
char           **namelist;
PetscInt       off;
IS             bcPoints[1] = {NULL};
IS             *ISlist;
PetscSection   seqSection, distSection;
Vec            distV, V;
PetscSF        pointSF = NULL; //,pointSFold;
PetscInt       pStart, pEnd, dof;
PetscBool      load_files = PETSC_FALSE, save_files = PETSC_FALSE;
MPI_Comm       comm;
ierr = PetscInitialize(&argc, &argv, (char*)0, help);CHKERRQ(ierr);
ierr = PetscViewerASCIIGetStdout(PETSC_COMM_SELF, &stdoutViewer);CHKERRQ(ierr);
comm = PETSC_COMM_WORLD;
// PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
ierr = PetscOptionsBegin(PETSC_COMM_WORLD, "", "PetscSF Test Options", "none");CHKERRQ(ierr);
ierr = PetscOptionsBool("-save_files", "Save the distributed vector in HDF5 format", "", PETSC_FALSE, &save_files, NULL);CHKERRQ(ierr);
ierr = PetscOptionsBool("-load_files", "Load the distributed vector in HDF5 format", "", PETSC_FALSE, &load_files, NULL);CHKERRQ(ierr);
ierr = PetscOptionsEnd();CHKERRQ(ierr);
ierr = PetscOptionsGetString(PETSC_NULL, "-i", ifilename, sizeof ifilename, &flg);CHKERRQ(ierr);
ierr = MPI_Comm_size(PETSC_COMM_WORLD, &numproc);CHKERRQ(ierr);
ierr = MPI_Comm_rank(PETSC_COMM_WORLD, &rank);CHKERRQ(ierr);
if (!rank)
{
    exoid = ex_open(ifilename, EX_READ, &CPU_word_size, &IO_word_size, &version);
    if (exoid <= 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_LIB, "ex_open(\"%s\",...) did not return a valid file ID", ifilename);
}
else
{
    exoid = -1; /* Not used */
}
ierr = DMPlexCreateExodus(PETSC_COMM_WORLD, exoid, PETSC_FALSE, &dm);CHKERRQ(ierr);
ierr = PetscObjectSetName((PetscObject) dm, "sequential-DM");CHKERRQ(ierr);
ierr = DMGetDimension(dm, &dim);CHKERRQ(ierr);
if (numproc < 2) distDM = dm; /* serial run: no distribution, pointSF stays NULL */
else {ierr = DMPlexDistribute(dm, 0, &pointSF, &distDM);CHKERRQ(ierr);}
ierr = PetscObjectSetName((PetscObject) distDM, "Distributed-DM");CHKERRQ(ierr);
ierr = DMPlexCreateSection(dm, dim, numFields, numComp, numDof,
                           numBC, bcFields, bcPoints, PETSC_NULL,
                           &seqSection);CHKERRQ(ierr);
ierr = DMPlexCreateSection(distDM, dim, numFields, numComp, numDof,
                           numBC, bcFields, bcPoints, PETSC_NULL,
                           &distSection);CHKERRQ(ierr);
ierr = DMSetDefaultSection(dm, seqSection);CHKERRQ(ierr);
ierr = DMSetDefaultSection(distDM, distSection);CHKERRQ(ierr);
ierr = DMCreateGlobalVector(dm, &V);CHKERRQ(ierr);
ierr = VecCreate(comm, &distV);CHKERRQ(ierr);
ierr = PetscObjectSetName((PetscObject) distV, "Distributed-V");CHKERRQ(ierr);
ierr = VecGetArray(V, &VArray);CHKERRQ(ierr);
// fill vertex data
ierr = DMPlexGetDepthStratum(dm, 0, &pStart, &pEnd);CHKERRQ(ierr);
for (int p = pStart; p < pEnd; p++)
{
    ierr = PetscSectionGetDof(seqSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(seqSection, p, &off);CHKERRQ(ierr);
    for (int d = 0; d < dof; d++)
    {
        VArray[off + d] = p;
    }
}
// fill cell data
ierr = DMPlexGetDepthStratum(dm, 1, &pStart, &pEnd);CHKERRQ(ierr);
for (int p = pStart; p < pEnd; p++)
{
    ierr = PetscSectionGetDof(seqSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(seqSection, p, &off);CHKERRQ(ierr);
    for (int d = 0; d < dof; d++)
    {
        VArray[off + d] = -p - 1;
    }
}
ierr = VecRestoreArray(V, &VArray);CHKERRQ(ierr);
ierr = DMCreateFieldIS(dm, &numFields, &namelist, &ISlist);CHKERRQ(ierr);
ierr = DMPlexDistributeField(dm, pointSF, seqSection, V, distSection, distV);CHKERRQ(ierr);
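/* distV now holds the serial field values scattered onto the distributed DM
   through the point SF; the HDF5 save/load round trip below is where the
   problem shows up. */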
#if defined(PETSC_HAVE_HDF5)
if (save_files)
{
    ierr = PetscPrintf(PETSC_COMM_WORLD, "==== Distributed DM\n");CHKERRQ(ierr);
    ierr = DMView(distDM, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);CHKERRQ(ierr);
    // Save distributed data
    ierr = PetscViewerHDF5Open(PetscObjectComm((PetscObject) distDM), "distDM.h5", FILE_MODE_WRITE, &DMPlexHDF5View);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD, "Writing dist DM ...\n");CHKERRQ(ierr);
    ierr = DMView(distDM, DMPlexHDF5View);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&DMPlexHDF5View);CHKERRQ(ierr);
}
else if (load_files)
{
    ierr = PetscPrintf(PETSC_COMM_WORLD, "Loading dist vectors ...\n");CHKERRQ(ierr);
    ierr = PetscViewerHDF5Open(PETSC_COMM_SELF, "distDM.h5", FILE_MODE_READ, &DMPlexHDF5View);CHKERRQ(ierr);
    ierr = DMCreate(comm, &distDMold);CHKERRQ(ierr);
    ierr = DMLoad(distDMold, DMPlexHDF5View);CHKERRQ(ierr);
    ierr = DMSetType(distDMold, DMPLEX);CHKERRQ(ierr);
    /*ierr = DMPlexCreateSection(distDMold, dim, numFields, numComp, numDof,
                                 numBC, bcFields, bcPoints, PETSC_NULL,
                                 &distSectionold);CHKERRQ(ierr);
    ierr = DMSetDefaultSection(distDMold, distSectionold);CHKERRQ(ierr);
    ierr = DMGetPointSF(distDMold, &pointSFold);CHKERRQ(ierr);*/
    ierr = PetscViewerDestroy(&DMPlexHDF5View);CHKERRQ(ierr);
    ierr = PetscPrintf(PETSC_COMM_WORLD, "==== Read DM\n");CHKERRQ(ierr);
    ierr = DMView(distDMold, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
    ierr = PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT);CHKERRQ(ierr);
    ierr = DMPlexEqual(distDM, distDMold, &flg);CHKERRQ(ierr);
    if (flg)
    {
        ierr = PetscPrintf(PETSC_COMM_WORLD, "\n DMs equal\n");CHKERRQ(ierr);
    }
    else
    {
        ierr = PetscPrintf(PETSC_COMM_WORLD, "\n DMs are not equal\n\n");CHKERRQ(ierr);
    }
}
#endif
ierr = VecDestroy(&V); CHKERRQ(ierr);
ierr = VecDestroy(&distV); CHKERRQ(ierr);
//ierr = VecDestroy(&seqV); CHKERRQ(ierr);
ierr = DMDestroy(&dm);CHKERRQ(ierr);
ierr = DMDestroy(&distDM);CHKERRQ(ierr);
ierr = PetscFinalize();
return 0;
}
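For reference, this is how I run the test (assuming the executable is built as
testDistributeSaveLoad; the attached Rect-tri3.gen is the input mesh):

mpirun -np 2 ./testDistributeSaveLoad -i Rect-tri3.gen -save_files
mpirun -np 2 ./testDistributeSaveLoad -i Rect-tri3.gen -load_files

The first run writes distDM.h5; the second run loads it back and prints whether
DMPlexEqual considers the loaded DM equal to the freshly distributed one (for
me it prints "DMs are not equal").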
Attachments:
- testDistributeSaveLoad.cpp (text/x-c++src, 9270 bytes): <http://lists.mcs.anl.gov/pipermail/petsc-users/attachments/20150309/c499a5dc/attachment-0001.cpp>
- Rect-tri3.gen (application/octet-stream, 4972 bytes): <http://lists.mcs.anl.gov/pipermail/petsc-users/attachments/20150309/c499a5dc/attachment-0001.obj>