testing new repository
Satish Balay
balay at mcs.anl.gov
Sat Oct 6 22:19:57 CDT 2007
On Fri, 5 Oct 2007, Lisandro Dalcin wrote:
> And please, send me the failures...
Lisandro,
I haven't checked the nightly builds properly yet. Some of the issues
I've been looking at were with code already in petsc-dev. [So the
current checks are from the tests I ran on my laptop.]
Most of the 'strict aliasing' problems I've seen so far are compile
issues with sieve and other external packages [spooles, superlu_dist,
pmetis.c, etc.] with respect to PetscObject usage.
[The fix for the PetscObject stuff was simple enough for me to do and
test.] I'm attaching a patch with the changes that worked; let me know
if it is OK to commit and push.
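For reference, here is a minimal sketch (illustrative, not taken from
the patch) of the access pattern involved. With the opaque headers in
petsc-dev, base fields such as comm and type_name live only in the
PetscObject header, so a typed handle must be cast before they are
read; conversely, fields like comm_spooles that sit on an
implementation struct are accessed directly, which is why the patch
also removes some spurious casts. The function name below is made up.

#include "petscviewer.h"

/* Sketch of the cast-to-base-header pattern applied in the patch. */
PetscErrorCode SketchViewerType(PetscViewer viewer)
{
  MPI_Comm       comm;
  const char    *type;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* old form, no longer compiles: viewer->comm, viewer->type_name */
  comm = ((PetscObject)viewer)->comm;      /* cast to the base header */
  type = ((PetscObject)viewer)->type_name;
  ierr = PetscPrintf(comm,"viewer type: %s\n",type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}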
There is also the following memory leak, which I haven't investigated
yet. Perhaps there is some incorrect reference counting somewhere; see
the sketch after the log below.
>>>>>>>>>>>>>>>>>>
testexamples_C in: /home/balay/tmp/petsc-dalcinl/src/mat/examples/tests
0a1,10
> [0]Total space allocated 1380 bytes
> [ 0]460 bytes MatCreate() line 80 in src/mat/utils/gcreate.c
> [0] MatConvert_SeqBAIJ_SeqSBAIJ() line 276 in src/mat/impls/sbaij/seq/aijsbaij.c
> [0] MatConvert() line 3142 in src/mat/interface/matrix.c
> [ 0]460 bytes MatCreate() line 80 in src/mat/utils/gcreate.c
> [0] MatDuplicate_SeqBAIJ() line 2448 in src/mat/impls/baij/seq/baij.c
> [0] MatConvert() line 3142 in src/mat/interface/matrix.c
> [ 0]460 bytes MatCreate() line 80 in src/mat/utils/gcreate.c
> [0] MatConvert_SeqBAIJ_SeqAIJ() line 17 in src/mat/impls/baij/seq/aijbaij.c
> [0] MatConvert() line 3142 in src/mat/interface/matrix.c
Possible problem with ex55_1, diffs above
52a53,55
> [0]Total space allocated 460 bytes
> [ 0]460 bytes MatCreate() line 80 in src/mat/utils/gcreate.c
> [0] MatCreateSeqAIJ() line 2777 in src/mat/impls/aij/seq/aij.c
Possible problem with ex114_1, diffs above
<<<<<<<<<<<<<<<
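My guess (unverified) is that some conversion path creates an
intermediate matrix and never drops its reference. A hypothetical
sketch of the failure mode that would match the dump above:

#include "petscmat.h"

/* Hypothetical illustration, not a confirmed diagnosis: MatConvert()
   with MAT_INITIAL_MATRIX hands back a brand-new Mat, so each such
   matrix must eventually be passed to MatDestroy(). Omitting that
   call produces 'MatCreate() ... MatConvert()' entries in the
   malloc dump like the ones above. */
PetscErrorCode SketchConvertLeak(Mat A)
{
  Mat            B;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatConvert(A,MATSEQAIJ,MAT_INITIAL_MATRIX,&B);CHKERRQ(ierr);
  /* ... use B ... */
  ierr = MatDestroy(B);CHKERRQ(ierr); /* dropping this line leaks B */
  PetscFunctionReturn(0);
}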
Prometheus also needs some PetscObject changes; I can send Mark the
changes to update the prometheus tarball later.
thanks,
Satish
-------------- next part --------------
diff -r 069050f22c2f src/dm/mesh/impls/cartesian/cartesian.c
--- a/src/dm/mesh/impls/cartesian/cartesian.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/dm/mesh/impls/cartesian/cartesian.c Sat Oct 06 21:57:12 2007 -0500
@@ -110,7 +110,7 @@ PetscErrorCode PETSCDM_DLLEXPORT MeshVie
} else if (isdraw){
SETERRQ(PETSC_ERR_SUP, "Draw viewer not implemented for Cartesian Mesh");
} else {
- SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this mesh object", viewer->type_name);
+ SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this mesh object", ((PetscObject)viewer)->type_name);
}
PetscFunctionReturn(0);
}
diff -r 069050f22c2f src/dm/mesh/mesh.c
--- a/src/dm/mesh/mesh.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/dm/mesh/mesh.c Sat Oct 06 21:57:12 2007 -0500
@@ -247,7 +247,7 @@ PetscErrorCode MeshView_Sieve(const ALE:
} else if (isdraw){
SETERRQ(PETSC_ERR_SUP, "Draw viewer not implemented for Mesh");
} else {
- SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this mesh object", viewer->type_name);
+ SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this mesh object", ((PetscObject)viewer)->type_name);
}
PetscFunctionReturn(0);
}
@@ -317,7 +317,7 @@ PetscErrorCode PETSCDM_DLLEXPORT MeshVie
PetscValidHeaderSpecific(mesh, MESH_COOKIE, 1);
PetscValidType(mesh, 1);
if (!viewer) {
- ierr = PetscViewerASCIIGetStdout(mesh->comm,&viewer);CHKERRQ(ierr);
+ ierr = PetscViewerASCIIGetStdout(((PetscObject)mesh)->comm,&viewer);CHKERRQ(ierr);
}
PetscValidHeaderSpecific(viewer, PETSC_VIEWER_COOKIE, 2);
PetscCheckSameComm(mesh, 1, viewer, 2);
@@ -596,7 +596,7 @@ PetscErrorCode PETSCDM_DLLEXPORT MeshSet
ierr = PetscTypeCompare((PetscObject)mesh,type,&match);CHKERRQ(ierr);
if (match) PetscFunctionReturn(0);
- ierr = PetscFListFind(MeshList,mesh->comm,type,(void (**)(void)) &r);CHKERRQ(ierr);
+ ierr = PetscFListFind(MeshList,((PetscObject)mesh)->comm,type,(void (**)(void)) &r);CHKERRQ(ierr);
if (!r) SETERRQ1(PETSC_ERR_ARG_UNKNOWN_TYPE,"Unable to find requested Mesh type %s",type);
/* Destroy the previous private Mesh context */
if (mesh->ops->destroy) { ierr = (*mesh->ops->destroy)(mesh);CHKERRQ(ierr); }
@@ -631,7 +631,7 @@ PetscErrorCode PETSCDM_DLLEXPORT MeshGet
PetscFunctionBegin;
PetscValidHeaderSpecific(mesh,MESH_COOKIE,1);
PetscValidPointer(type,2);
- *type = mesh->type_name;
+ *type = ((PetscObject)mesh)->type_name;
PetscFunctionReturn(0);
}
diff -r 069050f22c2f src/dm/mesh/section.c
--- a/src/dm/mesh/section.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/dm/mesh/section.c Sat Oct 06 21:57:12 2007 -0500
@@ -35,7 +35,7 @@ PetscErrorCode SectionRealView_Sieve(Sec
} else if (isdraw){
SETERRQ(PETSC_ERR_SUP, "Draw viewer not implemented for Section");
} else {
- SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this section object", viewer->type_name);
+ SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this section object", ((PetscObject)viewer)->type_name);
}
PetscFunctionReturn(0);
}
@@ -89,7 +89,7 @@ PetscErrorCode SectionRealView(SectionRe
PetscValidHeaderSpecific(section, SECTIONREAL_COOKIE, 1);
PetscValidType(section, 1);
if (!viewer) {
- ierr = PetscViewerASCIIGetStdout(section->comm,&viewer);CHKERRQ(ierr);
+ ierr = PetscViewerASCIIGetStdout(((PetscObject)section)->comm,&viewer);CHKERRQ(ierr);
}
PetscValidHeaderSpecific(viewer, PETSC_VIEWER_COOKIE, 2);
PetscCheckSameComm(section, 1, viewer, 2);
@@ -844,7 +844,7 @@ PetscErrorCode SectionIntView_Sieve(Sect
} else if (isdraw){
SETERRQ(PETSC_ERR_SUP, "Draw viewer not implemented for Section");
} else {
- SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this section object", viewer->type_name);
+ SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by this section object", ((PetscObject)viewer)->type_name);
}
PetscFunctionReturn(0);
}
@@ -898,7 +898,7 @@ PetscErrorCode SectionIntView(SectionInt
PetscValidHeaderSpecific(section, SECTIONINT_COOKIE, 1);
PetscValidType(section, 1);
if (!viewer) {
- ierr = PetscViewerASCIIGetStdout(section->comm,&viewer);CHKERRQ(ierr);
+ ierr = PetscViewerASCIIGetStdout(((PetscObject)section)->comm,&viewer);CHKERRQ(ierr);
}
PetscValidHeaderSpecific(viewer, PETSC_VIEWER_COOKIE, 2);
PetscCheckSameComm(section, 1, viewer, 2);
diff -r 069050f22c2f src/ksp/pc/impls/spai/ispai.c
--- a/src/ksp/pc/impls/spai/ispai.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/ksp/pc/impls/spai/ispai.c Sat Oct 06 21:57:12 2007 -0500
@@ -641,7 +641,6 @@ PetscErrorCode PETSCKSP_DLLEXPORT PCCrea
pc->ops->view = PCView_SPAI;
pc->ops->setfromoptions = PCSetFromOptions_SPAI;
- pc->name = 0;
ispai->epsilon = .4;
ispai->nbsteps = 5;
ispai->max = 5000;
diff -r 069050f22c2f src/mat/impls/aij/mpi/spooles/mpiaijspooles.c
--- a/src/mat/impls/aij/mpi/spooles/mpiaijspooles.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/mat/impls/aij/mpi/spooles/mpiaijspooles.c Sat Oct 06 21:57:12 2007 -0500
@@ -47,7 +47,7 @@ PetscErrorCode MatLUFactorSymbolic_MPIAI
lu->flg = DIFFERENT_NONZERO_PATTERN;
lu->options.useQR = PETSC_FALSE;
- ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(((PetscObject)lu)->comm_spooles));CHKERRQ(ierr);
+ ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_spooles));CHKERRQ(ierr);
if (!info->dtcol) {
lu->options.pivotingflag = SPOOLES_NO_PIVOTING;
diff -r 069050f22c2f src/mat/impls/aij/mpi/spooles/mpispooles.c
--- a/src/mat/impls/aij/mpi/spooles/mpispooles.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/mat/impls/aij/mpi/spooles/mpispooles.c Sat Oct 06 21:57:12 2007 -0500
@@ -32,7 +32,7 @@ PetscErrorCode MatDestroy_MPIAIJSpooles(
SubMtxManager_free(lu->mtxmanager);
DenseMtx_free(lu->mtxX);
DenseMtx_free(lu->mtxY);
- ierr = MPI_Comm_free(&(((PetscObject)lu)->comm_spooles));CHKERRQ(ierr);
+ ierr = MPI_Comm_free(&(lu->comm_spooles));CHKERRQ(ierr);
if ( lu->scat ){
ierr = VecDestroy(lu->vec_spooles);CHKERRQ(ierr);
ierr = ISDestroy(lu->iden);CHKERRQ(ierr);
@@ -98,7 +98,7 @@ PetscErrorCode MatSolve_MPISpooles(Mat A
by FrontMtx_MPI_split() is unknown */
lu->firsttag = 0;
newY = DenseMtx_MPI_splitByRows(lu->mtxY, lu->vtxmapIV, lu->stats, lu->options.msglvl,
- lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->options.msgFile, lu->firsttag, lu->comm_spooles);
DenseMtx_free(lu->mtxY);
lu->mtxY = newY ;
lu->firsttag += size ;
@@ -113,9 +113,9 @@ PetscErrorCode MatSolve_MPISpooles(Mat A
to match the final rows and columns in the fronts */
IV *rowmapIV ;
rowmapIV = FrontMtx_MPI_rowmapIV(lu->frontmtx, lu->ownersIV, lu->options.msglvl,
- lu->options.msgFile, ((PetscObject)lu)->comm_spooles);
+ lu->options.msgFile, lu->comm_spooles);
newY = DenseMtx_MPI_splitByRows(lu->mtxY, rowmapIV, lu->stats, lu->options.msglvl,
- lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->options.msgFile, lu->firsttag, lu->comm_spooles);
DenseMtx_free(lu->mtxY);
lu->mtxY = newY ;
IV_free(rowmapIV);
@@ -133,7 +133,7 @@ PetscErrorCode MatSolve_MPISpooles(Mat A
solvemanager = SubMtxManager_new();
SubMtxManager_init(solvemanager, NO_LOCK, 0);
FrontMtx_MPI_solve(lu->frontmtx, lu->mtxX, lu->mtxY, solvemanager, lu->solvemap, lu->cpus,
- lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag, lu->comm_spooles);
SubMtxManager_free(solvemanager);
if ( lu->options.msglvl > 2 ) {
ierr = PetscFPrintf(PETSC_COMM_SELF,lu->options.msgFile, "\n solution in new ordering");CHKERRQ(ierr);
@@ -323,7 +323,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
*/
graph = Graph_new();
adjIVL = InpMtx_MPI_fullAdjacency(lu->mtxA, lu->stats,
- lu->options.msglvl, lu->options.msgFile, ((PetscObject)lu)->comm_spooles);
+ lu->options.msglvl, lu->options.msgFile, lu->comm_spooles);
nedges = IVL_tsize(adjIVL);
Graph_init2(graph, 0, M, 0, nedges, M, nedges, adjIVL, NULL, NULL);
if ( lu->options.msglvl > 2 ) {
@@ -364,7 +364,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
DVfree(opcounts);
lu->frontETree = ETree_MPI_Bcast(lu->frontETree, root,
- lu->options.msglvl, lu->options.msgFile, ((PetscObject)lu)->comm_spooles);
+ lu->options.msglvl, lu->options.msgFile, lu->comm_spooles);
if ( lu->options.msglvl > 2 ) {
ierr = PetscFPrintf(PETSC_COMM_SELF,lu->options.msgFile, "\n\n best front tree");CHKERRQ(ierr);
ETree_writeForHumanEye(lu->frontETree, lu->options.msgFile);
@@ -406,7 +406,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
/* redistribute the matrix */
lu->firsttag = 0 ;
newA = InpMtx_MPI_split(lu->mtxA, lu->vtxmapIV, lu->stats,
- lu->options.msglvl, lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->options.msglvl, lu->options.msgFile, lu->firsttag, lu->comm_spooles);
lu->firsttag += size ;
InpMtx_free(lu->mtxA);
@@ -420,7 +420,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
/* compute the symbolic factorization */
lu->symbfacIVL = SymbFac_MPI_initFromInpMtx(lu->frontETree, lu->ownersIV, lu->mtxA,
- lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag, lu->comm_spooles);
lu->firsttag += lu->frontETree->nfront ;
if ( lu->options.msglvl > 2 ) {
ierr = PetscFPrintf(PETSC_COMM_SELF,lu->options.msgFile, "\n\n local symbolic factorization");CHKERRQ(ierr);
@@ -453,7 +453,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
MPI_Barrier(((PetscObject)A)->comm);
lu->firsttag = 0;
newA = InpMtx_MPI_split(lu->mtxA, lu->vtxmapIV, lu->stats,
- lu->options.msglvl, lu->options.msgFile, lu->firsttag,((PetscObject)lu)->comm_spooles);
+ lu->options.msglvl, lu->options.msgFile, lu->firsttag,lu->comm_spooles);
lu->firsttag += size ;
InpMtx_free(lu->mtxA);
@@ -486,7 +486,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
chvmanager = ChvManager_new();
ChvManager_init(chvmanager, NO_LOCK, 0);
- tagbound = maxTagMPI(((PetscObject)lu)->comm_spooles);
+ tagbound = maxTagMPI(lu->comm_spooles);
lasttag = lu->firsttag + 3*lu->frontETree->nfront + 2;
/* if(!rank) PetscPrintf(PETSC_COMM_SELF,"\n firsttag: %d, nfront: %d\n",lu->firsttag, lu->frontETree->nfront);*/
if ( lasttag > tagbound ) {
@@ -495,7 +495,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
}
rootchv = FrontMtx_MPI_factorInpMtx(lu->frontmtx, lu->mtxA, lu->options.tau, droptol,
chvmanager, lu->ownersIV, lookahead, &sierr, lu->cpus,
- lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag,((PetscObject)lu)->comm_spooles);
+ lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag,lu->comm_spooles);
ChvManager_free(chvmanager);
lu->firsttag = lasttag;
if ( lu->options.msglvl > 2 ) {
@@ -537,7 +537,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
lu->firsttag, lasttag, tagbound);
}
FrontMtx_MPI_postProcess(lu->frontmtx, lu->ownersIV, lu->stats, lu->options.msglvl,
- lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->options.msgFile, lu->firsttag, lu->comm_spooles);
lu->firsttag += 5*size ;
if ( lu->options.msglvl > 2 ) {
ierr = PetscFPrintf(PETSC_COMM_SELF,lu->options.msgFile, "\n\n numeric factorization after post-processing");CHKERRQ(ierr);
@@ -559,7 +559,7 @@ PetscErrorCode MatFactorNumeric_MPISpool
/* redistribute the submatrices of the factors */
FrontMtx_MPI_split(lu->frontmtx, lu->solvemap,
- lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag, ((PetscObject)lu)->comm_spooles);
+ lu->stats, lu->options.msglvl, lu->options.msgFile, lu->firsttag, lu->comm_spooles);
if ( lu->options.msglvl > 2 ) {
ierr = PetscFPrintf(PETSC_COMM_SELF,lu->options.msgFile, "\n\n numeric factorization after split");CHKERRQ(ierr);
FrontMtx_writeForHumanEye(lu->frontmtx, lu->options.msgFile);
diff -r 069050f22c2f src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c
--- a/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/mat/impls/aij/mpi/superlu_dist/superlu_dist.c Sat Oct 06 21:57:12 2007 -0500
@@ -123,7 +123,7 @@ PetscErrorCode MatDestroy_SuperLU_DIST(M
/* Release the SuperLU_DIST process grid. */
superlu_gridexit(&lu->grid);
- ierr = MPI_Comm_free(&(((PetscObject)lu)->comm_superlu));CHKERRQ(ierr);
+ ierr = MPI_Comm_free(&(lu->comm_superlu));CHKERRQ(ierr);
}
ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
@@ -463,7 +463,7 @@ PetscErrorCode MatLUFactorSymbolic_Super
*/
set_default_options_dist(&options);
- ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(((PetscObject)lu)->comm_superlu));CHKERRQ(ierr);
+ ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_superlu));CHKERRQ(ierr);
ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
/* Default num of process columns and rows */
lu->npcol = (PetscMPIInt)(0.5 + sqrt((PetscReal)size));
@@ -550,7 +550,7 @@ PetscErrorCode MatLUFactorSymbolic_Super
PetscOptionsEnd();
/* Initialize the SuperLU process grid. */
- superlu_gridinit(((PetscObject)lu)->comm_superlu, lu->nprow, lu->npcol, &lu->grid);
+ superlu_gridinit(lu->comm_superlu, lu->nprow, lu->npcol, &lu->grid);
/* Initialize ScalePermstruct and LUstruct. */
ScalePermstructInit(M, N, &lu->ScalePermstruct);
diff -r 069050f22c2f src/mat/impls/dense/mpi/plapack/plapack.c
--- a/src/mat/impls/dense/mpi/plapack/plapack.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/mat/impls/dense/mpi/plapack/plapack.c Sat Oct 06 21:57:12 2007 -0500
@@ -159,8 +159,8 @@ PetscErrorCode MatSolve_Plapack(Mat A,Ve
ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,loc_m*loc_stride,loc_buf,&loc_x);CHKERRQ(ierr);
if (!lu->pla_solved){
- PLA_Temp_comm_row_info(lu->templ,&((PetscObject)lu)->comm_2d,&r_rank,&r_nproc);
- PLA_Temp_comm_col_info(lu->templ,&((PetscObject)lu)->comm_2d,&c_rank,&c_nproc);
+ PLA_Temp_comm_row_info(lu->templ,&lu->comm_2d,&r_rank,&r_nproc);
+ PLA_Temp_comm_col_info(lu->templ,&lu->comm_2d,&c_rank,&c_nproc);
/* printf(" [%d] rank: %d %d, nproc: %d %d\n",rank,r_rank,c_rank,r_nproc,c_nproc); */
/* Create IS and cts for VecScatterring */
@@ -334,7 +334,7 @@ PetscErrorCode MatFactorSymbolic_Plapack
/* Create a 2D communicator */
PLA_Comm_1D_to_2D(comm,lu->nprows,lu->npcols,&comm_2d);
- ((PetscObject)lu)->comm_2d = comm_2d;
+ lu->comm_2d = comm_2d;
/* Initialize PLAPACK */
PLA_Init(comm_2d);
diff -r 069050f22c2f src/mat/impls/sbaij/mpi/spooles/mpisbaijspooles.c
--- a/src/mat/impls/sbaij/mpi/spooles/mpisbaijspooles.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/mat/impls/sbaij/mpi/spooles/mpisbaijspooles.c Sat Oct 06 21:57:12 2007 -0500
@@ -90,7 +90,7 @@ PetscErrorCode MatCholeskyFactorSymbolic
lu->options.useQR = PETSC_FALSE;
lu->options.symflag = SPOOLES_SYMMETRIC; /* default */
- ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(((PetscObject)lu)->comm_spooles));CHKERRQ(ierr);
+ ierr = MPI_Comm_dup(((PetscObject)A)->comm,&(lu->comm_spooles));CHKERRQ(ierr);
*F = B;
PetscFunctionReturn(0);
}
diff -r 069050f22c2f src/mat/partition/impls/pmetis/pmetis.c
--- a/src/mat/partition/impls/pmetis/pmetis.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/mat/partition/impls/pmetis/pmetis.c Sat Oct 06 21:57:12 2007 -0500
@@ -125,7 +125,7 @@ PetscErrorCode MatPartitioningView_Parme
ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d]Number of cuts found %d\n",rank,parmetis->cuts);CHKERRQ(ierr);
ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
} else {
- SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)((PetscObject))->type_name);
+ SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported for this Parmetis partitioner",((PetscObject)viewer)->type_name);
}
PetscFunctionReturn(0);
diff -r 069050f22c2f src/ts/impls/implicit/sundials/sundials.c
--- a/src/ts/impls/implicit/sundials/sundials.c Fri Oct 05 23:01:32 2007 -0500
+++ b/src/ts/impls/implicit/sundials/sundials.c Sat Oct 06 21:57:12 2007 -0500
@@ -234,7 +234,7 @@ PetscErrorCode TSDestroy_Sundials(TS ts)
if (cvode->rhs) {ierr = VecDestroy(cvode->rhs);CHKERRQ(ierr);}
if (cvode->w1) {ierr = VecDestroy(cvode->w1);CHKERRQ(ierr);}
if (cvode->w2) {ierr = VecDestroy(cvode->w2);CHKERRQ(ierr);}
- ierr = MPI_Comm_free(&(((PetscObject)cvode)->comm_sundials));CHKERRQ(ierr);
+ ierr = MPI_Comm_free(&(cvode->comm_sundials));CHKERRQ(ierr);
ierr = PetscFree(cvode);CHKERRQ(ierr);
PetscFunctionReturn(0);
}
@@ -255,7 +255,7 @@ PetscErrorCode TSSetUp_Sundials_Nonlinea
ierr = VecGetLocalSize(ts->vec_sol,&locsize);CHKERRQ(ierr);
/* allocate the memory for N_Vec y */
- cvode->y = N_VNew_Parallel(((PetscObject)cvode)->comm_sundials,locsize,glosize);
+ cvode->y = N_VNew_Parallel(cvode->comm_sundials,locsize,glosize);
if (!cvode->y) SETERRQ(1,"cvode->y is not allocated");
/* initialize N_Vec y */
@@ -797,7 +797,7 @@ PetscErrorCode PETSCTS_DLLEXPORT TSCreat
cvode->exact_final_time = PETSC_FALSE;
- ierr = MPI_Comm_dup(((PetscObject)ts)->comm,&(((PetscObject)cvode)->comm_sundials));CHKERRQ(ierr);
+ ierr = MPI_Comm_dup(((PetscObject)ts)->comm,&(cvode->comm_sundials));CHKERRQ(ierr);
/* set tolerance for Sundials */
cvode->abstol = 1e-6;
cvode->reltol = 1e-6;