[petsc-users] Global Vector Generated from Section memory error

Nicholas Arnold-Medabalimi narnoldm at umich.edu
Wed Dec 28 13:10:55 CST 2022


Hi Petsc Users

I've been working with vectors generated from a DM and have been getting some
odd memory errors. Using Valgrind, I was able to trace the issue to
DMCreateGlobalVector. I've reduced the code to a relatively simple routine
(very similar to example 7) and attached it. I suspect the issue comes down to
something improperly set in the section. When integrated into the larger code,
it runs correctly 10-30% of the time and otherwise gives a memory corruption
error.
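
In case it helps, this is roughly the sanity check I have been running by hand
(it is not in the attached file; dm, state_full, and rank are the DM, global
vector, and MPI rank from the reproducer): compare the offsets the local
section reports for each cell against the local size of the global vector,
since a write past that size would match the trace below.

/* Sketch only: not part of the attached reproducer. Flags any cell whose
   local-section offset would run past the owned part of the global vector. */
PetscSection sec;
PetscInt cStart, cEnd, nlocal, off, dof;
DMGetLocalSection(dm, &sec);
DMPlexGetHeightStratum(dm, 0, &cStart, &cEnd);
VecGetLocalSize(state_full, &nlocal); /* owned entries only */
for (PetscInt c = cStart; c < cEnd; c++)
{
    PetscSectionGetOffset(sec, c, &off);
    PetscSectionGetDof(sec, c, &dof);
    if (off + dof > nlocal) /* a write here would be out of bounds */
        PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Rank %d, Cell %d: offset %d + dof %d > local size %d\n", rank, c, off, dof, nlocal);
}
PetscSynchronizedFlush(PETSC_COMM_WORLD, NULL);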

 Any insight on the issue or possible error on my part would be appreciated.


Valgrind reports the following error:

==27064== Invalid write of size 8
==27064==    at 0x10C91E: main (section_vector_build.cpp:184)
==27064==  Address 0xc4aa248 is 4 bytes after a block of size 3,204 alloc'd
==27064==    at 0x483E0F0: memalign (in
/usr/lib/x86_64-linux-gnu/valgrind/vgpreload_memcheck-amd64-linux.so)
==27064==    by 0x483E212: posix_memalign (in
/usr/lib/x86_64-linux-gnu/valgrind/vgpreload_memcheck-amd64-linux.so)
==27064==    by 0x4C4DAB0: PetscMallocAlign (mal.c:54)
==27064==    by 0x4C5262F: PetscTrMallocDefault (mtr.c:186)
==27064==    by 0x4C501F7: PetscMallocA (mal.c:420)
==27064==    by 0x527E8A9: VecCreate_MPI_Private (pbvec.c:485)
==27064==    by 0x527F04F: VecCreate_MPI (pbvec.c:523)
==27064==    by 0x53E7097: VecSetType (vecreg.c:89)
==27064==    by 0x527FBC8: VecCreate_Standard (pbvec.c:547)
==27064==    by 0x53E7097: VecSetType (vecreg.c:89)
==27064==    by 0x6CD77C0: DMCreateGlobalVector_Section_Private (dmi.c:58)
==27064==    by 0x61D52DB: DMCreateGlobalVector_Plex (plexcreate.c:4130)
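
For reference, the trace above came from a run along these lines (the
executable name and the option values are just what I happen to use locally):

mpiexec -n 2 valgrind --track-origins=yes ./section_vector_build -meshdim 20 -overlap 1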



-- 
Nicholas Arnold-Medabalimi

Ph.D. Candidate
Computational Aeroscience Lab
University of Michigan
-------------- next part --------------
#include <iostream>
#include <math.h>

#include <petscdm.h>
#include <petscksp.h>
#include <petscsnes.h>
#include <petscdmplex.h>
#include <petscsf.h>

#include "hdf5.h"


#define PI 3.14159265

static char help[] = "Show vector build seg fault error\n\n";
int rank, size;

PetscErrorCode ViewDM(DM V, std::string filename)
{
    PetscViewer viewer;
    PetscErrorCode ierr;

    ierr = PetscViewerCreate(PETSC_COMM_WORLD, &viewer); CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERVTK); CHKERRQ(ierr);
    ierr = PetscViewerFileSetMode(viewer, FILE_MODE_WRITE); CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, filename.c_str()); CHKERRQ(ierr);
    ierr = DMView(V, viewer); CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);
    return 0;
}

PetscErrorCode ViewVec(Vec V, std::string filename)
{
    PetscViewer viewer;
    PetscErrorCode ierr;

    ierr = PetscViewerCreate(PETSC_COMM_WORLD, &viewer); CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERVTK); CHKERRQ(ierr);
    ierr = PetscViewerFileSetMode(viewer, FILE_MODE_WRITE); CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, filename.c_str()); CHKERRQ(ierr);
    ierr = VecView(V, viewer); CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer); CHKERRQ(ierr);
    return 0;
}

PetscErrorCode addSectionToDM(DM &dm, Vec &state)
{
    PetscErrorCode ierr;
    PetscInt p0, p1, c0, c1;
    DMPlexGetHeightStratum(dm, 0, &c0, &c1);

    DMPlexGetChart(dm, &p0, &p1);
    PetscSection section_full;
    PetscSectionCreate(PETSC_COMM_WORLD, &section_full);
    PetscSectionSetNumFields(section_full, 2);
    PetscSectionSetChart(section_full, p0, p1);
    PetscSectionSetFieldComponents(section_full, 0, 1);
    PetscSectionSetFieldComponents(section_full, 1, 1);
    PetscSectionSetFieldName(section_full, 0, "Pressure");
    PetscSectionSetFieldName(section_full, 1, "Temperature");

    for (PetscInt i = c0; i < c1; i++)
    {
        PetscSectionSetDof(section_full, i, 2);
        PetscSectionSetFieldDof(section_full, i, 0, 1);
        PetscSectionSetFieldDof(section_full, i, 1, 1);
    }
    PetscSectionSetUp(section_full);
    DMSetNumFields(dm, 2);
    DMSetLocalSection(dm, section_full);

    //PetscSectionView(section_full, PETSC_VIEWER_STDOUT_WORLD);

    PetscSectionDestroy(&section_full);
    
    ierr=DMCreateGlobalVector(dm, &state);
    CHKERRQ(ierr);
    return 0;
}

int main(int argc, char **argv)
{
    PetscErrorCode ierr;

    // Creates Petsc world communicator just like MPI, No need for additional MPI call.
    ierr = PetscInitialize(&argc, &argv, (char *)0, help);

    MPI_Comm_size(PETSC_COMM_WORLD, &size);
    MPI_Comm_rank(PETSC_COMM_WORLD, &rank);
    if (!rank)
    {
        std::cout << "Running on " << size << " processors" << std::endl;
        std::cout << help << std::endl;
    }

    DM dm, dmDist;
    PetscInt dim = 2, overlap = 1;
    PetscInt faces[2];

    /// Create 2D mesh
    // -meshdim will set the square dimension of the mesh
    // overlap sets the number of cells around each partition

    faces[0] = 20;
    PetscOptionsGetInt(NULL, NULL, "-meshdim", &(faces[0]), NULL);
    PetscOptionsGetInt(NULL, NULL, "-overlap", &overlap, NULL);
    faces[1] = faces[0];
    PetscSF distributionSF;
    PetscBool simplex = PETSC_FALSE, dmInterped = PETSC_TRUE;
    PetscReal lower[2] = {0.0, 0.0};
    PetscReal upper[2] = {2.0, 2.0};
    ierr = DMPlexCreateBoxMesh(PETSC_COMM_WORLD, dim, simplex, faces, lower, upper, /* periodicity */ NULL, dmInterped, &dm);
    CHKERRQ(ierr);
    PetscPrintf(PETSC_COMM_WORLD, "Mesh created\n");

    double t1 = 0, t2 = 0;

    t1 = MPI_Wtime();
    ierr = DMPlexDistribute(dm, overlap, &distributionSF, &dmDist);

    t2 = MPI_Wtime();
    CHKERRQ(ierr);
    if (dmDist)
    {
        ierr = DMDestroy(&dm);
        CHKERRQ(ierr);
        dm = dmDist;
    }
    PetscPrintf(PETSC_COMM_WORLD, "Mesh distributed\n");


    PetscInt c0, c1, e0, e1, v0, v1;
    if (dim == 2)
    {
        DMPlexGetHeightStratum(dm, 0, &c0, &c1);
        DMPlexGetHeightStratum(dm, 1, &e0, &e1);
        DMPlexGetHeightStratum(dm, 2, &v0, &v1);
    }

    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Rank %d\n", rank);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Cells:    p0:%d ,p1:%d \n", c0, c1);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Edges:    p0:%d ,p1:%d \n", e0, e1);
    PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Vertices: p0:%d ,p1:%d \n", v0, v1);
    PetscSynchronizedFlush(PETSC_COMM_WORLD, NULL);

    

    Vec state_full;

    ierr = addSectionToDM(dm, state_full);
    CHKERRQ(ierr);
    PetscInt o0, o1;
    VecGetOwnershipRange(state_full, &o0, &o1);
    PetscScalar *state_full_array;
    VecGetArray(state_full, &state_full_array);

    PetscSection section_full;
    DMGetLocalSection(dm, &section_full);

    for (PetscInt i = c0; i < c1; i++)
    {
        PetscInt offset;

        PetscSectionGetOffset(section_full, i, &offset);
        //PetscSynchronizedPrintf(PETSC_COMM_WORLD, "Rank %d, Cell %d, Offset %d\n", rank, i,  offset);

        // The invalid write Valgrind reports appears to come from these two stores.
        state_full_array[offset] = 100000 + i + rank * 100;
        state_full_array[offset + 1] = 300 + i + rank * 100;
    }
    //PetscSynchronizedFlush  (PETSC_COMM_WORLD, NULL);
    VecRestoreArray(state_full, &state_full_array);
    ViewVec(state_full, "mesh_vec.vtu");

    

    
    VecDestroy(&state_full);
    PetscSFDestroy(&distributionSF);
    DMDestroy(&dm);

    ierr = PetscFinalize();
    return ierr;
}

