! Minimal PETSc DMPlex/FV experiment: read an Exodus mesh, distribute it with
! overlap, construct ghost cells, attach a finite-volume discretization, and
! inspect cell/face point ranges and the DM-created Jacobian matrix.
program simpleTest2
  use newsubs
  implicit none
! NOTE(review): the original source had a bare "#include" with no file name,
! which cannot preprocess. The standard PETSc Fortran definitions header is
! assumed here -- confirm the exact finclude path for the PETSc version this
! project builds against (the API used matches PETSc <= 3.7).
#include <petsc/finclude/petsc.h>

! Geometric dimension:
#define DIM 3

  MPI_Comm :: comm
  PetscMPIInt :: rank
  ! Exodus mesh to read; alternative test mesh kept for reference:
  ! character(80) :: filename = "g3Block_v1.exo"
  character(80) :: filename = "simpleblock-100.exo"
  ! One unknown per cell for the finite-volume discretization.
  PetscInt, parameter :: dof = 1
  PetscErrorCode :: ierr
  DM :: dm, dist_dm, ghost_dm
  PetscFV :: fvm
  PetscDS :: ds
  PetscInt :: num_ghost_cells
  PetscInt :: start_cell, end_cell, end_interior_cell
  PetscInt :: start_face, end_face
  PetscInt :: overlap
  Mat :: TestJac

  call PetscInitialize(PETSC_NULL_CHARACTER, ierr); CHKERRQ(ierr)
  comm = PETSC_COMM_WORLD
  call MPI_Comm_rank(comm, rank, ierr); CHKERRQ(ierr)

  ! Set up mesh: read the Exodus file (interpolating faces/edges), and request
  ! FV-style adjacency (through cones, i.e. cell-face-cell, not closures).
  call DMPlexCreateExodusFromFile(comm, filename, PETSC_TRUE, dm, ierr); CHKERRQ(ierr)
  call DMPlexSetAdjacencyUseCone(dm, PETSC_TRUE, ierr); CHKERRQ(ierr)
  call DMPlexSetAdjacencyUseClosure(dm, PETSC_FALSE, ierr); CHKERRQ(ierr)

  ! Distribute mesh with one layer of overlap cells. On a single process the
  ! returned DM is null, hence the check before swapping it in.
  overlap = 1
  call DMPlexDistribute(dm, overlap, PETSC_NULL_OBJECT, dist_dm, ierr)
  CHKERRQ(ierr)
  if (dist_dm /= 0) then
    call DMDestroy(dm, ierr); CHKERRQ(ierr)
    dm = dist_dm
  end if
  call DMSetFromOptions(dm, ierr); CHKERRQ(ierr)

  ! Create a label for the dm but don't assign it to any faces. This prevents
  ! DMPlexConstructGhostCells from labelling the faces.
  call DMPlexCreateLabel(dm, "ClosedBoundaries", ierr); CHKERRQ(ierr)
  call DMView(dm, PETSC_VIEWER_STDOUT_WORLD, ierr); CHKERRQ(ierr)

  ! Construct ghost cells.
  ! On a single processor there are no ghost cells but this labels all the outer
  ! boundaries as ghost faces, which is good so that we don't try to work out
  ! the flux on those faces.
  ! On two processors this labels the two overlap cells as ghost cells and all
  ! the boundary faces as ghost faces.
  call DMPlexConstructGhostCells(dm, "ClosedBoundaries", num_ghost_cells, ghost_dm, ierr)
  CHKERRQ(ierr)
  if (ghost_dm /= 0) then
    call DMDestroy(dm, ierr); CHKERRQ(ierr)
    dm = ghost_dm
  end if

  ! Set up discretization: a PetscFV with dof components in DIM spatial
  ! dimensions, registered with the DM's discrete system.
  call PetscFVCreate(comm, fvm, ierr); CHKERRQ(ierr)
  call PetscFVSetNumComponents(fvm, dof, ierr); CHKERRQ(ierr)
  call PetscFVSetSpatialDimension(fvm, DIM, ierr); CHKERRQ(ierr)
  call DMGetDS(dm, ds, ierr); CHKERRQ(ierr)
  call PetscDSAddDiscretization(ds, fvm, ierr); CHKERRQ(ierr)
  call DMView(dm, PETSC_VIEWER_STDOUT_WORLD, ierr); CHKERRQ(ierr)

  ! Testing: query point ranges. Cells are height-0 points, faces height-1;
  ! the hybrid bound marks where interior cells end (ghost cells follow).
  call DMPlexGetHeightStratum(dm, 0, start_cell, end_cell, ierr); CHKERRQ(ierr)
  call DMPlexGetHybridBounds(dm, end_interior_cell, &
       PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, PETSC_NULL_INTEGER, ierr); CHKERRQ(ierr)
  call DMPlexGetHeightStratum(dm, 1, start_face, end_face, ierr); CHKERRQ(ierr)
  ! On two processors the end_cell = end_interior_cell values on both processors.
  ! Is this correct? I guess we can identify the overlap cells as they have been
  ! labelled as ghost?
  write(*,*) start_cell, end_cell, end_interior_cell, start_face, end_face

  ! Creating a matrix doesn't seem to work on two processors?
  call DMCreateMatrix(dm, TestJac, ierr); CHKERRQ(ierr)
  call MatView(TestJac, PETSC_VIEWER_STDOUT_WORLD, ierr); CHKERRQ(ierr)

  ! Clean up. The original leaked TestJac and fvm; every object created here is
  ! now destroyed before PetscFinalize.
  call MatDestroy(TestJac, ierr); CHKERRQ(ierr)
  call PetscFVDestroy(fvm, ierr); CHKERRQ(ierr)
  call DMDestroy(dm, ierr); CHKERRQ(ierr)
  call PetscFinalize(ierr); CHKERRQ(ierr)
end program simpleTest2