// Using PETSc MatIS, how to get the local matrix (= one domain) before and after assembly (Neumann/Dirichlet)?
//
// A 3x3 global matrix is built (diag: 1, 2, 1): it is made of 2 overlapping 2x2 local matrices (diag: 1, 1).
// Getting the non-assembled local matrix: OK with MatISGetLocalMat.
//
// How to get the assembled local matrix (initial local matrix + neighbor contributions on the borders)?
// Need to use MatGetLocalSubMatrix? Not working?! Expected result is diag: 2, 1.
//
// ~> g++ -o matISLocalMat.exe matISLocalMat.cpp -lpetsc -lm; mpirun -n 2 matISLocalMat.exe

#include <iostream>
#include "petsc.h"

int main(int argc, char **argv) {
  PetscInitialize(&argc, &argv, NULL, NULL);

  int size = 0;
  MPI_Comm_size(MPI_COMM_WORLD, &size);
  if (size != 2) return 1;

  int rank = 0;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);

  // Each rank owns a 2x2 local block; the two blocks overlap on global index 1.
  PetscInt localSize = 2, globalSize = 3;
  PetscInt localIdx[2] = {0, 0};
  if (rank == 0) { localIdx[0] = 0; localIdx[1] = 1; }
  else           { localIdx[0] = 1; localIdx[1] = 2; }

  ISLocalToGlobalMapping map;
  ISLocalToGlobalMappingCreate(PETSC_COMM_WORLD, 1, localSize, localIdx, PETSC_COPY_VALUES, &map);

  Mat A;
  // MatIS means that the matrix is not assembled. The easiest way to think of this is that processes do not
  // have to hold full rows: one process can hold part of row i, and another process can hold another part.
  // However, there is still the same number of global rows. The local size here is not the size of the local
  // IS block (that is a property only of MatIS); it is the size of the local piece of the vector you multiply.
  // This allows PETSc to understand the parallel layout of the Vec, and how it matches the Mat.
  // Conclusion: it is crucial to use PETSC_DECIDE in place of localSize in the call to MatCreateIS.
  MatCreateIS(PETSC_COMM_WORLD, 1, PETSC_DECIDE, PETSC_DECIDE, globalSize, globalSize, map, map, &A);
  ISLocalToGlobalMappingDestroy(&map);
  MatISSetPreallocation(A, localSize, NULL, localSize, NULL);

  // Each rank adds a 2x2 identity; the contributions on the shared global index 1 sum to 2 at assembly.
  PetscScalar localVal[4] = {1., 0., 0., 1.};
  MatSetValues(A, 2, localIdx, 2, localIdx, localVal, ADD_VALUES);
  MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY);
  MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY);
  MatView(A, PETSC_VIEWER_STDOUT_WORLD); PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD); // Diag: 1, 2, 1

  if (rank > 0) { // Do not pollute stdout: print only 1 proc.
    std::cout << std::endl << "non assembled local matrix:" << std::endl << std::endl;
    Mat nonAssembledLocalMat;
    MatISGetLocalMat(A, &nonAssembledLocalMat);
    MatView(nonAssembledLocalMat, PETSC_VIEWER_STDOUT_SELF); // Diag: 1, 1
    PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);
  }
  MPI_Barrier(MPI_COMM_WORLD);

  if (rank == 0) std::cout << std::endl << "assembled global matrix:" << std::endl << std::endl;
  MPI_Barrier(MPI_COMM_WORLD);
  Mat assembledGlobalMat;
  MatISGetMPIXAIJ(A, MAT_INITIAL_MATRIX, &assembledGlobalMat); // Collective call: gathers the MatIS into a true (MPI)AIJ matrix.
  MatView(assembledGlobalMat, PETSC_VIEWER_STDOUT_WORLD); PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD);

  if (rank == 0) std::cout << std::endl << "index set to extract assembled local matrix:" << std::endl << std::endl;
  MPI_Barrier(MPI_COMM_WORLD);
  IS is;
  ISCreateGeneral(PETSC_COMM_WORLD, localSize, localIdx, PETSC_COPY_VALUES, &is);
  ISView(is, PETSC_VIEWER_STDOUT_WORLD); PetscViewerFlush(PETSC_VIEWER_STDOUT_WORLD);

  if (rank == 0) std::cout << std::endl << "assembled local matrix:" << std::endl << std::endl;
  Mat *assembledLocalMat;
  MatGetSubMatrices(assembledGlobalMat, 1, &is, &is, MAT_INITIAL_MATRIX, &assembledLocalMat); // Collective call.
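  // Note: MatGetSubMatrices returns an array of *sequential* submatrices (one per requested IS pair),
  // so each rank can view its own assembled local block with the SELF viewer below. With the indices
  // requested here, the expected blocks are diag (1, 2) on rank 0 and diag (2, 1) on rank 1.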
  if (rank == 0) MatView(assembledLocalMat[0], PETSC_VIEWER_STDOUT_SELF); PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);
  if (rank == 1) MatView(assembledLocalMat[0], PETSC_VIEWER_STDOUT_SELF); PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);

  MatDestroyMatrices(1, &assembledLocalMat);
  ISDestroy(&is);
  MatDestroy(&assembledGlobalMat);
  MatDestroy(&A);
  PetscFinalize();
  return 0;
}
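// Note (assumption: this applies to PETSc >= 3.8, where some of the calls above were renamed): the same
// extraction of the assembled local matrix can be sketched with the newer names as
//
//   Mat assembledGlobalMat;
//   MatConvert(A, MATAIJ, MAT_INITIAL_MATRIX, &assembledGlobalMat); // Replaces MatISGetMPIXAIJ.
//   Mat *assembledLocalMat;
//   MatCreateSubMatrices(assembledGlobalMat, 1, &is, &is, MAT_INITIAL_MATRIX, &assembledLocalMat); // Replaces MatGetSubMatrices.
//   ...
//   MatDestroySubMatrices(1, &assembledLocalMat); // Replaces MatDestroyMatrices.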