[petsc-users] MatZeroRowsColumns eliminates incorrectly in parallel

Junming Duan junming.duan.math at gmail.com
Fri Aug 9 15:02:06 CDT 2024


Dear all,

I tried to use MatZeroRowsColumns to eliminate Dirichlet boundary nodes, but it does not eliminate the expected entries in parallel.
Please see the attached code, which uses DMDA to create the matrix.
With one process it works as expected.
With two processes the domain is split in the x direction, and the entry at row 10, column 20 is not eliminated as it is in the single-process case; in the output below it remains 3. The output for two processes is included below the code.
I pass the same list of rows to be eliminated on both processes.
Thank you for any help.

#include <stdio.h>
#include <petscdmda.h>

int main(int argc, char **argv)
{
 PetscInt    M = 5, N = 3, m = PETSC_DECIDE, n = PETSC_DECIDE, ncomp = 2;
 PetscInt i, j;
 DMDALocalInfo daInfo;
 DM          da;
 Mat A;
 Vec x, b;
 MatStencil row, col[5];
 PetscScalar v[5];
 PetscInt  n_dirichlet_rows = 0, dirichlet_rows[2*(M+N)];

 PetscFunctionBeginUser;
 PetscCall(PetscInitialize(&argc, &argv, (char *)0, NULL));
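 /* 5 x 3 grid, 2 components per node, ghosted boundaries, box stencil of width 1 */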
 PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_GHOSTED, DMDA_STENCIL_BOX, M, N, m, n, ncomp, 1, NULL, NULL, &da));
 PetscCall(DMSetFromOptions(da));
 PetscCall(DMSetUp(da));

 PetscCall(DMView(da, PETSC_VIEWER_STDOUT_WORLD));

 PetscCall(DMDAGetLocalInfo(da, &daInfo));
 PetscCall(DMSetMatrixPreallocateOnly(da, PETSC_TRUE));

 PetscCall(DMCreateMatrix(da, &A));
 PetscCall(MatCreateVecs(A, &x, &b));
 PetscCall(MatZeroEntries(A));
 PetscCall(VecZeroEntries(x));
 PetscCall(VecZeroEntries(b));

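 /* Insert a 5-point stencil for each of the two components at every locally owned node; the values are arbitrary test data. */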
 for (j = daInfo.ys; j < daInfo.ys + daInfo.ym; ++j) {
   for (i = daInfo.xs; i < daInfo.xs + daInfo.xm; ++i) {
     row.j = j;
     row.i = i;
     row.c = 0;

     col[0].j = j;
     col[0].i = i;
     col[0].c = 0;
     v[0] = row.i + col[0].i + 2;
     col[1].j = j;
     col[1].i = i - 1;
     col[1].c = 0;
     v[1] = row.i + col[1].i + 2;
     col[2].j = j;
     col[2].i = i + 1;
     col[2].c = 0;
     v[2] = row.i + col[2].i + 2;
     col[3].j = j - 1;
     col[3].i = i;
     col[3].c = 0;
     v[3] = row.i + col[2].i + 2;
     col[4].j = j + 1;
     col[4].i = i;
     col[4].c = 0;
     v[4] = row.i + col[2].i + 2;

     PetscCall(MatSetValuesStencil(A, 1, &row, 5, col, v, ADD_VALUES));

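      /* Repeat the 5-point pattern for the second component (c = 1). */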
     row.j = j;
     row.i = i;
     row.c = 1;
     col[0].j = j;
     col[0].i = i;
     col[0].c = 1;
     v[1] = row.j + col[1].j + 2;
     col[1].j = j - 1;
     col[1].i = i;
     col[1].c = 1;
     v[1] = row.j + col[1].j + 2;
     col[2].j = j + 1;
     col[2].i = i;
     col[2].c = 1;
     v[2] = row.j + col[2].j + 2;
     col[3].j = j;
     col[3].i = i - 1;
     col[3].c = 1;
     v[3] = row.j + col[1].j + 2;
     col[4].j = j;
     col[4].i = i + 1;
     col[4].c = 1;
     v[4] = row.j + col[2].j + 2;

     PetscCall(MatSetValuesStencil(A, 1, &row, 5, col, v, ADD_VALUES));

   }
 }
 PetscCall(MatAssemblyBegin(A, MAT_FINAL_ASSEMBLY));
 PetscCall(MatAssemblyEnd(A, MAT_FINAL_ASSEMBLY));
 PetscCall(MatView(A, NULL));

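 /* Rows of component 0 at the left (i = 0) and right (i = mx-1) boundaries, computed in the
    natural lexicographic ordering: with mx = 5, my = 3, ncomp = 2 this gives 0, 8, 10, 18, 20, 28.
    The same list is built on every process. */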
 for (j = 0; j < daInfo.my; ++j) {
   dirichlet_rows[n_dirichlet_rows++] = j * daInfo.mx * ncomp;
   dirichlet_rows[n_dirichlet_rows++] = (j + 1) * daInfo.mx * ncomp - ncomp;
 }
 PetscCall(PetscPrintf(PETSC_COMM_SELF, "n_dirichlet_rows: %" PetscInt_FMT "\n", n_dirichlet_rows));
 for (j = 0; j < n_dirichlet_rows; ++j) {
   PetscCall(PetscPrintf(PETSC_COMM_SELF, "%" PetscInt_FMT ", ", dirichlet_rows[j]));
 }
 PetscCall(PetscPrintf(PETSC_COMM_SELF, "\n"));
 PetscCall(MatZeroRowsColumns(A, n_dirichlet_rows, dirichlet_rows, 1.0, NULL, NULL));
 PetscCall(MatView(A, NULL));

 PetscCall(MatDestroy(&A));
 PetscCall(VecDestroy(&x));
 PetscCall(VecDestroy(&b));
 PetscCall(DMDestroy(&da));
 PetscCall(PetscFinalize());
 return 0;
}

———————————————————

n_dirichlet_rows: 6
0, 8, 10, 18, 20, 28,
Mat Object: 2 MPI processes
 type: mpiaij
   row 0:     (0, 1.)      (2, 0.)      (10, 0.)
   row 1:     (1, 2.)      (3, 3.)      (11, 3.)
   row 2:     (0, 0.)      (2, 4.)      (4, 5.)      (12, 0.)
   row 3:     (1, 1.)      (3, 4.)      (5, 3.)      (13, 3.)
   row 4:     (2, 5.)      (4, 6.)      (6, 0.)      (14, 0.)
   row 5:     (3, 1.)      (5, 6.)      (7, 3.)      (15, 3.)
   row 6:     (4, 0.)      (6, 1.)      (8, 0.)      (16, 0.)
   row 7:     (5, 1.)      (7, 8.)      (9, 3.)      (17, 3.)
   row 8:     (6, 0.)      (8, 1.)      (18, 0.)
   row 9:     (7, 1.)      (9, 10.)      (19, 3.)
   row 10:     (0, 0.)      (10, 2.)      (12, 0.)      (20, 3.)
   row 11:     (1, 3.)      (11, 2.)      (13, 5.)      (21, 5.)
   row 12:     (2, 0.)      (10, 0.)      (12, 1.)      (14, 0.)      (22, 0.)
   row 13:     (3, 3.)      (11, 3.)      (13, 4.)      (15, 5.)      (23, 5.)
   row 14:     (4, 0.)      (12, 0.)      (14, 1.)      (16, 0.)      (24, 0.)
   row 15:     (5, 3.)      (13, 3.)      (15, 6.)      (17, 5.)      (25, 5.)
   row 16:     (6, 0.)      (14, 0.)      (16, 8.)      (18, 9.)      (26, 9.)
   row 17:     (7, 3.)      (15, 3.)      (17, 8.)      (19, 5.)      (27, 5.)
   row 18:     (8, 0.)      (16, 9.)      (18, 10.)      (28, 0.)
   row 19:     (9, 3.)      (17, 3.)      (19, 10.)      (29, 5.)
   row 20:     (10, 3.)      (20, 2.)      (22, 3.)
   row 21:     (11, 5.)      (21, 2.)      (23, 7.)
   row 22:     (12, 0.)      (20, 3.)      (22, 4.)      (24, 5.)
   row 23:     (13, 5.)      (21, 5.)      (23, 4.)      (25, 7.)
   row 24:     (14, 0.)      (22, 5.)      (24, 6.)      (26, 7.)
   row 25:     (15, 5.)      (23, 5.)      (25, 6.)      (27, 7.)
   row 26:     (16, 9.)      (24, 7.)      (26, 8.)      (28, 0.)
   row 27:     (17, 5.)      (25, 5.)      (27, 8.)      (29, 7.)
   row 28:     (18, 0.)      (26, 0.)      (28, 1.)
   row 29:     (19, 5.)      (27, 5.)      (29, 10.)

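For reference, here is a small diagnostic sketch that is not part of the program above; it assumes the same matrix A as in the listing and only prints each rank's owned row range, so the row indices passed to MatZeroRowsColumns can be compared against the parallel row distribution of the DMDA matrix:

 PetscInt    rstart, rend;
 PetscMPIInt rank;
 PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
 /* Owned global row range of A on this rank: the half-open interval [rstart, rend) */
 PetscCall(MatGetOwnershipRange(A, &rstart, &rend));
 PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] owns rows %" PetscInt_FMT " to %" PetscInt_FMT "\n", rank, rstart, rend - 1));
 PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));
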
Junming