[petsc-users] 3D Poisson solver failed in KSPSolve when number of cores is larger than one
Thuc Bui
bui at calcreek.com
Mon Jul 24 19:16:09 CDT 2023
Dear PETSc Users/Developers,
I have been successfully using PETSc on Windows without MPI for a while now.
I have now attempted to use PETSc with MPI on Windows 10. I have built
PETSc 3.18.6 as a static release library with MS MPI 10.1.2, Intel MKL 3.279
(2020), and Visual Studio 2019. I am testing a finite-difference 3D Poisson
solver (cylindrical coordinates) with this MPI-enabled PETSc.
On a Windows 10 laptop with 4 cores (8 logical processors), the solver works
fine on one core. However, it fails on 2 cores. The errors, which are
generated in KSPSolve, are listed below, followed by the complete code.
Perhaps I am missing some settings, which causes the error "No method
multdiagonalblock for Mat of type seqaij". I do not understand why a seqaij
matrix is involved in KSPSolve for a 2-core MPI run.
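In case it helps with the diagnosis, below is a minimal sketch of a check I could add just before KSPSolve (using the mat_ and ksp objects from the attached code; the local variable names are only illustrative) to print the matrix and preconditioner types each rank actually sees. Running with -ksp_view should report similar information.

    MatType matType;
    PCType activePCType;
    PC activePC;
    int myRank;
    PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &myRank));
    PetscCall(MatGetType(mat_, &matType));          //expect mpiaij on 2 ranks
    PetscCall(KSPGetPC(ksp, &activePC));
    PetscCall(PCGetType(activePC, &activePCType));  //eisenstat is set in the code below
    PetscCall(PetscSynchronizedPrintf(PETSC_COMM_WORLD, "[%d] Mat type: %s, PC type: %s\n", myRank, matType, activePCType));
    PetscCall(PetscSynchronizedFlush(PETSC_COMM_WORLD, PETSC_STDOUT));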
I would be very grateful if someone could point me toward a way to fix
these issues.
Many thanks for your help,
Thuc Bui
Senior R&D Engineer
Calabazas Creek Research, Inc
(650) 948-5361
///////////////errors/////////////////////////
rank=0 iStart=0 iEnd=2375
rank=1 iStart=2375 iEnd=4750
Both ranks print the same error message and stack trace:
[0]PETSC ERROR: --------------------- Error Message --------------------------------------------------------------
[0]PETSC ERROR: No support for this operation for this object type
[0]PETSC ERROR: No method multdiagonalblock for Mat of type seqaij
[0]PETSC ERROR: See https://petsc.org/release/faq/ for trouble shooting.
[0]PETSC ERROR: Petsc Release Version 3.18.6, Mar 30, 2023
[0]PETSC ERROR: Poisson3D.exe on a v140-x64SReleaseSP named HERMES by bui Mon Jul 24 15:16:55 2023
[0]PETSC ERROR: Configure options --with-cc="win32fe cl" --with-fc=0 --with-cxx="win32fe cl" --with-debugging=0 --with-openmp --with-mpi-include=/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/MSMPI/latest/include:/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/MSMPI/latest/include/x64 --with-mpi-lib="[/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/MSMPI/latest/Lib/x64/msmpi.lib,/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/MSMPI/latest/Lib/x64/msmpifec.lib]" --with-blaslapack-lib="[/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/IntelMKL/latest/x64/lib/mkl_intel_lp64.lib,/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/IntelMKL/latest/x64/lib/mkl_core.lib,/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/IntelMKL/latest/x64/lib/mkl_intel_thread.lib,/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/IntelMKL/latest/x64/lib/libiomp5md.lib]" --with-mpiexec=/cygdrive/c/Users/bui/Documents/Petsc/externalpackages/MSMPI/latest/bin/x64/mpiexec --with-shared-libraries=0
[0]PETSC ERROR: #1 MatMultDiagonalBlock() at C:\Users\bui\DOCUME~1\Petsc\latest\src\mat\INTERF~1\matrix.c:2538
[0]PETSC ERROR: #2 MatMultDiagonalBlock_MPIAIJ() at C:\Users\bui\DOCUME~1\Petsc\latest\src\mat\impls\aij\mpi\mpiaij.c:978
[0]PETSC ERROR: #3 MatMultDiagonalBlock() at C:\Users\bui\DOCUME~1\Petsc\latest\src\mat\INTERF~1\matrix.c:2538
[0]PETSC ERROR: #4 PCApply_Eisenstat() at C:\Users\bui\DOCUME~1\Petsc\latest\src\ksp\pc\impls\eisens\eisen.c:51
[0]PETSC ERROR: #5 PCApply() at C:\Users\bui\DOCUME~1\Petsc\latest\src\ksp\pc\INTERF~1\precon.c:441
[0]PETSC ERROR: #6 KSP_PCApply() at C:\Users\bui\DOCUME~1\Petsc\latest\include\petsc/private/kspimpl.h:380
[0]PETSC ERROR: #7 KSPInitialResidual() at C:\Users\bui\DOCUME~1\Petsc\latest\src\ksp\ksp\INTERF~1\itres.c:64
[0]PETSC ERROR: #8 KSPSolve_GMRES() at C:\Users\bui\DOCUME~1\Petsc\latest\src\ksp\ksp\impls\gmres\gmres.c:227
[0]PETSC ERROR: #9 KSPSolve_Private() at C:\Users\bui\DOCUME~1\Petsc\latest\src\ksp\ksp\INTERF~1\itfunc.c:899
[0]PETSC ERROR: #10 KSPSolve() at C:\Users\bui\DOCUME~1\Petsc\latest\src\ksp\ksp\INTERF~1\itfunc.c:1071
[0]PETSC ERROR: #11 main() at C:\Users\bui\Documents\Nemesis\Poisson3D\Main.cpp:253
[0]PETSC ERROR: No PETSc Option Table entries
[0]PETSC ERROR: ----------------End of Error Message -------send entire error message to petsc-maint at mcs.anl.gov----------
(rank [1] prints the identical trace.)
//complete code///////////////////////////////////////////////
#include "petscsys.h"
#include "petscksp.h"
#ifndef PETSC_SUCCESS
#define PETSC_SUCCESS 0
#endif
//rectangular matrix mxn
int** allocateIMatrixR(int m, int n)
{
int** v = (int**)malloc(m * sizeof(int*));
for (int i = 0; i < m; ++i) {
v[i] = (int*)malloc(n * sizeof(int));
}
for (int i = 0; i < m; ++i) {
for (int j = 0; j < n; ++j) {
v[i][j] = -1;
}
}
return v;
}
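//A matching deallocation helper (a sketch, not in the original code;
//MPIAssembler below frees Nijk inline, which is equivalent).
void freeIMatrixR(int** v, int m)
{
if (!v) return;
for (int i = 0; i < m; ++i) {
free(v[i]);
}
free(v);
}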
int mapIndex(int k, int i, int j, int Nz, int Nr)
{
//k for z, i for r, j for theta
//base 0
//3D table: theta r z
// 0 0 0
// 0 0 1
// 0 0 2
// ...
// 0 1 0
// 0 1 1
// 0 1 2
// ...
int n = k + i * Nz + j * Nz * Nr;
return n;
}
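//A minimal sketch (not in the original code): the inverse of mapIndex,
//recovering (k,i,j) from the global row number m. This could replace the
//Nijk lookup table built in MPIAssembler below; unmapIndex is only an
//illustrative name.
void unmapIndex(int m, int Nz, int Nr, int* k, int* i, int* j)
{
*j = m / (Nz * Nr);   //theta index
int rem = m % (Nz * Nr);
*i = rem / Nz;        //r index
*k = rem % Nz;        //z index
}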
PetscErrorCode MPIAssembler(Mat* A, Vec* b, int Kz, int Ir, int Jt)
{
PetscFunctionBeginUser;
int M;
PetscCall(VecGetSize(*b, &M));
//construct an array of Mx3; the row index is the equation number, each row is (i,j,k),
//i is r-index, j is theta-index, k is z-index
const int dim = 3;
int** Nijk = allocateIMatrixR(M, dim);
for (int k = 0; k < Kz; ++k) {
int k1 = k + 1;
for (int i = 0; i < Ir; ++i) {
for (int j = 0; j < Jt; ++j) {
//for each equation row m
int m = mapIndex(k, i, j, Kz, Ir);
Nijk[m][0] = i;
Nijk[m][1] = j;
Nijk[m][2] = k;
}
}
}
/////////////////////////////////////////////////////////////////////////////////
//Cylinder dimensions
double R = 11.0; //cm, cylinder radius
double L = 11.0; //cm, cylinder length in z
double Dr = R / (Ir + 1);
double Dz = L / (Kz + 1);
double Dr2 = Dr * Dr;
double Dr_2 = 0.5 * Dr;
double Dr_22 = Dr_2 * Dr_2;
double Dz_2 = 1.0 / (Dz * Dz);
double DT = 2.0 * M_PI / Jt;
double DT2 = DT * DT;
double Vr = 0.0; //Dirichlet on R
double chargeDens = -4.0;
int rank;
PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
//get the range of matrix rows owned by this processor
int startingNumber, endingNumber;
PetscCall(MatGetOwnershipRange(*A, &startingNumber,
&endingNumber));
fprintf(stderr, "rank=%i iStart=%i iEnd=%i\n", rank,
startingNumber, endingNumber);
int Ir_1 = Ir - 1;
int Jt_1 = Jt - 1;
for (int m = startingNumber; m < endingNumber; ++m) {
int i = Nijk[m][0];
int j = Nijk[m][1];
int k = Nijk[m][2];
double ri = Dr * (i + 1);
double ri_2 = ri - Dr_2;
double ri2 = ri + Dr_2;
double sqri = ri * ri;
double Ai = (ri_2 / ri) / Dr2;
double Bi = (ri2 / ri) / Dr2;
double Ci = 1.0 / (sqri * DT2);
double Ei = (ri2 + ri_2) / (ri * Dr2) + 2.0 / (sqri * DT2)
+ 2.0 * Dz_2;
//contribution of charge density to RHS
PetscCall(VecSetValue(*b, m, -chargeDens, ADD_VALUES));
//contribution to RHS by Dirichlet on R
if (i == Ir_1) {
PetscCall(VecSetValue(*b, i, -Bi * Vr,
ADD_VALUES));
}
if (!i) { //from the axis
//contributing to RHS
PetscCall(VecSetValue(*b, m, -chargeDens * Ai *
Dr_22, ADD_VALUES));
//contributing to LHS
double xTheta = (Ai / Jt);
for (int jt = 0; jt < Jt; ++jt) {
int nj = mapIndex(k, i, jt, Kz, Ir);
PetscCall(MatSetValues(*A, 1, &m, 1, &nj,
&xTheta, ADD_VALUES));
}
}
else {
//0 < i < Ir: contribution of Ai to LHS
int i_jk = mapIndex(k, i - 1, j, Kz, Ir);
PetscCall(MatSetValues(*A, 1, &m, 1, &i_jk, &Ai,
ADD_VALUES));
}
if (i != Ir_1) {
//0 <= i < Ir-1: contribution of Bi to LHS
int ki1j = mapIndex(k, i + 1, j, Kz, Ir);
PetscCall(MatSetValues(*A, 1, &m, 1, &ki1j, &Bi,
ADD_VALUES));
}
//apply periodic BC in theta////////////////////////////
int kij_;
if (!j) {
kij_ = mapIndex(k, i, Jt_1, Kz, Ir);
}
else {
kij_ = mapIndex(k, i, j - 1, Kz, Ir);
}
//1st contribution of Ci to LHS
PetscCall(MatSetValues(*A, 1, &m, 1, &kij_, &Ci,
ADD_VALUES));
int kij1;
if (j == Jt_1) {
kij1 = mapIndex(k, i, 0, Kz, Ir);
}
else {
kij1 = mapIndex(k, i, j + 1, Kz, Ir);
}
//2nd contribution of Ci to LHS
PetscCall(MatSetValues(*A, 1, &m, 1, &kij1, &Ci,
ADD_VALUES));
////////////////////////////////////////////////
//apply z Neumann BC ///////////////////////////////////
int k_ij;
if (!k) { //z=0
k_ij = mapIndex(k, i, j, Kz, Ir);
}
else { //0 < z < L
k_ij = mapIndex(k - 1, i, j, Kz, Ir);
}
PetscCall(MatSetValues(*A, 1, &m, 1, &k_ij, &Dz_2,
ADD_VALUES));
/////////////////////////////////////////////////////////
//contribution of Ei to LHS
int kij = mapIndex(k, i, j, Kz, Ir);
PetscCall(MatSetValues(*A, 1, &m, 1, &kij, &Ei,
ADD_VALUES));
}
if (Nijk) {
for (int i = 0; i < M; ++i) {
free(Nijk[i]);
}
free(Nijk);
}
PetscCall(VecAssemblyBegin(*b));
PetscCall(VecAssemblyEnd(*b));
PetscCall(MatAssemblyBegin(*A, MAT_FINAL_ASSEMBLY));
PetscCall(MatAssemblyEnd(*A, MAT_FINAL_ASSEMBLY));
PetscFunctionReturn(PETSC_SUCCESS);
}
int main(int argc, char** argv)
{
PetscFunctionBeginUser;
const char help[] = "Poisson3D solver using PETSc.";
PetscCall(PetscInitialize(&argc, &argv, PETSC_NULL, help));
Mat mat_;
Vec B_;
Vec X_;
int Nz = 26; //number of z-segments
int Nr = 20; //number of r-segments
int Nt = 10; //number of theta-segments
PetscCall(PetscOptionsGetInt(NULL, NULL, "-z", &Nz, NULL));
PetscCall(PetscOptionsGetInt(NULL, NULL, "-r", &Nr, NULL));
PetscCall(PetscOptionsGetInt(NULL, NULL, "-t", &Nt, NULL));
//Neumann at z=0 and z=L
//Dirichlet at R, the cylinder radius
//On axis, compute field
//number of unknowns of cylinder interior nodes minus axis
int Kz = Nz - 1;
int Ir = Nr - 1;
int M = Kz * Ir * Nt;
int bw = 15; //max nonzeros per row (7-point stencil; axis rows also couple to all theta nodes)
PetscCall(MatCreate(PETSC_COMM_WORLD, &mat_));
PetscCall(MatSetSizes(mat_, PETSC_DECIDE, PETSC_DECIDE, M, M));
PetscCall(MatSetFromOptions(mat_));
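//Both preallocation calls are kept so the same code works on one rank
//(seqaij) and on several ranks (mpiaij); PETSc ignores the call that does
//not match the actual matrix type.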
PetscCall(MatMPIAIJSetPreallocation(mat_, bw, 0, bw, 0));
PetscCall(MatSeqAIJSetPreallocation(mat_, bw, 0));
//source
PetscCall(VecCreate(PETSC_COMM_WORLD, &B_));
PetscCall(VecSetSizes(B_, PETSC_DECIDE, M));
PetscCall(VecSetFromOptions(B_));
//sol
PetscCall(VecDuplicate(B_, &X_));
PetscCall(MPIAssembler(&mat_, &B_, Kz, Ir, Nt));
PCType pcType = PCEISENSTAT;
KSPType solverType = KSPGMRES;
int maxIters = 100;
double relTol = 1.0e-6;
KSP ksp;
PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
PetscCall(KSPSetOperators(ksp, mat_, mat_));
PetscCall(KSPSetType(ksp, solverType));
//preconditioner
PC pc;
PetscCall(KSPGetPC(ksp, &pc));
PetscCall(PCSetType(pc, pcType));
PetscCall(KSPSetTolerances(ksp, relTol, PETSC_DEFAULT,
PETSC_DEFAULT, maxIters));
PetscCall(KSPSetFromOptions(ksp));
//solving
PetscCall(KSPSolve(ksp, B_, X_));
double xMin, xMax;
int mnIndx, mxIndx;
PetscCall(VecMin(X_, &mnIndx, &xMin));
PetscCall(VecMax(X_, &mxIndx, &xMax));
fprintf(stderr, "mnIndx=%i mxIndx=%i xMin=%g xMax=%g\n", mnIndx,
mxIndx, xMin, xMax);
PetscCall(PetscFinalize());
return 0;
}
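For completeness, the failing run was launched on two ranks with the MS-MPI mpiexec, essentially

mpiexec -n 2 Poisson3D.exe

with the default grid (-z 26 -r 20 -t 10) and no PETSc options, as the "No PETSc Option Table entries" line above indicates. Since KSPSetFromOptions is called after the hard-coded GMRES/Eisenstat setup, the solver and preconditioner can also be overridden at run time with -ksp_type and -pc_type for testing.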