static char help[] = "Solves a linear system in parallel with KSP, reading the matrix from a binary file.\n";

/*T
   Concepts: KSP^basic parallel example;
   Processors: n
T*/

/*
  Include "petscksp.h" so that we can use KSP solvers.  Note that this file
  automatically includes:
     petscsys.h    - base PETSc routines
     petscvec.h    - vectors
     petscmat.h    - matrices
     petscis.h     - index sets
     petscksp.h    - Krylov subspace methods
     petscviewer.h - viewers
     petscpc.h     - preconditioners

  Note: The corresponding uniprocessor example is ex1.c
*/
#include <petscksp.h>

#undef __FUNCT__
#define __FUNCT__ "main"
int main(int argc,char **args)
{
  Vec            x, b;      /* approx solution, right-hand side */
  Mat            A, APre;   /* linear system matrix, preconditioning matrix */
  KSP            ksp;       /* linear solver context */
  PetscErrorCode ierr;
  PetscViewer    fd;        /* viewer */

  PetscInitialize(&argc,&args,(char*)0,help);

  /* Load the system matrix; then destroy the viewer. */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"test_drdw.bin",FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
  ierr = MatSetFromOptions(A);CHKERRQ(ierr);
  ierr = MatLoad(A,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Load the preconditioning matrix; then destroy the viewer. */
  ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,"test_drdwPre.bin",FILE_MODE_READ,&fd);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_WORLD,&APre);CHKERRQ(ierr);
  ierr = MatSetFromOptions(APre);CHKERRQ(ierr);
  ierr = MatLoad(APre,fd);CHKERRQ(ierr);
  ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);

  /* Create vectors with the same parallel layout as A. */
  ierr = MatCreateVecs(A,&x,&b);CHKERRQ(ierr);

  /* Set the right-hand-side vector. */
  ierr = VecSet(b,1.0);CHKERRQ(ierr);

  /* Create the linear solver; use APre to build the preconditioner. */
  ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
  ierr = KSPSetOperators(ksp,A,APre);CHKERRQ(ierr);
  ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);

  ierr = PetscPrintf(PETSC_COMM_WORLD,"Solving...\n");CHKERRQ(ierr);
  ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);

  /* Clean up */
  ierr = VecDestroy(&x);CHKERRQ(ierr);
  ierr = VecDestroy(&b);CHKERRQ(ierr);
  ierr = MatDestroy(&A);CHKERRQ(ierr);
  ierr = MatDestroy(&APre);CHKERRQ(ierr);
  ierr = KSPDestroy(&ksp);CHKERRQ(ierr);

  /*
     Always call PetscFinalize() before exiting a program.  This routine
       - finalizes the PETSc libraries as well as MPI
       - provides summary and diagnostic information if certain runtime
         options are chosen (e.g., -log_summary).
  */
  ierr = PetscFinalize();
  return 0;
}
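
/*
   Usage sketch (names are illustrative, not part of the example): assuming the
   executable is built as ./loadsolve and the binary files test_drdw.bin and
   test_drdwPre.bin are in the working directory, a typical parallel run might be

       mpiexec -n 4 ./loadsolve -ksp_type gmres -pc_type bjacobi -ksp_monitor -ksp_view

   -ksp_monitor prints the preconditioned residual norm at each iteration, and
   -ksp_view reports the solver/preconditioner configuration selected through
   KSPSetFromOptions().
*/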