!
!  Description: Solves a linear system in parallel with KSP (Fortran code).
!               Also shows how to set a user-defined monitoring routine.
!
!
!/*T
!  Concepts: KSP^basic parallel example
!  Concepts: KSP^setting a user-defined monitoring routine
!  Processors: n
!T*/
!
! -----------------------------------------------------------------------

      program main
      implicit none

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                          Include files
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  This program uses CPP for preprocessing, as indicated by the use of
!  PETSc include files in the directory petsc/include/petsc/finclude.  This
!  convention enables use of the CPP preprocessor, which allows the use
!  of the #include statements that define PETSc objects and variables.
!
!  Use of the conventional Fortran include statements is also supported.
!  In this case, the PETSc include files are located in the directory
!  petsc/include/foldinclude.
!
!  Since one must be very careful to include each file no more than once
!  in a Fortran routine, application programmers must explicitly list
!  each file needed for the various PETSc components within their
!  program (unlike the C/C++ interface).
!
!  See the Fortran section of the PETSc users manual for details.
!
!  The following include statements are required for KSP Fortran programs:
!     petscsys.h    - base PETSc routines
!     petscvec.h    - vectors
!     petscmat.h    - matrices
!     petscpc.h     - preconditioners
!     petscksp.h    - Krylov subspace methods
!  Additional include statements may be needed if using additional
!  PETSc routines in a Fortran program, e.g.,
!     petscviewer.h - viewers
!     petscis.h     - index sets
!
#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscmat.h>
#include <petsc/finclude/petscpc.h>
#include <petsc/finclude/petscksp.h>
#include <petsc/finclude/petscviewer.h>
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Variable declarations
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!
!  Variables:
!     ksp      - linear solver (Krylov subspace method) context
!     pc       - preconditioner context
!     x, b, u  - approx solution, right-hand-side, exact solution vectors
!     A        - matrix that defines linear system
!     AtA,AtA2 - extra matrices (A^T A and a copy) used only to
!                demonstrate MatTransposeMatMult()/MatDuplicate()
!     its      - iterations for convergence
!     norm     - norm of error in solution
!     rctx     - random number generator context
!
!  Note that vectors are declared as PETSc "Vec" objects.  These vectors
!  are mathematical objects that contain more than just an array of
!  double precision numbers. I.e., vectors in PETSc are not just
!        double precision x(*).
!  However, local vector data can be easily accessed via VecGetArray().
!  See the Fortran section of the PETSc users manual for details.
!
      PetscReal      norm
      PetscInt       i,j,II,JJ,m,n,its
      PetscInt       Istart,Iend,ione
      PetscErrorCode ierr
      PetscMPIInt    rank,size
      PetscBool      flg
      PetscScalar    v,one,neg_one
      Vec            x,b,u
      Mat            A,AtA,AtA2
      KSP            ksp
      PetscRandom    rctx
      PetscViewerAndFormat vf

!  These variables are not currently used.
!      PC          pc
!      PCType      ptype
!      PetscReal   tol

!  Note: Any user-defined Fortran routines (such as MyKSPMonitor)
!  MUST be declared as external.

      external MyKSPMonitor,MyKSPConverged
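!  As noted above, local vector data can be accessed from Fortran via
!  VecGetArray()/VecRestoreArray() using the array/offset convention.
!  A minimal sketch follows, commented out since this example does not
!  need it (xx_v, xx_i, nlocal, and k are hypothetical names; the
!  declarations would go with the others above):
!
!      PetscScalar xx_v(1)
!      PetscOffset xx_i
!      PetscInt    nlocal,k
!      call VecGetLocalSize(x,nlocal,ierr)
!      call VecGetArray(x,xx_v,xx_i,ierr)
!      do 5, k=1,nlocal
!        xx_v(xx_i+k) = 0.0
!  5   continue
!      call VecRestoreArray(x,xx_v,xx_i,ierr)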
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                        Beginning of program
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call PetscInitialize(PETSC_NULL_CHARACTER,ierr)
      m = 3
      n = 3
      one     = 1.0
      neg_one = -1.0
      ione    = 1
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,   &
     &                        '-m',m,flg,ierr)
      call PetscOptionsGetInt(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,   &
     &                        '-n',n,flg,ierr)
      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      call MPI_Comm_size(PETSC_COMM_WORLD,size,ierr)

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!      Compute the matrix and right-hand-side vector that define
!      the linear system, Ax = b.
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create parallel matrix, specifying only its global dimensions.
!  When using MatCreate(), the matrix format can be specified at
!  runtime. Also, the parallel partitioning of the matrix is
!  determined by PETSc at runtime.

      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,m*n,m*n,ierr)
      call MatSetFromOptions(A,ierr)
      call MatSetUp(A,ierr)

!  Currently, all PETSc parallel matrix formats are partitioned by
!  contiguous chunks of rows across the processors.  Determine which
!  rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)

!  Set matrix elements for the 2-D, five-point stencil in parallel.
!   - Each processor needs to insert only elements that it owns
!     locally (but any non-local elements will be sent to the
!     appropriate processor during matrix assembly).
!   - Always specify global rows and columns of matrix entries.
!   - Note that MatSetValues() uses 0-based row and column numbers
!     in Fortran as well as in C.

!     Note: this uses the less common natural ordering that orders first
!     all the unknowns for x = h then for x = 2h etc; Hence you see
!     JJ = II +- n instead of JJ = II +- m as you might expect. The more
!     standard ordering would first do all variables for y = h, then y = 2h etc.

      do 10, II=Istart,Iend-1
        v = -1.0
        i = II/n
        j = II - i*n
        if (i.gt.0) then
          JJ = II - n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (i.lt.m-1) then
          JJ = II + n
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.gt.0) then
          JJ = II - 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        if (j.lt.n-1) then
          JJ = II + 1
          call MatSetValues(A,ione,II,ione,JJ,v,INSERT_VALUES,ierr)
        endif
        v = 4.0
        call MatSetValues(A,ione,II,ione,II,v,INSERT_VALUES,ierr)
 10   continue

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition,
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Form the products AtA = A^T A (and a duplicate of it) to demonstrate
!  MatTransposeMatMult() and MatDuplicate(); these matrices are not
!  used in the linear solve below and are destroyed during cleanup.

      call MatTransposeMatMult(A,A,MAT_INITIAL_MATRIX,1.d0,AtA,ierr)
      call MatDuplicate(AtA,MAT_COPY_VALUES,AtA2,ierr)

!  Create parallel vectors.
!   - Here, the parallel partitioning of the vector is determined by
!     PETSc at runtime.  We could also specify the local dimensions
!     if desired -- or use the more general routine VecCreate().
!   - When solving a linear system, the vectors and matrices MUST
!     be partitioned accordingly.  PETSc automatically generates
!     appropriately partitioned matrices and vectors when MatCreate()
!     and VecCreate() are used with the same communicator.
!   - Note: We form 1 vector from scratch and then duplicate as needed.

      call VecCreateMPI(PETSC_COMM_WORLD,PETSC_DECIDE,m*n,u,ierr)
      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
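!  Optional sanity check: the vector partitioning should match the
!  matrix row partitioning, since the same communicator and global
!  sizes were used.  A minimal sketch (commented out; low and high are
!  hypothetical variables that would be declared with the others above):
!
!      call VecGetOwnershipRange(u,low,high,ierr)
!      if (low .ne. Istart .or. high .ne. Iend) then
!        print*,'rank ',rank,': vector/matrix partitioning differs'
!      endif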
!  Set exact solution; then compute right-hand-side vector.
!  By default we use an exact solution of a vector with all
!  elements of 1.0;  Alternatively, using the runtime option
!  -random_exact_sol forms a solution vector with random components.

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                         '-random_exact_sol',flg,ierr)
      if (flg) then
        call PetscRandomCreate(PETSC_COMM_WORLD,rctx,ierr)
        call PetscRandomSetFromOptions(rctx,ierr)
        call VecSetRandom(u,rctx,ierr)
        call PetscRandomDestroy(rctx,ierr)
      else
        call VecSet(u,one,ierr)
      endif
      call MatMult(A,u,b,ierr)

!  View the exact solution vector if desired

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                         '-view_exact_sol',flg,ierr)
      if (flg) then
        call VecView(u,PETSC_VIEWER_STDOUT_WORLD,ierr)
      endif

! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!          Create the linear solver and set various options
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)

!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,ierr)

!  Set linear solver defaults for this problem (optional).
!   - By extracting the KSP and PC contexts from the KSP context,
!     we can then directly call any KSP and PC routines
!     to set various options.
!   - The following four statements are optional; all of these
!     parameters could alternatively be specified at runtime via
!     KSPSetFromOptions().  All of these defaults can be
!     overridden at runtime, as indicated below.

!     We comment out this section of code since the Jacobi
!     preconditioner is not a good general default.

!      call KSPGetPC(ksp,pc,ierr)
!      ptype = PCJACOBI
!      call PCSetType(pc,ptype,ierr)
!      tol = 1.e-7
!      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_REAL,                &
!     &     PETSC_DEFAULT_REAL,PETSC_DEFAULT_INTEGER,ierr)

!  Set user-defined monitoring routine if desired

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                         '-my_ksp_monitor',flg,ierr)
      if (flg) then
        call KSPMonitorSet(ksp,MyKSPMonitor,PETSC_NULL_OBJECT,          &
     &                     PETSC_NULL_FUNCTION,ierr)
!
!  Also use the default KSP monitor routine showing how it may be
!  used from Fortran
!
        call PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD,      &
     &                                  PETSC_VIEWER_DEFAULT,vf,ierr)
        call KSPMonitorSet(ksp,KSPMonitorDefault,vf,                    &
     &                     PetscViewerAndFormatDestroy,ierr)
      endif

!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)

!  Set convergence test routine if desired

      call PetscOptionsHasName(PETSC_NULL_OBJECT,PETSC_NULL_CHARACTER,  &
     &                         '-my_ksp_convergence',flg,ierr)
      if (flg) then
        call KSPSetConvergenceTest(ksp,MyKSPConverged,                  &
     &                             PETSC_NULL_OBJECT,                   &
     &                             PETSC_NULL_FUNCTION,ierr)
      endif
!
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
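!  After the solve, one can also ask KSP why it stopped.  A minimal
!  sketch (commented out; reason is a hypothetical variable that would
!  be declared as KSPConvergedReason with the others above):
!
!      call KSPGetConvergedReason(ksp,reason,ierr)
!      if (rank .eq. 0) write(6,*) 'KSPConvergedReason = ',reason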
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                     Check solution and clean up
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

!  Check the error

      call VecAXPY(x,neg_one,u,ierr)
      call VecNorm(x,NORM_2,norm,ierr)
      call KSPGetIterationNumber(ksp,its,ierr)
      if (rank .eq. 0) then
        if (norm .gt. 1.e-12) then
          write(6,100) norm,its
        else
          write(6,110) its
        endif
      endif
  100 format('Norm of error ',e11.4,' iterations ',i5)
  110 format('Norm of error < 1.e-12 iterations ',i5)

!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.

      call KSPDestroy(ksp,ierr)
      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      call MatDestroy(AtA,ierr)
      call MatDestroy(AtA2,ierr)

!  Always call PetscFinalize() before exiting a program.  This routine
!    - finalizes the PETSc libraries as well as MPI
!    - provides summary and diagnostic information if certain runtime
!      options are chosen (e.g., -log_summary).  See PetscFinalize()
!      manpage for more information.

      call PetscFinalize(ierr)
      end

! --------------------------------------------------------------
!
!  MyKSPMonitor - This is a user-defined routine for monitoring
!  the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined monitor context (unused here)
!
      subroutine MyKSPMonitor(ksp,n,rnorm,dummy,ierr)
      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscksp.h>

      KSP            ksp
      Vec            x
      PetscErrorCode ierr
      PetscInt       n,dummy
      PetscMPIInt    rank
      PetscReal      rnorm

!  Build the solution vector

      call KSPBuildSolution(ksp,PETSC_NULL_OBJECT,x,ierr)

!  Write the solution vector and residual norm to stdout
!   - Note that the parallel viewer PETSC_VIEWER_STDOUT_WORLD
!     handles data from multiple processors so that the
!     output is not jumbled.

      call MPI_Comm_rank(PETSC_COMM_WORLD,rank,ierr)
      if (rank .eq. 0) write(6,100) n
      call VecView(x,PETSC_VIEWER_STDOUT_WORLD,ierr)
      if (rank .eq. 0) write(6,200) n,rnorm

  100 format('iteration ',i5,' solution vector:')
  200 format('iteration ',i5,' residual norm ',e11.4)

      ierr = 0
      end

! --------------------------------------------------------------
!
!  MyKSPConverged - This is a user-defined routine for testing
!  convergence of the KSP iterative solvers.
!
!  Input Parameters:
!    ksp   - iterative context
!    n     - iteration number
!    rnorm - 2-norm (preconditioned) residual value (may be estimated)
!    dummy - optional user-defined convergence context (unused here)
!
      subroutine MyKSPConverged(ksp,n,rnorm,flag,dummy,ierr)
      implicit none

#include <petsc/finclude/petscsys.h>
#include <petsc/finclude/petscvec.h>
#include <petsc/finclude/petscksp.h>

      KSP                ksp
      PetscErrorCode     ierr
      PetscInt           n,dummy
      KSPConvergedReason flag
      PetscReal          rnorm

!  Declare convergence (positive reason) once the residual norm drops
!  below 0.05; otherwise keep iterating (reason = 0).

      if (rnorm .le. .05) then
        flag = 1
      else
        flag = 0
      endif
      ierr = 0

      end
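!
!  Example runs (assuming the executable is named ex2f; the -my_*
!  options are the ones registered above):
!
!      mpiexec -n 4 ./ex2f -m 10 -n 10 -ksp_monitor_short
!      mpiexec -n 2 ./ex2f -my_ksp_monitor -my_ksp_convergence
!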