[petsc-users] Problem with GAMG elasticity case:'createdefaultdata' not set(?) need to support NULL data!

Einar Sørheim einar.sorheim at gmail.com
Fri Oct 18 08:30:54 CDT 2013


Uncommenting KSPSetFromOptions solved my initial problem, but I now get a
new problem that only appears after many time steps. In our code we use the
PETSc GAMG solver for several problems (e.g. thermal and mechanical) at each
time step, and I suspect it has something to do with cleanup after solving,
since the error only shows up after many calls to the PETSc solver interface
(a small sketch of how this could be checked follows after the error
message). The error message is the following:

[0]PETSC ERROR:
------------------------------------------------------------------------
[0]PETSC ERROR: Caught signal number 11 SEGV: Segmentation Violation,
probably memory access out of range
[0]PETSC ERROR: Try option -start_in_debugger or -on_error_attach_debugger
[0]PETSC ERROR: or see
http://www.mcs.anl.gov/petsc/documentation/faq.html#valgrind[0]PETSC ERROR:
or try http://valgrind.org on GNU/linux and Apple Mac OS X to find memory
corruption errors
[0]PETSC ERROR: likely location of problem given in stack below
[0]PETSC ERROR: ---------------------  Stack Frames
------------------------------------
[0]PETSC ERROR: Note: The EXACT line numbers in the stack are not available,
[0]PETSC ERROR:       INSTEAD the line number of the start of the function
[0]PETSC ERROR:       is given.
[0]PETSC ERROR: [0] MatSetNearNullSpace line 7580
src/mat/interface/D:\PROGRA~1\PETSC-~1.3\src\mat\INTERF~1\matrix.c
[0]PETSC ERROR: --------------------- Error Message
------------------------------------
[0]PETSC ERROR: Signal received!
[0]PETSC ERROR:
------------------------------------------------------------------------
[0]PETSC ERROR: Petsc Release Version 3.4.3, Oct, 15, 2013
[0]PETSC ERROR: See docs/changes/index.html for recent updates.
[0]PETSC ERROR: See docs/faq.html for hints about trouble shooting.
[0]PETSC ERROR: See docs/index.html for manual pages.
[0]PETSC ERROR:
------------------------------------------------------------------------
[0]PETSC ERROR: D:\Programming\gits\Alsim\SharedModules -
Copy\Source\Release\Alsim_nypetsc.exe on a arch-mswin-c-debug named CMP3 by
admeinar Fri Oct 18 12:26:04 2013
[0]PETSC ERROR: Libraries linked from
/cygdrive/d/Programming/petsc-3.4.3/arch-mswin-c-debug/lib
[0]PETSC ERROR: Configure run at Thu Oct 17 08:23:18 2013
[0]PETSC ERROR: Configure options --with-debugging=1 --with-openmp=yes
--with-pthread=no --with-cc="win32fe icl" --with-fc="win32fe ifort"
--with-cxx="win32fe icl" --with-blas-lapack-dir="C:/Program Files
(x86)/Intel/Composer XE 2013/mkl/lib/intel64" --download-superlu
--with-sowing=0 --with-c2html=0 --with-mpi-dir="C:/Program Files
(x86)/Intel/MPI/4.1.0.028/em64t" --useThreads=0 --useThreads=0
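
To check the cleanup suspicion I am thinking of printing the PETSc memory
use before and after each call to the solver routine listed below, with a
small helper along these lines (just a sketch: report_petsc_memory is a
name made up here, and I am assuming PetscMemoryGetCurrentUsage can be
called from Fortran the same way as PetscTime):

      subroutine report_petsc_memory(label)
#include <finclude/petscsys.h>
      character*(*) label
      PetscLogDouble mem
      PetscErrorCode ierr
!     Current memory used by the process, in bytes (assumes the Fortran
!     stub for PetscMemoryGetCurrentUsage is available)
      call PetscMemoryGetCurrentUsage(mem,ierr)
      write(*,*) label, " memory in use (bytes):", mem
      end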

The source code for the petsc interface:

      subroutine petscsolver(i_nodes,r_elements_,i_rowstart_,
     $     i_columnindex_,r_x_,r_b_,i_Nnz,i_Precond,i_Solver,
     $     coords_,i_nodedof)
#include <finclude/petscsys.h>
#include <finclude/petscvec.h>
#include <finclude/petscmat.h>
#include <finclude/petscpc.h>
#include <finclude/petscksp.h>
#include <finclude/petscviewer.h>

      integer i_nodes, i_Nnz, i_Precond, i_solver, i_nodedof
      integer i_rowstart_(*),i_columnindex_(*)
      double precision r_elements_(*),r_x_(*), r_b_(*),coords_(*)
      integer, allocatable  :: i_indices_(:)
!     Variables:
!
!  A       - matrix that defines linear system
!  ksp     - KSP context
!  x, b, u - approx solution, RHS, exact solution vectors
!
!      implicit none
      Vec     x,u,b,vec_coords
      PC      pc
      Mat     A,F,Matnull
      KSP    ksp
      PetscInt i,j,II,JJ,m,n,i1,imaxiter
      PetscInt Istart,Iend
      PetscInt nsteps,one, neg_one
      PetscErrorCode ierr
      PetscBool  flg
      PetscScalar  v,x_array_(1), norm, tol, t0,t1
      PetscOffset i_x
      PetscViewer viewer


      call PetscTime(t0,ierr)
      write(*,*) "Enter petsc"

      n      = 1
      nsteps = 1
      one    = 1
      neg_one = -1
      i1 = 1
      imaxiter = 1200


      call MatCreate(PETSC_COMM_WORLD,A,ierr)
      call MatSetSizes(A,PETSC_DECIDE,PETSC_DECIDE,i_nodes,i_nodes,ierr)
      call MatSetType(A, MATSEQAIJ,ierr)
      call MatSetUp(A,ierr)
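!     Preallocate roughly 100 nonzeros per row (no per-row counts given)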
      call MatSeqAIJSetPreallocation(A,100,PETSC_NULL_INTEGER,ierr)
!  The matrix is partitioned by contiguous chunks of rows across the
!  processors.  Determine which rows of the matrix are locally owned.

      call MatGetOwnershipRange(A,Istart,Iend,ierr)


      write(*,*) "Start of matrix fill", i_nodes, Istart, Iend
      do i=1,i_nodes
!         write(*,*) "Start of loop",i !!, i_indices_(i+1)
!         i_indices_(i+1) = i
!         write(*,*) "Start of loop2"
         do j=i_rowstart_(i),i_rowstart_(i+1)-1
!            write(*,*) "Start of loop 3"
            v = r_elements_(j)
!            write(*,*) i1,i,i1,j-1,v
            call MatSetValues(A,i1,i-1,i1,i_columnindex_(j)-1,v,
     $           INSERT_VALUES,ierr)
            if (ierr.gt.0) stop
         end do
      end do
!      write(*,*) "End of matrix fill"

!  Assemble matrix, using the 2-step process:
!       MatAssemblyBegin(), MatAssemblyEnd()
!  Computations can be done while messages are in transition
!  by placing code between these two statements.

      call MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY,ierr)
      call MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY,ierr)

!  Create parallel vectors.
!   - When using VecCreate(), the parallel partitioning of the vector
!     is determined by PETSc at runtime.
!   - Note: We form 1 vector from scratch and then duplicate as needed.
!      write(*,*) "Start of vector fill"
      call VecCreate(PETSC_COMM_WORLD,u,ierr)
      call VecSetType(u, VECSEQ,ierr)
      call VecSetSizes(u,PETSC_DECIDE,i_nodes,ierr)
!      call VecSetFromOptions(u,ierr)
      call VecDuplicate(u,b,ierr)
      call VecDuplicate(b,x,ierr)
      do i=1,i_nodes
         call VecSetValues(x,i1,i-1,r_x_(i),INSERT_VALUES,ierr)
         call VecSetValues(b,i1,i-1,r_b_(i),INSERT_VALUES,ierr)
      enddo
!  Assemble vector

       call VecAssemblyBegin(x,ierr)
       call VecAssemblyEnd(x,ierr)
       call VecAssemblyBegin(b,ierr)
       call VecAssemblyEnd(b,ierr)
       call PetscTime(t1,ierr)
!       write(*,*) "End of vec fill time spent :", t1-t0
!  Create linear solver context

      call KSPCreate(PETSC_COMM_WORLD,ksp,ierr)


!  Set operators. Here the matrix that defines the linear system
!  also serves as the preconditioning matrix.

      call KSPSetOperators(ksp,A,A,DIFFERENT_NONZERO_PATTERN,ierr)


      call KSPGetPC(ksp,pc,ierr)
      tol = 1.e-20
      call KSPSetTolerances(ksp,tol,PETSC_DEFAULT_DOUBLE_PRECISION,
     $     PETSC_DEFAULT_DOUBLE_PRECISION,imaxiter,ierr)
      select case (i_Precond)
      case ( 1 )
         call PCSetType(pc,PCJACOBI,ierr)
      case ( 2 )
         call PCSetType(pc,PCILU,ierr)
         call PCFactorSetMatSolverPackage(pc,MATSOLVERSUPERLU,ierr)
         call PCFactorSetUpMatSolverPackage(pc,ierr)
         call PCFactorGetMatrix(pc,F,ierr)
         call MatSuperluSetILUDropTol(F,1.d-4,ierr)
      case ( 3 )
         call PCSetType(pc,PCGAMG,ierr)
!         call PCGAMGInitializePackage()
!         call PCCreate_GAMG(pc,ierr)
      case DEFAULT
         call PCSetType(pc,PCJACOBI,ierr)
      end select

!      call PCSetType(pc,PCJACOBI,ierr)
      select case (i_solver)
      case ( 0 )
         call KSPSetType(ksp,KSPCG,ierr)
      case DEFAULT
         call KSPSetType(ksp,KSPCG,ierr)
      end select
      call KSPCGSetType(ksp,KSP_CG_SYMMETRIC,ierr)
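!     For the mechanical case (3 dof per node) build the rigid body
!     modes from the nodal coordinates and attach them to the matrix as
!     the near null space, which GAMG uses for elasticity problems.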
      if (i_nodedof==3) then
         write(*,*) "Set coord, number of nodes is:", i_nodes/i_nodedof
         call VecCreateSeqWithArray(MPI_COMM_SELF,3,i_nodes,coords_,
     $        vec_coords,ierr)
         call MatNullSpaceCreateRigidBody(vec_coords,Matnull,ierr)
         call MatSetNearNullSpace(A,Matnull,ierr)
         call MatNullSpaceDestroy(Matnull,ierr)

         write(*,*) "Finish setting null space ierr =", ierr
!         call PCSetCoordinates( pc, 3,i_nodes/i_nodedof, coords_, ierr )

      end if
!      call KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL,
!     $     PETSC_NULL,ierr)
!  Set runtime options, e.g.,
!      -ksp_type <type> -pc_type <type> -ksp_monitor -ksp_rtol <rtol>
!  These options will override those specified above as long as
!  KSPSetFromOptions() is called _after_ any other customization
!  routines.

      call KSPSetFromOptions(ksp,ierr)
      call KSPSetInitialGuessNonzero(ksp,PETSC_TRUE,ierr)
      call KSPSetUp(ksp,ierr)
      call PetscTime(t0,ierr)
      write(*,*) "Time setup", t0-t1
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
!                      Solve the linear system
! - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -

      call KSPSolve(ksp,b,x,ierr)
      call PetscTime(t1,ierr)
      write(*,*) "end solve, time used:",t1-t0
      call KSPGetSolution(ksp,x,ierr)
      call VecGetArray(x,x_array_,i_x,ierr)
      do i=1,i_nodes
         r_x_(i) = x_array_(i_x + i)
      enddo
      call VecRestoreArray(x,x_array_,i_x,ierr)


!  Free work space.  All PETSc objects should be destroyed when they
!  are no longer needed.
!      call PCDestroy(pc,ierr)

      call VecDestroy(u,ierr)
      call VecDestroy(x,ierr)
      call VecDestroy(b,ierr)
      call MatDestroy(A,ierr)
      if (i_nodedof==3) then
         call VecDestroy(vec_coords,ierr)
         call MatDestroy(Matnull,ierr)
      endif
      call KSPDestroy(ksp,ierr)
      call PetscTime(t0,ierr)
      write(*,*) "time celan up :", t0-t1

      end

-- 
Einar Sørheim