[petsc-users] Fortran 77 VecGetArray problems
Valerio Grazioso
graziosov at libero.it
Thu Nov 20 21:19:39 CST 2014
Dear PETSc users,
I’m trying to write a simple parallel linear solver (A*x = q) in Fortran 77 with PETSc.
I’ve managed to set up an MPIAIJ matrix A and an MPI vector qvec, and then the KSP and the solution.
Everything seems fine (I’ve checked both A and q with MatView and VecView), but I’m having trouble getting the solution into a “normal” local double precision Fortran array.
To be doubly sure, I’ve tried to reproduce the necessary steps on qvec itself:
after creating and applying a VecScatter to bring a sequential copy of the vector to all processors (as described in
http://www.mcs.anl.gov/petsc/petsc-as/documentation/faq.html#mpi-vec-to-mpi-vec and in the DEBUG section of the code below),
I’ve tried to use VecGetArray as described in the Fortran users chapter of the user manual (pp. 147-148).
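For reference, this is (roughly) the pattern from the manual that I’m trying to follow; a minimal sketch, where the finclude header names are what I believe my PETSc version uses:

#include "finclude/petscsys.h"
#include "finclude/petscvec.h"
      Vec            x
      PetscScalar    xx_v(1)
      PetscOffset    i_x
      PetscErrorCode ierr
      PetscInt       i, n

C     VecGetArray does not copy: it returns an integer offset i_x such
C     that local element i of x can be read as xx_v(i_x + i)
      call VecGetLocalSize(x,n,ierr)
      call VecGetArray(x,xx_v,i_x,ierr)
      do i = 1, n
         write(6,*) xx_v(i_x + i)
      enddo
      call VecRestoreArray(x,xx_v,i_x,ierr)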
It seems to fail both for the local xx_v array (which ends up with wrong values) and for the i_x index offset, which comes back as a very large negative value
that, when used, generates an obvious “out of bounds” error.
I’ve tried VecGetArray directly on qvec as well: same result!
There must be something wrong in what I’m doing.
Can anybody point me in the right direction?
Thanks
Regards
Valerio Grazioso
Here is the relevant code:
#define xx_a(ib) xx_v(i_x + (ib))
C     xx_a(ib) gives 1-based access to the array exposed by VecGetArray,
C     through the integer offset i_x it returns
      ..............
      Vec xSEQ
      PetscScalar xx_v(1)
      PetscOffset i_x
      ..............
      ..............
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecCreate"
      call VecCreate(comm,qvec,ierr); CHKERRQ(ierr)
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecSetSizes"
      call VecSetSizes(qvec,PETSC_DECIDE,NCELL,ierr); CHKERRQ(ierr)
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecSetFromOptions"
      call VecSetFromOptions(qvec,ierr); CHKERRQ(ierr)
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecDuplicate"
      call VecDuplicate(qvec,xvec,ierr); CHKERRQ(ierr)
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecGetOwnershipRange"
      call VecGetOwnershipRange(qvec,Istart,Iend,ierr); CHKERRQ(ierr)
      write(6,'(a,i2,a,2i8)') "Processor ",mpi_rank,": petsc_solve ---> Istart,Iend (Vec)=",Istart,Iend
      do Iproc = 0,(mpi_size-1)
         if (Iproc == mpi_rank) then
            do I = (Istart+1), Iend
C              write(6,'(a,i2,a,2i8)') "Processor ",mpi_rank,": petsc_solve ---> VecSetValues",I,I
C              PETSc global indices are 0-based, hence I-1
               call VecSetValues(qvec,1,I-1,SU(I),INSERT_VALUES,ierr); CHKERRQ(ierr)
C              the value argument must be a PetscScalar: 0.d0, not the single-precision 0.E0
               call VecSetValues(xvec,1,I-1,0.d0,INSERT_VALUES,ierr); CHKERRQ(ierr)
            enddo
         endif
      enddo
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecAssemblyBegin"
      call VecAssemblyBegin(qvec,ierr); CHKERRQ(ierr)
      call VecAssemblyEnd(qvec,ierr); CHKERRQ(ierr)
      call VecAssemblyBegin(xvec,ierr); CHKERRQ(ierr)
      call VecAssemblyEnd(xvec,ierr); CHKERRQ(ierr)
      write(6,'(a,i2,a)') "Processor ",mpi_rank,": petsc_solve ---> VecAssemblyEnd"
C --------------------------------- DEBUG -------------------------------------
      if (1 == 1) then
C        Scatter the distributed qvec into a sequential copy on every process
         call VecScatterCreateToAll(qvec,ctx,xSEQ,ierr); CHKERRQ(ierr)
         call VecScatterBegin(ctx,qvec,xSEQ,INSERT_VALUES,SCATTER_FORWARD,ierr)
         CHKERRQ(ierr)
         call VecScatterEnd(ctx,qvec,xSEQ,INSERT_VALUES,SCATTER_FORWARD,ierr)
         CHKERRQ(ierr)
         call VecScatterDestroy(ctx,ierr); CHKERRQ(ierr)

C        call PetscViewerCreate(comm, view, ierr); CHKERRQ(ierr)
C        call PetscViewerSetType(view, PETSCVIEWERASCII, ierr); CHKERRQ(ierr)
C        call PetscViewerSetFormat(view, PETSC_VIEWER_ASCII_INDEX, ierr)
C        CHKERRQ(ierr)
C        call PetscViewerFileSetName(view, 'qvec.m', ierr)
C        call VecView(xSEQ, view, ierr); CHKERRQ(ierr)
C        call PetscViewerDestroy(view, ierr); CHKERRQ(ierr)

C        Access the sequential copy through the VecGetArray offset mechanism
         call VecGetArray(xSEQ,xx_v,i_x,ierr); CHKERRQ(ierr)
         write(6,*) (xx_a(I), I=1,NCELL)
         write(6,'(a,i20)') "i_x = ", i_x
         call VecRestoreArray(xSEQ,xx_v,i_x,ierr); CHKERRQ(ierr)

         call PetscFinalize(ierr); CHKERRQ(ierr)
         STOP
      endif
C ---------------------------------- END DEBUG --------------------------------
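P.S. I know the manual also mentions a VecGetArrayF90 variant that avoids the offset mechanism entirely when the compiler supports Fortran 90 pointers; I haven’t tried it yet, but my understanding is that it would look roughly like this (a minimal sketch, assuming the petscvec.h90 include of my PETSc version):

#include "finclude/petscvec.h90"
      PetscScalar, pointer :: xx_f90(:)

C     The F90 interface hands back a pointer to the local array directly
      call VecGetArrayF90(xSEQ,xx_f90,ierr); CHKERRQ(ierr)
      write(6,*) xx_f90(1:NCELL)
      call VecRestoreArrayF90(xSEQ,xx_f90,ierr); CHKERRQ(ierr)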