[petsc-users] on the data size problem
Barry Smith
bsmith at mcs.anl.gov
Tue Aug 25 14:14:56 CDT 2015
Convergence of iterative schemes depends on problem sizes and problem properties. You need to debug your code/algorithm to determine what is going on. Some advice: NEVER NEVER NEVER run in parallel until you are getting correct behavior and solutions on one process consistently. Increase the problem size gradually from 5 until you start seeing bad behavior; don't just jump from 5 to 5000.
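As a concrete first step, here is a minimal sketch (not taken from the posted code; the executable and file names are placeholders): check the return code of every PETSc call with CHKERRQ so the first failing call prints a full error traceback instead of letting the run limp along, and exercise the code on a single process before going back to the cluster.

    #include <petscmat.h>

    /* Minimal sketch of error checking: every PETSc routine returns a
       PetscErrorCode, and CHKERRQ turns the first failure into an
       immediate, traceable error message. */
    int main(int argc,char **args)
    {
      Mat            A;
      PetscViewer    fd;
      PetscErrorCode ierr;
      char           filein[PETSC_MAX_PATH_LEN];

      ierr = PetscInitialize(&argc,&args,(char*)0,NULL);if (ierr) return ierr;
      ierr = PetscOptionsGetString(NULL,"-fin",filein,PETSC_MAX_PATH_LEN-1,NULL);CHKERRQ(ierr);
      ierr = PetscViewerBinaryOpen(PETSC_COMM_WORLD,filein,FILE_MODE_READ,&fd);CHKERRQ(ierr);
      ierr = MatCreate(PETSC_COMM_WORLD,&A);CHKERRQ(ierr);
      ierr = MatLoad(A,fd);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&fd);CHKERRQ(ierr);
      ierr = MatDestroy(&A);CHKERRQ(ierr);
      ierr = PetscFinalize();
      return ierr;
    }

Running it as, say, mpiexec -n 1 ./bfs -fin smallgraph.dat (names hypothetical) exercises the same code path sequentially; once that is correct, grow the graph file a little at a time before adding processes.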
Barry
> On Aug 19, 2015, at 10:51 AM, Hongliang Lu <honglianglu87 at gmail.com> wrote:
>
> Dear all,
> I am trying to implement a BFS algorithm using PETSc. I have tested my code on a graph of 5 nodes, but when I tested it on a larger graph of 5000 nodes the program went wrong and could not finish. Could someone help me out? Thank you very much!
> I tried to run the following code on a cluster with 10 nodes.
>
> int main(int argc,char **args)
> {
>   Vec         curNodes,tmp;   /* current BFS frontier and its expansion */
>   Mat         oriGraph;       /* adjacency matrix of the graph */
>   PetscInt    rows,cols;
>   PetscScalar one=1;
>   PetscScalar nodeVecSum=1;
>   char        filein[PETSC_MAX_PATH_LEN],fileout[PETSC_MAX_PATH_LEN],buf[PETSC_MAX_PATH_LEN];
>   PetscViewer fd;
>
>   PetscInitialize(&argc,&args,(char *)0,help);
>
>   /* load the adjacency matrix from the binary file given with -fin */
>   PetscOptionsGetString(PETSC_NULL,"-fin",filein,PETSC_MAX_PATH_LEN-1,PETSC_NULL);
>   PetscViewerBinaryOpen(PETSC_COMM_WORLD,filein,FILE_MODE_READ,&fd);
>   MatCreate(PETSC_COMM_WORLD,&oriGraph);
>   MatLoad(oriGraph,fd);
>   MatGetSize(oriGraph,&rows,&cols);
>   MatSetOption(oriGraph,MAT_NEW_NONZERO_ALLOCATION_ERR,PETSC_FALSE);
>   MatSetUp(oriGraph);
>
>   /* frontier vector and a work vector of the same global size */
>   VecCreate(PETSC_COMM_WORLD,&curNodes);
>   VecSetSizes(curNodes,PETSC_DECIDE,rows);
>   VecSetFromOptions(curNodes);
>   VecCreate(PETSC_COMM_WORLD,&tmp);
>   VecSetSizes(tmp,PETSC_DECIDE,rows);
>   VecSetFromOptions(tmp);
>   VecZeroEntries(tmp);
>
>   /* pick a random start node and put it in the frontier */
>   srand(time(0));
>   PetscInt node=rand()%rows;
>   PetscPrintf(PETSC_COMM_SELF,"The node ID is: %d \n",node);
>   VecSetValues(curNodes,1,&node,&one,INSERT_VALUES);
>   VecAssemblyBegin(curNodes);
>   VecAssemblyEnd(curNodes);
>
>   PetscViewerDestroy(&fd);
>
>   const PetscInt    *colsv;
>   const PetscScalar *valsv;
>   PetscInt          ncols,i,zero=0;
>   PetscInt          iter=0;
>
>   nodeVecSum=1;
>   for(;iter<10;iter++)
>   {
>     /* expand the frontier: tmp = oriGraph * curNodes */
>     VecAssemblyBegin(curNodes);
>     VecAssemblyEnd(curNodes);
>     MatMult(oriGraph,curNodes,tmp);
>     VecAssemblyBegin(tmp);
>     VecAssemblyEnd(tmp);
>     VecSum(tmp,&nodeVecSum);
>     PetscPrintf(PETSC_COMM_SELF,"There are neighbors: %d \n",(int)nodeVecSum);
>     VecSum(curNodes,&nodeVecSum);
>     if(nodeVecSum<1)
>       break;
>
>     PetscScalar y;
>     PetscInt    indices;
>     PetscInt    n,m,rstart,rend;
>     IS          isrow;
>     Mat         curMat;
>     MatGetLocalSize(oriGraph,&n,&m);
>     MatGetOwnershipRange(oriGraph,&rstart,&rend);
>     ISCreateStride(PETSC_COMM_SELF,n,rstart,1,&isrow);
>     MatGetSubMatrix(oriGraph,isrow,NULL,MAT_INITIAL_MATRIX,&curMat);
>     MatGetSize(curMat,&n,&m);
>
>     /* zero out the rows of nodes already in the frontier so they are
>        not visited again */
>     for(i=rstart;i<rend;i++)
>     {
>       indices=i;
>       VecGetValues(curNodes,1,&indices,&y);
>       if(y>0){
>         MatGetRow(oriGraph,indices,&ncols,&colsv,&valsv);
>         PetscScalar *v,zero=0;
>         PetscMalloc1(cols,&v);
>         for(int j=0;j<ncols;j++){
>           v[j]=zero;
>         }
>         MatSetValues(oriGraph,1,&indices,ncols,colsv,v,INSERT_VALUES);
>         PetscFree(v);
>       }
>     }
>     MatAssemblyBegin(oriGraph,MAT_FINAL_ASSEMBLY);
>     MatAssemblyEnd(oriGraph,MAT_FINAL_ASSEMBLY);
>     ISDestroy(&isrow);
>     MatDestroy(&curMat);
>
>     /* the expanded neighborhood becomes the next frontier */
>     VecCopy(tmp,curNodes);
>     VecAssemblyBegin(curNodes);
>     VecAssemblyEnd(curNodes);
>   }
>   PetscPrintf(PETSC_COMM_SELF,"Finished in iterations of: %d\n",iter);
>
>   MatDestroy(&oriGraph);
>   VecDestroy(&curNodes);
>   VecDestroy(&tmp);
>   PetscFinalize();
>   return 0;
> }
> The PETSc version I have installed is 3.6.1.
>
>