static char help[] = "Sets up a 3D DMDA mesh and a DMSwarm for Lagrangian particle tracking.\n\n";

/* The bracketed header names were lost in this listing; the PETSc headers below are
   reconstructed from the objects actually used (DMDA, DMSwarm, DMShell, PetscViewer,
   PetscSF, plus the private DM header required to set dm->ops further down). */
#include <petscsys.h>
#include <petscdm.h>
#include <petscdmda.h>
#include <petscdmswarm.h>
#include <petscdmshell.h>
#include <petscviewer.h>
#include <petscsf.h>
#include <petsc/private/dmimpl.h>
#include "global.h"
#include "Global/global_particles.h"

typedef struct {
  PetscInt xs, ys, zs, xm, ym, zm;
} local_corners_struct;

typedef struct {
  DM da;
} fields_struct;

typedef struct {
  PetscScalar dimx, dimy, dimz;
  PetscInt    resx, resy, resz;
  PetscScalar *y_val, *x_val, *z_val, *ri, *rj, *rk;
  PetscInt    x_str, y_str, z_str;
} grid_struct;

typedef struct {
  grid_struct grid;
  Vec         phi;
  Vec         rhs;
} UserContext;

typedef struct {
  PetscInt i, j, k;
} cell;

typedef struct {
  PetscViewer temp;
  PetscViewer temps;
  PetscViewer p;
  PetscViewer u;
} field_viewers_struct;

typedef struct {
  char temp[128];
  char temps[128];
  char p[128];
  char u[128];
} fields_file_path_struct;

PetscErrorCode DMLocatePoints_DMDARegular(DM dm, Vec pos, DMPointLocationType ltype, PetscSF cellSF);
PetscErrorCode DMGetNeighbors_DMDARegular(DM dm, PetscInt *nneighbors, const PetscMPIInt **neighbors);
PetscErrorCode _DMLocatePoints_DM_IS(DM dm, Vec pos, IS *iscell);
void HyperbolicStretch_reverse(int *i_final, double point, int n, double h);

static PetscErrorCode initialize_grid(grid_struct *gridp, char loc_grid_gen[])
{
  PetscViewer  viewer;
  PetscScalar *x_cor, *y_cor, *z_cor;
  char         filename[PETSC_MAX_PATH_LEN];

  PetscFunctionBegin;
  PetscCall(PetscCalloc1(gridp->resx + 1, &x_cor));
  PetscCall(PetscCalloc1(gridp->resy + 1, &y_cor));
  PetscCall(PetscCalloc1(gridp->resz + 1, &z_cor));

  // Read the cell-corner coordinates written by the grid generator
  PetscCall(PetscStrcpy(filename, loc_grid_gen));
  PetscCall(PetscStrcat(filename, "/Mesh_data/cornx.txt"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, filename, FILE_MODE_READ, &viewer));
  PetscCall(PetscViewerBinaryRead(viewer, x_cor, gridp->resx + 1, NULL, PETSC_DOUBLE));
  PetscCall(PetscViewerDestroy(&viewer));

  PetscCall(PetscStrcpy(filename, loc_grid_gen));
  PetscCall(PetscStrcat(filename, "/Mesh_data/corny.txt"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, filename, FILE_MODE_READ, &viewer));
  PetscCall(PetscViewerBinaryRead(viewer, y_cor, gridp->resy + 1, NULL, PETSC_DOUBLE));
  PetscCall(PetscViewerDestroy(&viewer));

  PetscCall(PetscStrcpy(filename, loc_grid_gen));
  PetscCall(PetscStrcat(filename, "/Mesh_data/cornz.txt"));
  PetscCall(PetscViewerBinaryOpen(PETSC_COMM_SELF, filename, FILE_MODE_READ, &viewer));
  PetscCall(PetscViewerBinaryRead(viewer, z_cor, gridp->resz + 1, NULL, PETSC_DOUBLE));
  PetscCall(PetscViewerDestroy(&viewer));

  // Cell-centre coordinates and half cell widths, with one ghost entry on each side:
  // indices run from -1 to res, so res+2 entries are needed per direction
  PetscCall(PetscCalloc1(gridp->resx + 2, &gridp->x_val));
  PetscCall(PetscCalloc1(gridp->resy + 2, &gridp->y_val));
  PetscCall(PetscCalloc1(gridp->resz + 2, &gridp->z_val));
  gridp->x_val += 1;
  gridp->y_val += 1;
  gridp->z_val += 1;
  PetscCall(PetscCalloc1(gridp->resx + 2, &gridp->ri));
  PetscCall(PetscCalloc1(gridp->resy + 2, &gridp->rj));
  PetscCall(PetscCalloc1(gridp->resz + 2, &gridp->rk));
  gridp->ri += 1;
  gridp->rj += 1;
  gridp->rk += 1;

  int n = gridp->resx;
  int m = gridp->resy;
  int l = gridp->resz;

  // Fill grid struct: cell centres (x_val, y_val, z_val) and half cell widths (ri, rj, rk)
  for (int i = 1; i <= n; i++) {
    gridp->ri[i - 1]    = (x_cor[i] - x_cor[i - 1]) / 2.;
    gridp->x_val[i - 1] = (x_cor[i] + x_cor[i - 1]) / 2.;
  }
  for (int j = 1; j <= m; j++) {
    gridp->rj[j - 1]    = (y_cor[j] - y_cor[j - 1]) / 2.;
    gridp->y_val[j - 1] = (y_cor[j] + y_cor[j - 1]) / 2.;
  }
  for (int k = 1; k <= l; k++) {
    gridp->rk[k - 1]    = (z_cor[k] - z_cor[k - 1]) / 2.;
    gridp->z_val[k - 1] = (z_cor[k] + z_cor[k - 1]) / 2.;
  }

  // The values of the grid at the boundaries of the domain (ghost entries)
  gridp->ri[-1] = gridp->ri[0];
  gridp->ri[n]  = gridp->ri[n - 1];
  gridp->rj[-1] = gridp->rj[0];
  gridp->rj[m]  = gridp->rj[m - 1];
  gridp->rk[-1] = gridp->rk[0];
  gridp->rk[l]  = gridp->rk[l - 1];
  gridp->x_val[-1] = -gridp->ri[-1];
  gridp->x_val[n]  = gridp->x_val[n - 1] + 2. * gridp->ri[n - 1];
  gridp->y_val[-1] = -gridp->rj[-1];
  gridp->y_val[m]  = gridp->y_val[m - 1] + 2. * gridp->rj[m - 1];
  gridp->z_val[-1] = -gridp->rk[-1];
  gridp->z_val[l]  = gridp->z_val[l - 1] + 2. * gridp->rk[l - 1];

  PetscCall(PetscFree(x_cor));
  PetscCall(PetscFree(y_cor));
  PetscCall(PetscFree(z_cor));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode destroy_grid(grid_struct *gridp)
{
  PetscFunctionBegin;
  // Shift the pointers back to the start of their allocations before freeing
  gridp->x_val -= 1;
  gridp->y_val -= 1;
  gridp->z_val -= 1;
  PetscCall(PetscFree(gridp->x_val));
  PetscCall(PetscFree(gridp->y_val));
  PetscCall(PetscFree(gridp->z_val));
  gridp->ri -= 1;
  gridp->rj -= 1;
  gridp->rk -= 1;
  PetscCall(PetscFree(gridp->ri));
  PetscCall(PetscFree(gridp->rj));
  PetscCall(PetscFree(gridp->rk));
  PetscFunctionReturn(PETSC_SUCCESS);
}

// Based on DMDASetUniformCoordinates
// Coordinates of the cell edges (on a staggered DMDA)
static PetscErrorCode SetNonUniform3DCoordinates(DM da_swarm, local_corners_struct *cornp, grid_struct *gridp, PetscInt rank)
{
  MPI_Comm        comm;
  DM              cda;
  DMBoundaryType  bx, by, bz;
  Vec             xcoor;
  PetscScalar    *coors;
  PetscInt        i, j, k, M, N, P, dim, cnt, m, n, p, dof, s;
  PetscInt        istart, isize, jstart, jsize, kstart, ksize;
  DMDAStencilType st;

  PetscFunctionBegin;
  // Gets info about the distributed array: dimension (dim), number of cells (M,N,P),
  // number of processors (m,n,p), degrees of freedom per node (dof), stencil width (s),
  // boundary types (bx,by,bz) and stencil type (st)
  PetscCall(DMDAGetInfo(da_swarm, &dim, &M, &N, &P, &m, &n, &p, &dof, &s, &bx, &by, &bz, &st));
  PetscCall(PetscObjectGetComm((PetscObject)da_swarm, &comm));
  PetscCall(DMDAGetCorners(da_swarm, &istart, &jstart, &kstart, &isize, &jsize, &ksize));
  PetscCall(DMGetCoordinateDM(da_swarm, &cda));
  PetscCall(DMCreateGlobalVector(cda, &xcoor));
  PetscCall(VecGetArray(xcoor, &coors));
  cnt = 0;
  for (k = kstart; k < kstart + ksize; k++) {
    for (j = jstart; j < jstart + jsize; j++) {
      for (i = istart; i < istart + isize; i++) {
        // Edge coordinate = cell centre + half cell width
        coors[cnt++] = gridp->x_val[i - 1] + gridp->ri[i - 1];
        coors[cnt++] = gridp->y_val[j - 1] + gridp->rj[j - 1];
        coors[cnt++] = gridp->z_val[k - 1] + gridp->rk[k - 1];
      }
    }
  }
  PetscCall(VecRestoreArray(xcoor, &coors));
  PetscCall(DMSetCoordinates(da_swarm, xcoor));
  PetscCall(VecDestroy(&xcoor));
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  fields_struct        fields;
  UserContext          user;
  local_corners_struct corners;

  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));

  user.grid.resx = 50;
  user.grid.resy = 70;
  user.grid.resz = 50;
  user.grid.dimx = 1;
  user.grid.dimy = 1;
  user.grid.dimz = 1;

  // Creation of our collocated mesh
  PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_GHOSTED, DM_BOUNDARY_GHOSTED, DMDA_STENCIL_BOX, user.grid.resx, user.grid.resy, user.grid.resz, PETSC_DECIDE, 1, PETSC_DECIDE, 1, 1, 0, 0, 0, &fields.da));
  PetscCall(DMSetFromOptions(fields.da));
  PetscCall(DMSetUp(fields.da));
  PetscCall(DMSetApplicationContext(fields.da, &user));

  // Initialize the grid
  char loc_grid_gen[] = "./Grid_generation";
  PetscCall(initialize_grid(&user.grid, loc_grid_gen));

  // Processor info
  PetscMPIInt rank, size;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Number of processors = %d, rank = %d\n", size, rank));
  PetscCall(DMDAGetCorners(fields.da, &corners.xs, &corners.ys, &corners.zs, &corners.xm, &corners.ym, &corners.zm));
  PetscCall(PetscPrintf(PETSC_COMM_SELF, "[Proc %d] cornx = %" PetscInt_FMT ", corny = %" PetscInt_FMT ", cornz = %" PetscInt_FMT ", cornxm = %" PetscInt_FMT ", cornym = %" PetscInt_FMT ", cornzm = %" PetscInt_FMT "\n", rank, corners.xs, corners.ys, corners.zs, corners.xm, corners.ym, corners.zm));

  //--------------------- LAGRANGIAN PARTICLE TRACKING ---------------------//
  DM               swarm;
  DM               da_swarm;
  DM               dmcell;
  PetscInt         dim, M, N, P, m, n, p, dof, s;
  DMBoundaryType   bx, by, bz;
  DMDAStencilType  st;
  const PetscInt  *lx_init, *ly_init, *lz_init;

  // Get info of our collocated grid
  PetscCall(DMDAGetInfo(fields.da, &dim, &M, &N, &P, &m, &n, &p, &dof, &s, &bx, &by, &bz, &st));

  // Add 1 cell in each direction (the staggered grid has one more vertex than cells)
  M = M + 1;
  N = N + 1;
  P = P + 1;

  // Get ownership ranges (number of nodes owned by each process in each direction)
  PetscCall(DMDAGetOwnershipRanges(fields.da, &lx_init, &ly_init, &lz_init));

  // Add 1 cell to the ownership ranges
  // (lx has m entries, ly has n entries, lz has p entries: one per process per direction)
  PetscInt *lx, *ly, *lz;
  PetscCall(PetscMalloc3(m, &lx, n, &ly, p, &lz));
  for (int i = 0; i < m; i++) lx[i] = lx_init[i];
  for (int i = 0; i < n; i++) ly[i] = ly_init[i];
  for (int i = 0; i < p; i++) lz[i] = lz_init[i];
  lx[0] = lx_init[0] + 1; // the extra cell is added on the first process
  ly[0] = ly_init[0] + 1;
  lz[0] = lz_init[0] + 1;

  // Create a staggered DMDA without ghost cells (needed for DMSwarm to work)
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Creation of staggered DMDA\n"));
  PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_BOX, M, N, P, m, n, p, 1, 1, lx, ly, lz, &da_swarm));
  PetscCall(PetscFree3(lx, ly, lz));
  PetscCall(DMSetFromOptions(da_swarm));
  PetscCall(DMSetUp(da_swarm));

  // Set the coordinates (! I had to rewrite this function for my collocated grid)
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Set the non-uniform coordinates\n"));
  PetscCall(SetNonUniform3DCoordinates(da_swarm, &corners, &user.grid, rank));

  // Create a DMShell for point location purposes
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "DMShell\n"));
  PetscCall(DMShellCreate(PETSC_COMM_WORLD, &dmcell));
  PetscCall(DMSetApplicationContext(dmcell, da_swarm));
  dmcell->ops->locatepoints = DMLocatePoints_DMDARegular;
  dmcell->ops->getneighbors = DMGetNeighbors_DMDARegular;

  // Create the DMSwarm
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Creation of SWARM DMDA\n"));
  PetscCall(DMCreate(PETSC_COMM_WORLD, &swarm));
  PetscCall(DMSetType(swarm, DMSWARM));
  PetscCall(DMSetDimension(swarm, 3));
  PetscCall(DMSwarmSetType(swarm, DMSWARM_PIC));
  PetscCall(DMSwarmSetCellDM(swarm, dmcell));

  // Register fields
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Register fields\n"));
  PetscCall(DMSwarmRegisterPetscDatatypeField(swarm, "vel", 3, PETSC_REAL));
  PetscCall(DMSwarmRegisterPetscDatatypeField(swarm, "vel_fluid", 3, PETSC_REAL));
  PetscCall(DMSwarmRegisterPetscDatatypeField(swarm, "temp", 1, PETSC_REAL));

  // Finalize the field registration
  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "Finalize the swarm\n"));
  PetscCall(DMSwarmFinalizeFieldRegister(swarm));

  PetscCall(PetscPrintf(PETSC_COMM_WORLD, "END...\n"));
  PetscCall(DMDestroy(&swarm));
  PetscCall(DMDestroy(&dmcell));
  PetscCall(DMDestroy(&da_swarm));
  PetscCall(destroy_grid(&user.grid));
  PetscCall(DMDestroy(&fields.da));
  PetscCall(PetscFinalize());
  return 0;
}

PetscErrorCode _DMLocatePoints_DM_IS(DM dm, Vec pos, IS *iscell)
{
  // ! IF THIS FUNCTION IS CHANGED, SO SHOULD particle_getCellID IN particle_addNewAtSouth
  PetscInt             p, n, bs, npoints, mx, my, mz;
  DM                   dmregular;
  PetscInt            *cellidx;
  const PetscScalar   *coor;
  PetscReal            dx, dy, dz;
  PetscMPIInt          rank;
  local_corners_struct corners_stag;
  PetscInt             xstart, ystart, zstart, xend, yend, zend;
  PetscInt             pt_i, pt_j, pt_k;
  PetscReal            h_x, h_y, h_z;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(VecGetLocalSize(pos, &n));  // n = number of elements in the vector (local size)
  PetscCall(VecGetBlockSize(pos, &bs)); // bs = block size of the vector
  npoints = n / bs;                     // number of particles
  PetscCall(PetscMalloc1(npoints, &cellidx)); // cellidx = memory allocation of size npoints
  PetscCall(DMGetApplicationContext(dm, &dmregular));
  PetscCall(DMDAGetCorners(dmregular, &corners_stag.xs, &corners_stag.ys, &corners_stag.zs, &corners_stag.xm, &corners_stag.ym, &corners_stag.zm));
  PetscCall(DMDAGetInfo(dmregular, NULL, &mx, &my, &mz, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL)); // get the dimensions (number of vertices)

  // Because the grid is staggered, the processes touching the boundary own one more cell
  if (corners_stag.xs == 0) {
    xstart = 0;
    xend   = corners_stag.xm - 1;
  } else {
    xstart = corners_stag.xs - 1;
    xend   = xstart + corners_stag.xm;
  }
  if (corners_stag.ys == 0) {
    ystart = 0;
    yend   = corners_stag.ym - 1;
  } else {
    ystart = corners_stag.ys - 1;
    yend   = ystart + corners_stag.ym;
  }
  if (corners_stag.zs == 0) {
    zstart = 0;
    zend   = corners_stag.zm - 1;
  } else {
    zstart = corners_stag.zs - 1;
    zend   = zstart + corners_stag.zm;
  }

  // Dimensions (length[] and l_ref are provided by the global headers)
  h_x = length[0] / l_ref;
  h_y = length[1] / l_ref;
  h_z = length[2] / l_ref;

  // For the uniform case
  dx = h_x / ((PetscReal)(mx - 1));
  dy = h_y / ((PetscReal)(my - 1));
  dz = h_z / ((PetscReal)(mz - 1));

  PetscCall(VecGetArrayRead(pos, &coor));
  for (p = 0; p < npoints; p++) {
    // NOTE: the body of this loop was lost in the original listing. The cell-index
    // computation below is a reconstruction assuming uniform spacing (dx,dy,dz);
    // the original presumably also handled the stretched directions
    // (cf. HyperbolicStretch_reverse declared above).
    pt_i = (PetscInt)(coor[3 * p + 0] / dx);
    pt_j = (PetscInt)(coor[3 * p + 1] / dy);
    pt_k = (PetscInt)(coor[3 * p + 2] / dz);

    cellidx[p] = (pt_i - xstart) + (pt_j - ystart) * (xend - xstart) + (pt_k - zstart) * (xend - xstart) * (yend - ystart);

    if (pt_i < xstart || pt_i >= xend) { cellidx[p] = DMLOCATEPOINT_POINT_NOT_FOUND; }
    if (pt_j < ystart || pt_j >= yend) { cellidx[p] = DMLOCATEPOINT_POINT_NOT_FOUND; }
    if (pt_k < zstart || pt_k >= zend) { cellidx[p] = DMLOCATEPOINT_POINT_NOT_FOUND; }
  }
  PetscCall(VecRestoreArrayRead(pos, &coor));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, npoints, cellidx, PETSC_OWN_POINTER, iscell));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode DMLocatePoints_DMDARegular(DM dm, Vec pos, DMPointLocationType ltype, PetscSF cellSF)
{
  IS              iscell;
  PetscSFNode    *cells;
  PetscInt        p, bs, npoints, nfound;
  const PetscInt *boxCells;

  PetscFunctionBegin;
  PetscCall(_DMLocatePoints_DM_IS(dm, pos, &iscell));
  PetscCall(VecGetLocalSize(pos, &npoints));
  PetscCall(VecGetBlockSize(pos, &bs));
  npoints = npoints / bs;
  PetscCall(PetscMalloc1(npoints, &cells));
  PetscCall(ISGetIndices(iscell, &boxCells));
  // NOTE: the listing was truncated from this point on; the loop body and the remainder
  // of the function are reconstructed following the standard DMSwarm point-location pattern.
  for (p = 0; p < npoints; p++) {
    cells[p].rank  = 0;
    cells[p].index = boxCells[p];
  }
  PetscCall(ISRestoreIndices(iscell, &boxCells));
  nfound = npoints;
  PetscCall(PetscSFSetGraph(cellSF, npoints, nfound, NULL, PETSC_OWN_POINTER, cells, PETSC_OWN_POINTER));
  PetscCall(ISDestroy(&iscell));
  PetscFunctionReturn(PETSC_SUCCESS);
}