static char help[] = "Tests DMCreateDomainDecomposition.\n\n";

/*
  Use the options
     -da_grid_x <nx>       - number of grid points in x direction, if M < 0
     -da_grid_y <ny>       - number of grid points in y direction, if N < 0
     -da_processors_x <MX> - number of processors in x direction
     -da_processors_y <MY> - number of processors in y direction
*/

#include <petscdm.h>
#include <petscdmda.h>

/* Fill each grid point of gvec with its grid indices (and, in 2d, the owning
   rank); assumes the DMDA was created with dof = 3, as in main() below */
PetscErrorCode FillLocalSubdomain(DM da, Vec gvec)
{
  DMDALocalInfo info;
  PetscMPIInt   rank;
  PetscInt      i, j, k;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(DMDAGetLocalInfo(da, &info));

  if (info.dim == 3) {
    PetscScalar ***g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (k = info.zs; k < info.zs + info.zm; k++) {
      for (j = info.ys; j < info.ys + info.ym; j++) {
        for (i = info.xs; i < info.xs + info.xm; i++) {
          g[k][j][info.dof * i + 0] = i;
          g[k][j][info.dof * i + 1] = j;
          g[k][j][info.dof * i + 2] = k;
        }
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  if (info.dim == 2) {
    PetscScalar **g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) {
        g[j][info.dof * i + 0] = i;
        g[j][info.dof * i + 1] = j;
        g[j][info.dof * i + 2] = rank;
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

int main(int argc, char **argv)
{
  DM            da, *subda;
  PetscInt      i, dim = 3;
  PetscInt      M = 25, N = 25, P = 25;
  PetscMPIInt   size, rank;
  Vec           v;
  Vec           slvec, sgvec;
  IS           *ois, *iis;
  VecScatter    oscata;
  VecScatter   *iscat, *oscat, *gscat;
  DMDALocalInfo info;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));

  /* Create the distributed array and get vectors */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (dim == 2) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, &da));
  } else if (dim == 3) {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, P, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, NULL, &da));
  } else SETERRQ(PETSC_COMM_WORLD, PETSC_ERR_SUP, "Only -dim 2 and -dim 3 are supported");
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDAGetLocalInfo(da, &info));

  PetscCall(DMCreateDomainDecomposition(da, NULL, NULL, &iis, &ois, &subda));
  PetscCall(DMCreateDomainDecompositionScatters(da, 1, subda, &iscat, &oscat, &gscat));

  {
    DMDALocalInfo subinfo;
    MatStencil    lower, upper;
    IS            patchis;
    Vec           smallvec;
    Vec           largevec;
    VecScatter    patchscat;

    PetscCall(DMDAGetLocalInfo(subda[0], &subinfo));

    lower.i = info.xs;
    lower.j = info.ys;
    lower.k = info.zs;
    upper.i = info.xs + info.xm;
    upper.j = info.ys + info.ym;
    upper.k = info.zs + info.zm;

    /* test the patch IS as a thing to scatter to/from */
    PetscCall(DMDACreatePatchIS(da, &lower, &upper, &patchis, patchis_offproc));
    PetscCall(DMGetGlobalVector(da, &largevec));

    PetscCall(VecCreate(PETSC_COMM_SELF, &smallvec));
    PetscCall(VecSetSizes(smallvec, info.dof * (upper.i - lower.i) * (upper.j - lower.j) * (upper.k - lower.k), PETSC_DECIDE));
    PetscCall(VecSetFromOptions(smallvec));
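    /* smallvec has no from-IS (NULL means "all entries in natural order"), so
       the scatter pairs its contiguous local entries with the global indices
       in patchis produced by DMDACreatePatchIS() above */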
    PetscCall(VecScatterCreate(smallvec, NULL, largevec, patchis, &patchscat));

    PetscCall(FillLocalSubdomain(subda[0], smallvec));
    PetscCall(VecSet(largevec, 0));

    PetscCall(VecScatterBegin(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(ISView(patchis, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecScatterView(patchscat, PETSC_VIEWER_STDOUT_WORLD));

    /* view the patch vectors one rank at a time */
    for (i = 0; i < size; i++) {
      if (i == rank) PetscCall(VecView(smallvec, PETSC_VIEWER_STDOUT_SELF));
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    PetscCall(VecView(largevec, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(VecDestroy(&smallvec));
    PetscCall(DMRestoreGlobalVector(da, &largevec));
    PetscCall(ISDestroy(&patchis));
    PetscCall(VecScatterDestroy(&patchscat));
  }

  /* view the various parts */
  {
    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %" PetscInt_FMT ":\n", i));
        PetscCall(DMView(subda[0], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCall(DMGetLocalVector(subda[0], &slvec));
    PetscCall(DMGetGlobalVector(subda[0], &sgvec));
    PetscCall(DMGetGlobalVector(da, &v));

    /* test filling the outer region between the big DM and the small ones with the IS scatter */
    PetscCall(VecScatterCreate(v, ois[0], sgvec, NULL, &oscata));

    PetscCall(FillLocalSubdomain(subda[0], sgvec));

    PetscCall(VecScatterBegin(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    /* test the local-to-local scatter */

    /* fill up the local subdomains and then add them together */
    PetscCall(FillLocalSubdomain(da, v));

    PetscCall(VecScatterBegin(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test ghost scattering backwards */
    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test overlap scattering backwards */
    PetscCall(DMLocalToGlobalBegin(subda[0], slvec, ADD_VALUES, sgvec));
    PetscCall(DMLocalToGlobalEnd(subda[0], slvec, ADD_VALUES, sgvec));

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test interior scattering backwards */
    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test matrix allocation */
    for (i = 0; i < size; i++) {
      if (i == rank) {
        Mat m;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %" PetscInt_FMT ":\n", i));
        PetscCall(DMSetMatType(subda[0], MATAIJ));
        PetscCall(DMCreateMatrix(subda[0], &m));
        PetscCall(MatView(m, PETSC_VIEWER_STDOUT_SELF));
        PetscCall(MatDestroy(&m));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }
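    /* slvec, sgvec, and v were checked out with DMGet{Local,Global}Vector(),
       so they are returned below rather than destroyed */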
    PetscCall(DMRestoreLocalVector(subda[0], &slvec));
    PetscCall(DMRestoreGlobalVector(subda[0], &sgvec));
    PetscCall(DMRestoreGlobalVector(da, &v));
  }

  PetscCall(DMDestroy(&subda[0]));
  PetscCall(ISDestroy(&ois[0]));
  PetscCall(ISDestroy(&iis[0]));

  PetscCall(VecScatterDestroy(&iscat[0]));
  PetscCall(VecScatterDestroy(&oscat[0]));
  PetscCall(VecScatterDestroy(&gscat[0]));
  PetscCall(VecScatterDestroy(&oscata));

  /* free the scatter arrays allocated by DMCreateDomainDecompositionScatters()
     and the IS/DM arrays allocated by DMCreateDomainDecomposition() */
  PetscCall(PetscFree(iscat));
  PetscCall(PetscFree(oscat));
  PetscCall(PetscFree(gscat));

  PetscCall(PetscFree(subda));
  PetscCall(PetscFree(ois));
  PetscCall(PetscFree(iis));

  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
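/*
   Example invocation (a sketch, not part of the original source; the
   executable name "ex" and the rank count are assumptions):

     mpiexec -n 4 ./ex -dim 2 -da_grid_x 25 -da_grid_y 25

   This exercises the 2d path on a 25x25 grid over 4 ranks: the patch IS
   scatter, the interior/overlap/ghost subdomain scatters, and the
   per-subdomain matrix allocation, each viewed on stdout.
*/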