static char help[] = "Tests DMCreateDomainDecomposition.\n\n";

/*
  Use the options
     -dim <2,3>            - dimension of the DMDA (parsed directly by this program)
     -da_grid_x <nx>       - number of grid points in x direction, if M < 0
     -da_grid_y <ny>       - number of grid points in y direction, if N < 0
     -da_processors_x <MX> - number of processors in x direction
     -da_processors_y <MY> - number of processors in y direction
*/

#include <petscdm.h>
#include <petscdmda.h>

PetscErrorCode FillLocalSubdomain(DM da,Vec gvec)
{
  DMDALocalInfo info;
  PetscMPIInt   rank;
  PetscInt      i,j,k;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  PetscCall(DMDAGetLocalInfo(da,&info));

  if (info.dim == 3) {
    PetscScalar ***g;
    PetscCall(DMDAVecGetArray(da,gvec,&g));
    /* loop over the locally owned grid points, storing the grid indices in the three dof slots */
    for (k=info.zs; k<info.zs+info.zm; k++) {
      for (j=info.ys; j<info.ys+info.ym; j++) {
        for (i=info.xs; i<info.xs+info.xm; i++) {
          g[k][j][info.dof*i+0] = i;
          g[k][j][info.dof*i+1] = j;
          g[k][j][info.dof*i+2] = k;
        }
      }
    }
    PetscCall(DMDAVecRestoreArray(da,gvec,&g));
  }
  if (info.dim == 2) {
    PetscScalar **g;
    PetscCall(DMDAVecGetArray(da,gvec,&g));
    /* loop over the locally owned grid points, storing the grid indices and the rank in the three dof slots */
    for (j=info.ys; j<info.ys+info.ym; j++) {
      for (i=info.xs; i<info.xs+info.xm; i++) {
        g[j][info.dof*i+0] = i;
        g[j][info.dof*i+1] = j;
        g[j][info.dof*i+2] = rank;
      }
    }
    PetscCall(DMDAVecRestoreArray(da,gvec,&g));
  }
  PetscFunctionReturn(0);
}

int main(int argc,char **argv)
{
  DM            da,*subda;
  PetscInt      i,dim = 3;
  PetscInt      M = 25,N = 25,P = 25;
  PetscMPIInt   size,rank;
  Vec           v;
  Vec           slvec,sgvec;
  IS            *ois,*iis;
  VecScatter    oscata;
  VecScatter    *iscat,*oscat,*gscat;
  DMDALocalInfo info;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc,&argv,(char*)0,help));
  PetscCall(PetscOptionsGetInt(NULL,NULL,"-dim",&dim,NULL));
  PetscCheck(dim == 2 || dim == 3,PETSC_COMM_WORLD,PETSC_ERR_ARG_OUTOFRANGE,"-dim must be 2 or 3");

  /* Create distributed array and get vectors */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD,&size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD,&rank));
  if (dim == 2) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,M,N,PETSC_DECIDE,PETSC_DECIDE,3,1,NULL,NULL,&da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DM_BOUNDARY_NONE,DMDA_STENCIL_STAR,M,N,P,PETSC_DECIDE,PETSC_DECIDE,PETSC_DECIDE,3,1,NULL,NULL,NULL,&da));
  }
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDAGetLocalInfo(da,&info));

  PetscCall(DMCreateDomainDecomposition(da,NULL,NULL,&iis,&ois,&subda));
  PetscCall(DMCreateDomainDecompositionScatters(da,1,subda,&iscat,&oscat,&gscat));

  {
    DMDALocalInfo subinfo;
    MatStencil    lower,upper;
    IS            patchis;
    Vec           smallvec;
    Vec           largevec;
    VecScatter    patchscat;

    PetscCall(DMDAGetLocalInfo(subda[0],&subinfo));

    lower.i = info.xs;
    lower.j = info.ys;
    lower.k = info.zs;
    upper.i = info.xs+info.xm;
    upper.j = info.ys+info.ym;
    upper.k = info.zs+info.zm;

    /* test the patch IS as a thing to scatter to/from */
    PetscCall(DMDACreatePatchIS(da,&lower,&upper,&patchis,patchis_offproc));
    PetscCall(DMGetGlobalVector(da,&largevec));

    PetscCall(VecCreate(PETSC_COMM_SELF,&smallvec));
    PetscCall(VecSetSizes(smallvec,info.dof*(upper.i - lower.i)*(upper.j - lower.j)*(upper.k - lower.k),PETSC_DECIDE));
    PetscCall(VecSetFromOptions(smallvec));
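
    /* Passing NULL as the index set for smallvec means "all of its entries, in order",
       so this scatter maps the sequential patch vector onto the entries of the global
       vector selected by patchis. */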
    PetscCall(VecScatterCreate(smallvec,NULL,largevec,patchis,&patchscat));

    PetscCall(FillLocalSubdomain(subda[0],smallvec));
    PetscCall(VecSet(largevec,0));

    PetscCall(VecScatterBegin(patchscat,smallvec,largevec,ADD_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(patchscat,smallvec,largevec,ADD_VALUES,SCATTER_FORWARD));
    PetscCall(ISView(patchis,PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecScatterView(patchscat,PETSC_VIEWER_STDOUT_WORLD));

    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(VecView(smallvec,PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    PetscCall(VecView(largevec,PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(VecDestroy(&smallvec));
    PetscCall(DMRestoreGlobalVector(da,&largevec));
    PetscCall(ISDestroy(&patchis));
    PetscCall(VecScatterDestroy(&patchscat));
  }

  /* view the various parts */
  {
    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF,"Processor %" PetscInt_FMT ": \n",i));
        PetscCall(DMView(subda[0],PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCall(DMGetLocalVector(subda[0],&slvec));
    PetscCall(DMGetGlobalVector(subda[0],&sgvec));
    PetscCall(DMGetGlobalVector(da,&v));

    /* test filling the outer (overlapping) region between the big DM and the small ones with the IS scatter */
    PetscCall(VecScatterCreate(v,ois[0],sgvec,NULL,&oscata));

    PetscCall(FillLocalSubdomain(subda[0],sgvec));

    PetscCall(VecScatterBegin(oscata,sgvec,v,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscata,sgvec,v,ADD_VALUES,SCATTER_REVERSE));

    /* test the local-to-local scatter */

    /* fill up the local subdomains and then add them together */
    PetscCall(FillLocalSubdomain(da,v));

    PetscCall(VecScatterBegin(gscat[0],v,slvec,ADD_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(gscat[0],v,slvec,ADD_VALUES,SCATTER_FORWARD));

    PetscCall(VecView(v,PETSC_VIEWER_STDOUT_WORLD));

    /* test ghost scattering backwards */
    PetscCall(VecSet(v,0));

    PetscCall(VecScatterBegin(gscat[0],slvec,v,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(gscat[0],slvec,v,ADD_VALUES,SCATTER_REVERSE));

    PetscCall(VecView(v,PETSC_VIEWER_STDOUT_WORLD));

    /* test overlap scattering backwards */
    PetscCall(DMLocalToGlobalBegin(subda[0],slvec,ADD_VALUES,sgvec));
    PetscCall(DMLocalToGlobalEnd(subda[0],slvec,ADD_VALUES,sgvec));

    PetscCall(VecSet(v,0));

    PetscCall(VecScatterBegin(oscat[0],sgvec,v,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscat[0],sgvec,v,ADD_VALUES,SCATTER_REVERSE));

    PetscCall(VecView(v,PETSC_VIEWER_STDOUT_WORLD));

    /* test interior scattering backwards */
    PetscCall(VecSet(v,0));

    PetscCall(VecScatterBegin(iscat[0],sgvec,v,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[0],sgvec,v,ADD_VALUES,SCATTER_REVERSE));

    PetscCall(VecView(v,PETSC_VIEWER_STDOUT_WORLD));

    /* test matrix allocation */
    for (i = 0; i < size; i++) {
      if (i == rank) {
        Mat m;
        PetscCall(PetscPrintf(PETSC_COMM_SELF,"Processor %" PetscInt_FMT ": \n",i));
        PetscCall(DMSetMatType(subda[0],MATAIJ));
        PetscCall(DMCreateMatrix(subda[0],&m));
        PetscCall(MatView(m,PETSC_VIEWER_STDOUT_SELF));
        PetscCall(MatDestroy(&m));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }
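
    /* return the work vectors; the subdomain DM, index sets, and scatters obtained from
       DMCreateDomainDecomposition() and DMCreateDomainDecompositionScatters() are
       destroyed entry by entry below, and then the arrays that held them are freed */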
    PetscCall(DMRestoreLocalVector(subda[0],&slvec));
    PetscCall(DMRestoreGlobalVector(subda[0],&sgvec));
    PetscCall(DMRestoreGlobalVector(da,&v));
  }

  PetscCall(DMDestroy(&subda[0]));
  PetscCall(ISDestroy(&ois[0]));
  PetscCall(ISDestroy(&iis[0]));

  PetscCall(VecScatterDestroy(&iscat[0]));
  PetscCall(VecScatterDestroy(&oscat[0]));
  PetscCall(VecScatterDestroy(&gscat[0]));
  PetscCall(VecScatterDestroy(&oscata));

  PetscCall(PetscFree(iscat));
  PetscCall(PetscFree(oscat));
  PetscCall(PetscFree(gscat));

  PetscCall(PetscFree(subda));
  PetscCall(PetscFree(ois));
  PetscCall(PetscFree(iis));

  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
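
/*
   Example invocations (the executable name below is illustrative; use the
   target your makefile actually builds):

     mpiexec -n 4 ./extest -dim 2 -da_grid_x 25 -da_grid_y 25
     mpiexec -n 8 ./extest -dim 3

   -dim is parsed directly by this program; the -da_* options are standard
   DMDA options consumed during DMSetFromOptions().
*/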