static char help[] = "Tests DMCreateDomainDecomposition.\n\n";

/*
  Use the options
     -da_grid_x <nx>       - number of grid points in x direction, if M < 0
     -da_grid_y <ny>       - number of grid points in y direction, if N < 0
     -da_processors_x <MX> - number of processors in x direction
     -da_processors_y <MY> - number of processors in y direction
*/

#include <petscdm.h>
#include <petscdmda.h>

PetscErrorCode FillLocalSubdomain(DM da, Vec gvec) {
  DMDALocalInfo info;
  PetscMPIInt   rank;
  PetscInt      i, j, k;

  PetscFunctionBeginUser;
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  PetscCall(DMDAGetLocalInfo(da, &info));

  if (info.dim == 3) {
    PetscScalar ***g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points */
    for (k = info.zs; k < info.zs + info.zm; k++) {
      for (j = info.ys; j < info.ys + info.ym; j++) {
        for (i = info.xs; i < info.xs + info.xm; i++) {
          g[k][j][info.dof * i + 0] = i;
          g[k][j][info.dof * i + 1] = j;
          g[k][j][info.dof * i + 2] = k;
        }
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  if (info.dim == 2) {
    PetscScalar **g;
    PetscCall(DMDAVecGetArray(da, gvec, &g));
    /* loop over the locally owned grid points; all three components are
       written at once, so no extra loop over the dof index is needed */
    for (j = info.ys; j < info.ys + info.ym; j++) {
      for (i = info.xs; i < info.xs + info.xm; i++) {
        g[j][info.dof * i + 0] = i;
        g[j][info.dof * i + 1] = j;
        g[j][info.dof * i + 2] = rank;
      }
    }
    PetscCall(DMDAVecRestoreArray(da, gvec, &g));
  }
  PetscFunctionReturn(0);
}

int main(int argc, char **argv) {
  DM            da, *subda;
  PetscInt      i, dim = 3;
  PetscInt      M = 25, N = 25, P = 25;
  PetscMPIInt   size, rank;
  Vec           v;
  Vec           slvec, sgvec;
  IS           *ois, *iis;
  VecScatter    oscata;
  VecScatter   *iscat, *oscat, *gscat;
  DMDALocalInfo info;
  PetscBool     patchis_offproc = PETSC_TRUE;

  PetscFunctionBeginUser;
  PetscCall(PetscInitialize(&argc, &argv, (char *)0, help));
  PetscCall(PetscOptionsGetInt(NULL, NULL, "-dim", &dim, NULL));
  /* guard against an unsupported dimension, which would leave da uninitialized */
  PetscCheck(dim == 2 || dim == 3, PETSC_COMM_WORLD, PETSC_ERR_ARG_OUTOFRANGE, "-dim must be 2 or 3");

  /* Create distributed array and get vectors */
  PetscCallMPI(MPI_Comm_size(PETSC_COMM_WORLD, &size));
  PetscCallMPI(MPI_Comm_rank(PETSC_COMM_WORLD, &rank));
  if (dim == 2) {
    PetscCall(DMDACreate2d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, &da));
  } else {
    PetscCall(DMDACreate3d(PETSC_COMM_WORLD, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DM_BOUNDARY_NONE, DMDA_STENCIL_STAR, M, N, P, PETSC_DECIDE, PETSC_DECIDE, PETSC_DECIDE, 3, 1, NULL, NULL, NULL, &da));
  }
  PetscCall(DMSetFromOptions(da));
  PetscCall(DMSetUp(da));
  PetscCall(DMDAGetLocalInfo(da, &info));

  PetscCall(DMCreateDomainDecomposition(da, NULL, NULL, &iis, &ois, &subda));
  PetscCall(DMCreateDomainDecompositionScatters(da, 1, subda, &iscat, &oscat, &gscat));
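
  /*
     Note: per its man page, DMCreateDomainDecompositionScatters returns three
     families of scatters from the global vector of da to each subdomain:
     iscat targets the nonoverlapping (interior) subdomain entries, oscat the
     overlapping entries, and gscat the subdomain's local (ghosted) vector.
     The tests below exercise each of them in turn.
  */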
  {
    DMDALocalInfo subinfo;
    MatStencil    lower, upper;
    IS            patchis;
    Vec           smallvec;
    Vec           largevec;
    VecScatter    patchscat;

    PetscCall(DMDAGetLocalInfo(subda[0], &subinfo));

    lower.i = info.xs;
    lower.j = info.ys;
    lower.k = info.zs;
    upper.i = info.xs + info.xm;
    upper.j = info.ys + info.ym;
    upper.k = info.zs + info.zm;

    /* test the patch IS as a thing to scatter to/from */
    PetscCall(DMDACreatePatchIS(da, &lower, &upper, &patchis, patchis_offproc));
    PetscCall(DMGetGlobalVector(da, &largevec));

    PetscCall(VecCreate(PETSC_COMM_SELF, &smallvec));
    PetscCall(VecSetSizes(smallvec, info.dof * (upper.i - lower.i) * (upper.j - lower.j) * (upper.k - lower.k), PETSC_DECIDE));
    PetscCall(VecSetFromOptions(smallvec));
    PetscCall(VecScatterCreate(smallvec, NULL, largevec, patchis, &patchscat));

    PetscCall(FillLocalSubdomain(subda[0], smallvec));
    PetscCall(VecSet(largevec, 0));

    PetscCall(VecScatterBegin(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(patchscat, smallvec, largevec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(ISView(patchis, PETSC_VIEWER_STDOUT_WORLD));
    PetscCall(VecScatterView(patchscat, PETSC_VIEWER_STDOUT_WORLD));

    /* view the small vectors one rank at a time */
    for (i = 0; i < size; i++) {
      if (i == rank) PetscCall(VecView(smallvec, PETSC_VIEWER_STDOUT_SELF));
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    PetscCall(VecView(largevec, PETSC_VIEWER_STDOUT_WORLD));

    PetscCall(VecDestroy(&smallvec));
    PetscCall(DMRestoreGlobalVector(da, &largevec));
    PetscCall(ISDestroy(&patchis));
    PetscCall(VecScatterDestroy(&patchscat));
  }

  /* view the various parts */
  {
    for (i = 0; i < size; i++) {
      if (i == rank) {
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %" PetscInt_FMT ": \n", i));
        PetscCall(DMView(subda[0], PETSC_VIEWER_STDOUT_SELF));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }

    PetscCall(DMGetLocalVector(subda[0], &slvec));
    PetscCall(DMGetGlobalVector(subda[0], &sgvec));
    PetscCall(DMGetGlobalVector(da, &v));

    /* test filling the overlap between the big DM and the small ones with the IS scatter */
    PetscCall(VecScatterCreate(v, ois[0], sgvec, NULL, &oscata));

    PetscCall(FillLocalSubdomain(subda[0], sgvec));

    PetscCall(VecScatterBegin(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscata, sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    /* test the local-to-local scatter */

    /* fill up the local subdomain and then add them together */
    PetscCall(FillLocalSubdomain(da, v));

    PetscCall(VecScatterBegin(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(gscat[0], v, slvec, ADD_VALUES, SCATTER_FORWARD));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test ghost scattering backwards */
    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(gscat[0], slvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test overlap scattering backwards */
    PetscCall(DMLocalToGlobalBegin(subda[0], slvec, ADD_VALUES, sgvec));
    PetscCall(DMLocalToGlobalEnd(subda[0], slvec, ADD_VALUES, sgvec));

    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(oscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test interior scattering backwards */
    PetscCall(VecSet(v, 0));

    PetscCall(VecScatterBegin(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(iscat[0], sgvec, v, ADD_VALUES, SCATTER_REVERSE));

    PetscCall(VecView(v, PETSC_VIEWER_STDOUT_WORLD));

    /* test matrix allocation */
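    /*
       DMCreateMatrix on the subdomain DM should return a sequential AIJ
       matrix preallocated for the subdomain's star stencil; viewing it one
       rank at a time below makes that nonzero structure visible.
    */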
    for (i = 0; i < size; i++) {
      if (i == rank) {
        Mat m;
        PetscCall(PetscPrintf(PETSC_COMM_SELF, "Processor %" PetscInt_FMT ": \n", i));
        PetscCall(DMSetMatType(subda[0], MATAIJ));
        PetscCall(DMCreateMatrix(subda[0], &m));
        PetscCall(MatView(m, PETSC_VIEWER_STDOUT_SELF));
        PetscCall(MatDestroy(&m));
      }
      PetscCallMPI(MPI_Barrier(PETSC_COMM_WORLD));
    }
    PetscCall(DMRestoreLocalVector(subda[0], &slvec));
    PetscCall(DMRestoreGlobalVector(subda[0], &sgvec));
    PetscCall(DMRestoreGlobalVector(da, &v));
  }

  PetscCall(DMDestroy(&subda[0]));
  PetscCall(ISDestroy(&ois[0]));
  PetscCall(ISDestroy(&iis[0]));

  PetscCall(VecScatterDestroy(&iscat[0]));
  PetscCall(VecScatterDestroy(&oscat[0]));
  PetscCall(VecScatterDestroy(&gscat[0]));
  PetscCall(VecScatterDestroy(&oscata));

  /* the scatter arrays were allocated by DMCreateDomainDecompositionScatters
     and must be freed; oscata is a single scatter already destroyed above */
  PetscCall(PetscFree(iscat));
  PetscCall(PetscFree(oscat));
  PetscCall(PetscFree(gscat));

  PetscCall(PetscFree(subda));
  PetscCall(PetscFree(ois));
  PetscCall(PetscFree(iis));

  PetscCall(DMDestroy(&da));
  PetscCall(PetscFinalize());
  return 0;
}
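
/*
   Example invocation (the binary name here is illustrative):

     mpiexec -n 4 ./ex_domain_decomposition -dim 2

   This should decompose the default 25x25 grid into per-rank subdomains and
   print the patch IS, the scatters, the intermediate vectors, and the
   subdomain matrices.
*/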