#include <petscvec.h>
#include <petsc/private/partitionerimpl.h> /*I "petscpartitioner.h" I*/

typedef struct {
  PetscBool useGrid;        /* Flag to use a grid layout */
  PetscInt  gridDim;        /* The grid dimension */
  PetscInt  nodeGrid[3];    /* Dimension of node grid */
  PetscInt  processGrid[3]; /* Dimension of local process grid on each node */
} PetscPartitioner_Simple;
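
/* Example (illustrative values): a node grid of {2, 2, 1} with a process grid of
   {2, 1, 1} describes 2*2*1 = 4 nodes with 2*1*1 = 2 ranks each, so the partition
   must have exactly 4*2 = 8 parts (checked in PetscPartitionerPartition_Simple_Grid). */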

static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_Simple_ASCII(PetscPartitioner part, PetscViewer viewer)
{
  PetscFunctionBegin;
  /* Nothing is currently printed for the simple partitioner */
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer)
{
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_Simple_ASCII(part, viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PetscPartitionerSetFromOptions_Simple(PetscPartitioner part, PetscOptionItems *PetscOptionsObject)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  PetscInt                 num, i;
  PetscBool                flg;

  PetscFunctionBegin;
  /* Default to a single node with a single process in each direction */
  for (i = 0; i < 3; ++i) p->processGrid[i] = p->nodeGrid[i] = 1;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner Simple Options");
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_node_grid", "Number of nodes in each dimension", "", p->nodeGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    p->gridDim = num;
  }
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_process_grid", "Number of local processes in each dimension for a given node", "", p->processGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    if (p->gridDim < 0) p->gridDim = num;
    else PetscCheck(p->gridDim == num, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_INCOMP, "Process grid dimension %" PetscInt_FMT " != %" PetscInt_FMT " node grid dimension", num, p->gridDim);
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
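
/* A minimal option sketch (values are illustrative):
     -petscpartitioner_simple_node_grid 2,2 -petscpartitioner_simple_process_grid 2,1
   If both arrays are given they must have the same number of entries, and that
   common length becomes the grid dimension. */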

static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p     = (PetscPartitioner_Simple *)part->data;
  const PetscInt          *nodes = p->nodeGrid;
  const PetscInt          *procs = p->processGrid;
  PetscInt                *cellproc, *offsets, cells[3] = {1, 1, 1}, pcells[3] = {1, 1, 1};
  PetscInt                 Np = 1, Nr, np, nk, nj, ni, pk, pj, pi, ck, cj, ci, i;
  MPI_Comm                 comm;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights when using grid partition\n"));
  if (targetSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores partition weights when using grid partition\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Check grid */
  for (i = 0; i < 3; ++i) Np *= nodes[i] * procs[i];
  PetscCheck(nparts == Np, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %" PetscInt_FMT " grid size", nparts, Np);
  PetscCheck(nparts == size, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %d processes", nparts, size);
  PetscCheck(numVertices % nparts == 0, comm, PETSC_ERR_ARG_INCOMP, "Number of cells %" PetscInt_FMT " is not divisible by number of partitions %" PetscInt_FMT, numVertices, nparts);
  for (i = 0; i < p->gridDim; ++i) cells[i] = nodes[i] * procs[i];
  Nr = numVertices / nparts;
  /* Assume the remaining factor Nr comes from uniform refinement: double the cells in every grid direction, halving Nr once per doubling */
  while (Nr > 1) {
    for (i = 0; i < p->gridDim; ++i) {
      cells[i] *= 2;
      Nr /= 2;
    }
  }
  PetscCheck(!numVertices || Nr == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Odd number of cells %" PetscInt_FMT ". Must be nprocs*2^k", numVertices);
  for (i = 0; i < p->gridDim; ++i) {
    PetscCheck(cells[i] % (nodes[i] * procs[i]) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Dir %" PetscInt_FMT ": number of cells (%" PetscInt_FMT ") is not divisible by the number of processes (%" PetscInt_FMT ")", i, cells[i], nodes[i] * procs[i]);
    pcells[i] = cells[i] / (nodes[i] * procs[i]);
  }
  /* Compute sizes */
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts));
  PetscCall(PetscSectionSetUp(partSection));
  PetscCall(PetscCalloc1(nparts, &offsets));
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionGetOffset(partSection, np, &offsets[np]));
  if (!numVertices) pcells[0] = pcells[1] = pcells[2] = 0;
  /* Compute partition */
  PetscCall(PetscMalloc1(numVertices, &cellproc));
  for (nk = 0; nk < nodes[2]; ++nk) {
    for (nj = 0; nj < nodes[1]; ++nj) {
      for (ni = 0; ni < nodes[0]; ++ni) {
        const PetscInt nid = (nk * nodes[1] + nj) * nodes[0] + ni;

        for (pk = 0; pk < procs[2]; ++pk) {
          for (pj = 0; pj < procs[1]; ++pj) {
            for (pi = 0; pi < procs[0]; ++pi) {
              const PetscInt pid = ((nid * procs[2] + pk) * procs[1] + pj) * procs[0] + pi;

              /* Assume that cells are originally numbered lexicographically */
              for (ck = 0; ck < pcells[2]; ++ck) {
                for (cj = 0; cj < pcells[1]; ++cj) {
                  for (ci = 0; ci < pcells[0]; ++ci) {
                    const PetscInt cid = (((nk * procs[2] + pk) * pcells[2] + ck) * cells[1] + ((nj * procs[1] + pj) * pcells[1] + cj)) * cells[0] + (ni * procs[0] + pi) * pcells[0] + ci;

                    cellproc[offsets[pid]++] = cid;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  for (np = 1; np < nparts; ++np) PetscCheck(offsets[np] - offsets[np - 1] == numVertices / nparts, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Offset %" PetscInt_FMT " != %" PetscInt_FMT " partition size", offsets[np], numVertices / nparts);
  PetscCall(PetscFree(offsets));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numVertices, cellproc, PETSC_OWN_POINTER, partition));
  PetscFunctionReturn(PETSC_SUCCESS);
}
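
/* Worked example (illustrative): gridDim = 1, nodes = {2, 1, 1}, procs = {2, 1, 1},
   so nparts = 4. With numVertices = 8, pcells[0] = 2 cells per rank, and since the
   cells are assumed to be numbered lexicographically, rank pid receives the
   contiguous cells {2*pid, 2*pid + 1}:
     rank 0 (node 0, process 0): cells 0, 1
     rank 1 (node 0, process 1): cells 2, 3
     rank 2 (node 1, process 0): cells 4, 5
     rank 3 (node 1, process 1): cells 6, 7 */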

static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection edgeSection, PetscSection targetSection, PetscSection partSection, IS *partition)
{
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  MPI_Comm                 comm;
  PetscInt                 np, *tpwgts = NULL, sumw = 0, numVerticesGlobal = 0;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (p->useGrid) {
    PetscCall(PetscPartitionerPartition_Simple_Grid(part, nparts, numVertices, start, adjacency, vertSection, targetSection, partSection, partition));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (targetSection) {
    PetscCall(MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (np = 0; np < nparts; ++np) {
      PetscCall(PetscSectionGetDof(targetSection, np, &tpwgts[np]));
      sumw += tpwgts[np];
    }
    if (sumw) {
      PetscInt m, mp;
      /* Scale the target weights so that they sum to the global number of vertices */
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np] * numVerticesGlobal) / sumw;
      /* Integer division may lose a few vertices; give the leftover to the part with the largest weight */
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) {
          m  = tpwgts[np];
          mp = np;
        }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
    /* If the total weight is zero, fall back to the unweighted case */
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }

  /* Local vertices are assigned to parts in contiguous chunks, so the index set is just the identity on [0, numVertices) */
  PetscCall(ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition));
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, tpwgts[np]));
    } else {
      /* Divide evenly; the first (numVertices % nparts) parts receive one extra vertex */
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts + ((numVertices % nparts) > np)));
    }
  } else {
    if (tpwgts) {
      Vec          v;
      PetscScalar *array;
      PetscInt     st, j;
      PetscMPIInt  rank;

      /* Communicate the assignment with a global vector: entry i holds the part number of global vertex i */
      PetscCall(VecCreate(comm, &v));
      PetscCall(VecSetSizes(v, numVertices, numVerticesGlobal));
      PetscCall(VecSetType(v, VECSTANDARD));
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      for (np = 0, st = 0; np < nparts; ++np) {
        /* Rank np fills part np; the last rank also fills the parts beyond the communicator size */
        if (rank == np || (rank == size - 1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) PetscCall(VecSetValue(v, st + j, np, INSERT_VALUES));
        }
        st += tpwgts[np];
      }
      PetscCall(VecAssemblyBegin(v));
      PetscCall(VecAssemblyEnd(v));
      PetscCall(VecGetArray(v, &array));
      /* Count how many local vertices fall in each part */
      for (j = 0; j < numVertices; ++j) PetscCall(PetscSectionAddDof(partSection, PetscRealPart(array[j]), 1));
      PetscCall(VecRestoreArray(v, &array));
      PetscCall(VecDestroy(&v));
    } else {
      PetscMPIInt rank;
      PetscInt    nvGlobal, *offsets, myFirst, myLast;

      /* Determine the global range [myFirst, myLast] of vertices owned by this rank */
      PetscCall(PetscMalloc1(size + 1, &offsets));
      offsets[0] = 0;
      PetscCallMPI(MPI_Allgather(&numVertices, 1, MPIU_INT, &offsets[1], 1, MPIU_INT, comm));
      for (np = 2; np <= size; np++) offsets[np] += offsets[np - 1];
      nvGlobal = offsets[size];
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      PetscCall(PetscFree(offsets));
      if (numVertices) {
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem    = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal / nparts;
        PetscInt pBig   = nvGlobal / nparts + 1;

        /* The first rem parts hold pBig vertices each and the remaining parts pSmall; find the parts overlapping [myFirst, myLast] */
        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal / nparts);
          lastPart  = myLast / (nvGlobal / nparts);
        }

        /* Clip each overlapping part to the local vertex range and record the overlap size */
        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart = np * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np);
          PetscInt PartEnd   = (np + 1) * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np + 1);

          PartStart = PetscMax(PartStart, myFirst);
          PartEnd   = PetscMin(PartEnd, myLast + 1);
          PetscCall(PetscSectionSetDof(partSection, np, PartEnd - PartStart));
        }
      }
    }
  }
  PetscCall(PetscFree(tpwgts));
  PetscFunctionReturn(PETSC_SUCCESS);
}
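
/* Worked example (illustrative): nvGlobal = 10 and nparts = 3 gives rem = 1,
   pBig = 4, and pSmall = 3, i.e. part sizes 4, 3, 3 covering [0, 4), [4, 7),
   and [7, 10). A rank owning vertices [myFirst, myLast] = [3, 6] computes
   firstPart = 0 and lastPart = 1 + (6 - 4)/3 = 1, so it contributes 1 dof to
   part 0 and 3 dofs to part 1 of partSection. */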

static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part)
{
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE; /* The simple partitioner does not use the connectivity graph */
  part->ops->view           = PetscPartitionerView_Simple;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Simple;
  part->ops->destroy        = PetscPartitionerDestroy_Simple;
  part->ops->partition      = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner object that assigns vertices to processes in contiguous chunks

  Options Database Keys:
+ -petscpartitioner_simple_node_grid <n0,n1,n2>    - Number of nodes in each dimension of a grid layout
- -petscpartitioner_simple_process_grid <p0,p1,p2> - Number of local processes in each dimension for a given node

  Level: intermediate

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/
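
/* A minimal creation sketch using the public PetscPartitioner API (the type can
   also be selected at runtime with -petscpartitioner_type simple):

     PetscPartitioner part;

     PetscCall(PetscPartitionerCreate(PETSC_COMM_WORLD, &part));
     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
     PetscCall(PetscPartitionerSetFromOptions(part));
     PetscCall(PetscPartitionerDestroy(&part));
*/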

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part)
{
  PetscPartitioner_Simple *p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscCall(PetscNew(&p));
  p->gridDim = -1; /* Negative indicates the grid dimension has not yet been set from options */
  part->data = p;

  PetscCall(PetscPartitionerInitialize_Simple(part));
  PetscFunctionReturn(PETSC_SUCCESS);
}