#include <petscvec.h>
#include <petsc/private/partitionerimpl.h> /*I "petscpartitioner.h" I*/

typedef struct {
  PetscBool useGrid;        /* Flag to use a grid layout */
  PetscInt  gridDim;        /* The grid dimension */
  PetscInt  nodeGrid[3];    /* Dimension of node grid */
  PetscInt  processGrid[3]; /* Dimension of local process grid on each node */
} PetscPartitioner_Simple;

static PetscErrorCode PetscPartitionerDestroy_Simple(PetscPartitioner part) {
  PetscFunctionBegin;
  PetscCall(PetscFree(part->data));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Simple_ASCII(PetscPartitioner part, PetscViewer viewer) {
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerView_Simple(PetscPartitioner part, PetscViewer viewer) {
  PetscBool iascii;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscValidHeaderSpecific(viewer, PETSC_VIEWER_CLASSID, 2);
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &iascii));
  if (iascii) PetscCall(PetscPartitionerView_Simple_ASCII(part, viewer));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerSetFromOptions_Simple(PetscPartitioner part, PetscOptionItems *PetscOptionsObject) {
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  PetscInt                 num, i;
  PetscBool                flg;

  PetscFunctionBegin;
  for (i = 0; i < 3; ++i) p->processGrid[i] = p->nodeGrid[i] = 1;
  PetscOptionsHeadBegin(PetscOptionsObject, "PetscPartitioner Simple Options");
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_node_grid", "Number of nodes in each dimension", "", p->nodeGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    p->gridDim = num;
  }
  num = 3;
  PetscCall(PetscOptionsIntArray("-petscpartitioner_simple_process_grid", "Number of local processes in each dimension for a given node", "", p->processGrid, &num, &flg));
  if (flg) {
    p->useGrid = PETSC_TRUE;
    if (p->gridDim < 0) p->gridDim = num;
    else PetscCheck(p->gridDim == num, PetscObjectComm((PetscObject)part), PETSC_ERR_ARG_INCOMP, "Process grid dimension %" PetscInt_FMT " != %" PetscInt_FMT " node grid dimension", num, p->gridDim);
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerPartition_Simple_Grid(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) {
  PetscPartitioner_Simple *p     = (PetscPartitioner_Simple *)part->data;
  const PetscInt          *nodes = p->nodeGrid;
  const PetscInt          *procs = p->processGrid;
  PetscInt                *cellproc, *offsets, cells[3] = {1, 1, 1}, pcells[3] = {1, 1, 1};
  PetscInt                 Np = 1, Nr, np, nk, nj, ni, pk, pj, pi, ck, cj, ci, i;
  MPI_Comm                 comm;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights when using grid partition\n"));
  if (targetSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores partition weights when using grid partition\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* Check that the requested partition count matches the full node x process grid */
  for (i = 0; i < 3; ++i) Np *= nodes[i] * procs[i];
  PetscCheck(nparts == Np, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %" PetscInt_FMT " grid size", nparts, Np);
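  /* Illustrative layout (hypothetical option values, not from this file):
     -petscpartitioner_simple_node_grid 2,1,1 -petscpartitioner_simple_process_grid 2,2,1
     gives nodes = {2,1,1} and procs = {2,2,1}, so Np = (2*2) * (1*2) * (1*1) = 8,
     and the run must supply exactly 8 partitions and 8 MPI processes */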
  PetscCheck(nparts == size, comm, PETSC_ERR_ARG_INCOMP, "Number of partitions %" PetscInt_FMT " != %d processes", nparts, size);
  PetscCheck(numVertices % nparts == 0, comm, PETSC_ERR_ARG_INCOMP, "Number of cells %" PetscInt_FMT " is not divisible by number of partitions %" PetscInt_FMT, numVertices, nparts);
  /* Start from one cell per process in each direction and double the grid in
     every direction until the numVertices/nparts cells per process are used up */
  for (i = 0; i < p->gridDim; ++i) cells[i] = nodes[i] * procs[i];
  Nr = numVertices / nparts;
  while (Nr > 1) {
    for (i = 0; i < p->gridDim; ++i) {
      cells[i] *= 2;
      Nr /= 2;
    }
  }
  PetscCheck(!numVertices || Nr == 1, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Odd number of cells %" PetscInt_FMT ". Must be nprocs*2^k", numVertices);
  for (i = 0; i < p->gridDim; ++i) {
    PetscCheck(cells[i] % (nodes[i] * procs[i]) == 0, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Direction %" PetscInt_FMT ": number of cells %" PetscInt_FMT " is not divisible by the number of processes %" PetscInt_FMT, i, cells[i], nodes[i] * procs[i]);
    pcells[i] = cells[i] / (nodes[i] * procs[i]);
  }
  /* Compute sizes */
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts));
  PetscCall(PetscSectionSetUp(partSection));
  PetscCall(PetscCalloc1(nparts, &offsets));
  for (np = 0; np < nparts; ++np) PetscCall(PetscSectionGetOffset(partSection, np, &offsets[np]));
  if (!numVertices) pcells[0] = pcells[1] = pcells[2] = 0;
  /* Compute partition */
  PetscCall(PetscMalloc1(numVertices, &cellproc));
  for (nk = 0; nk < nodes[2]; ++nk) {
    for (nj = 0; nj < nodes[1]; ++nj) {
      for (ni = 0; ni < nodes[0]; ++ni) {
        const PetscInt nid = (nk * nodes[1] + nj) * nodes[0] + ni;

        for (pk = 0; pk < procs[2]; ++pk) {
          for (pj = 0; pj < procs[1]; ++pj) {
            for (pi = 0; pi < procs[0]; ++pi) {
              const PetscInt pid = ((nid * procs[2] + pk) * procs[1] + pj) * procs[0] + pi;

              /* Assume that cells are originally numbered lexicographically */
              for (ck = 0; ck < pcells[2]; ++ck) {
                for (cj = 0; cj < pcells[1]; ++cj) {
                  for (ci = 0; ci < pcells[0]; ++ci) {
                    const PetscInt cid = (((nk * procs[2] + pk) * pcells[2] + ck) * cells[1] + ((nj * procs[1] + pj) * pcells[1] + cj)) * cells[0] + (ni * procs[0] + pi) * pcells[0] + ci;

                    cellproc[offsets[pid]++] = cid;
                  }
                }
              }
            }
          }
        }
      }
    }
  }
  for (np = 1; np < nparts; ++np) PetscCheck(offsets[np] - offsets[np - 1] == numVertices / nparts, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Offset %" PetscInt_FMT " != %" PetscInt_FMT " partition size", offsets[np], numVertices / nparts);
  PetscCall(PetscFree(offsets));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, numVertices, cellproc, PETSC_OWN_POINTER, partition));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerPartition_Simple(PetscPartitioner part, PetscInt nparts, PetscInt numVertices, PetscInt start[], PetscInt adjacency[], PetscSection vertSection, PetscSection targetSection, PetscSection partSection, IS *partition) {
  PetscPartitioner_Simple *p = (PetscPartitioner_Simple *)part->data;
  MPI_Comm                 comm;
  PetscInt                 np, *tpwgts = NULL, sumw = 0, numVerticesGlobal = 0;
  PetscMPIInt              size;

  PetscFunctionBegin;
  if (p->useGrid) {
    PetscCall(PetscPartitionerPartition_Simple_Grid(part, nparts, numVertices, start, adjacency, vertSection, targetSection, partSection, partition));
    PetscFunctionReturn(0);
  }
  if (vertSection) PetscCall(PetscInfo(part, "PETSCPARTITIONERSIMPLE ignores vertex weights\n"));
  PetscCall(PetscObjectGetComm((PetscObject)part, &comm));
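  /* A worked example of the target-weight scaling in the targetSection branch
     below (hypothetical numbers): with nparts = 3, target dofs {1, 1, 2}, and
     numVerticesGlobal = 10, sumw = 4 and integer scaling gives tpwgts = {2, 2, 5},
     which sums to 9; the deficit of 1 is added to the largest entry, so
     tpwgts = {2, 2, 6} and the weights again sum to numVerticesGlobal */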
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (targetSection) {
    PetscCall(MPIU_Allreduce(&numVertices, &numVerticesGlobal, 1, MPIU_INT, MPI_SUM, comm));
    PetscCall(PetscCalloc1(nparts, &tpwgts));
    for (np = 0; np < nparts; ++np) {
      PetscCall(PetscSectionGetDof(targetSection, np, &tpwgts[np]));
      sumw += tpwgts[np];
    }
    if (sumw) {
      PetscInt m, mp;

      /* Scale the target weights so that they sum to the global number of vertices,
         assigning any rounding deficit to the largest part */
      for (np = 0; np < nparts; ++np) tpwgts[np] = (tpwgts[np] * numVerticesGlobal) / sumw;
      for (np = 0, m = -1, mp = 0, sumw = 0; np < nparts; ++np) {
        if (m < tpwgts[np]) {
          m  = tpwgts[np];
          mp = np;
        }
        sumw += tpwgts[np];
      }
      if (sumw != numVerticesGlobal) tpwgts[mp] += numVerticesGlobal - sumw;
    }
    if (!sumw) PetscCall(PetscFree(tpwgts));
  }

  PetscCall(ISCreateStride(PETSC_COMM_SELF, numVertices, 0, 1, partition));
  if (size == 1) {
    if (tpwgts) {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, tpwgts[np]));
    } else {
      for (np = 0; np < nparts; ++np) PetscCall(PetscSectionSetDof(partSection, np, numVertices / nparts + ((numVertices % nparts) > np)));
    }
  } else {
    if (tpwgts) {
      /* Mark each global vertex with its target part in a parallel Vec, then count
         the marks that land on this rank to size partSection */
      Vec          v;
      PetscScalar *array;
      PetscInt     st, j;
      PetscMPIInt  rank;

      PetscCall(VecCreate(comm, &v));
      PetscCall(VecSetSizes(v, numVertices, numVerticesGlobal));
      PetscCall(VecSetType(v, VECSTANDARD));
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      for (np = 0, st = 0; np < nparts; ++np) {
        if (rank == np || (rank == size - 1 && size < nparts && np >= size)) {
          for (j = 0; j < tpwgts[np]; j++) PetscCall(VecSetValue(v, st + j, np, INSERT_VALUES));
        }
        st += tpwgts[np];
      }
      PetscCall(VecAssemblyBegin(v));
      PetscCall(VecAssemblyEnd(v));
      PetscCall(VecGetArray(v, &array));
      for (j = 0; j < numVertices; ++j) PetscCall(PetscSectionAddDof(partSection, PetscRealPart(array[j]), 1));
      PetscCall(VecRestoreArray(v, &array));
      PetscCall(VecDestroy(&v));
    } else {
      PetscMPIInt rank;
      PetscInt    nvGlobal, *offsets, myFirst, myLast;

      PetscCall(PetscMalloc1(size + 1, &offsets));
      offsets[0] = 0;
      PetscCallMPI(MPI_Allgather(&numVertices, 1, MPIU_INT, &offsets[1], 1, MPIU_INT, comm));
      for (np = 2; np <= size; np++) offsets[np] += offsets[np - 1];
      nvGlobal = offsets[size];
      PetscCallMPI(MPI_Comm_rank(comm, &rank));
      myFirst = offsets[rank];
      myLast  = offsets[rank + 1] - 1;
      PetscCall(PetscFree(offsets));
      if (numVertices) {
        /* The first nvGlobal % nparts parts get one extra vertex (size pBig);
           the rest have size pSmall. Find the range of parts overlapping the
           vertices owned by this rank, [myFirst, myLast]. */
        PetscInt firstPart = 0, firstLargePart = 0;
        PetscInt lastPart = 0, lastLargePart = 0;
        PetscInt rem    = nvGlobal % nparts;
        PetscInt pSmall = nvGlobal / nparts;
        PetscInt pBig   = nvGlobal / nparts + 1;

        if (rem) {
          firstLargePart = myFirst / pBig;
          lastLargePart  = myLast / pBig;

          if (firstLargePart < rem) {
            firstPart = firstLargePart;
          } else {
            firstPart = rem + (myFirst - (rem * pBig)) / pSmall;
          }
          if (lastLargePart < rem) {
            lastPart = lastLargePart;
          } else {
            lastPart = rem + (myLast - (rem * pBig)) / pSmall;
          }
        } else {
          firstPart = myFirst / (nvGlobal / nparts);
          lastPart  = myLast / (nvGlobal / nparts);
        }

        for (np = firstPart; np <= lastPart; np++) {
          PetscInt PartStart = np * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np);
          PetscInt PartEnd   = (np + 1) * (nvGlobal / nparts) + PetscMin(nvGlobal % nparts, np + 1);

          PartStart = PetscMax(PartStart, myFirst);
          PartEnd   = PetscMin(PartEnd, myLast + 1);
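          /* After the clipping above, PartEnd - PartStart is the size of the
             overlap of part np's global range with this rank's owned range */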
          PetscCall(PetscSectionSetDof(partSection, np, PartEnd - PartStart));
        }
      }
    }
  }
  PetscCall(PetscFree(tpwgts));
  PetscFunctionReturn(0);
}

static PetscErrorCode PetscPartitionerInitialize_Simple(PetscPartitioner part) {
  PetscFunctionBegin;
  part->noGraph             = PETSC_TRUE;
  part->ops->view           = PetscPartitionerView_Simple;
  part->ops->setfromoptions = PetscPartitionerSetFromOptions_Simple;
  part->ops->destroy        = PetscPartitionerDestroy_Simple;
  part->ops->partition      = PetscPartitionerPartition_Simple;
  PetscFunctionReturn(0);
}

/*MC
  PETSCPARTITIONERSIMPLE = "simple" - A PetscPartitioner that assigns cells to processes in contiguous blocks, optionally following target partition weights or a node/process grid layout

  Options Database Keys:
+ -petscpartitioner_simple_node_grid <n0,n1,n2>    - Number of nodes in each dimension of the grid layout
- -petscpartitioner_simple_process_grid <p0,p1,p2> - Number of local processes in each dimension for a given node

  Level: intermediate

.seealso: `PetscPartitionerType`, `PetscPartitionerCreate()`, `PetscPartitionerSetType()`
M*/

PETSC_EXTERN PetscErrorCode PetscPartitionerCreate_Simple(PetscPartitioner part) {
  PetscPartitioner_Simple *p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(part, PETSCPARTITIONER_CLASSID, 1);
  PetscCall(PetscNewLog(part, &p));
  p->gridDim = -1;
  part->data = p;

  PetscCall(PetscPartitionerInitialize_Simple(part));
  PetscFunctionReturn(0);
}
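/* A minimal usage sketch (an illustrative assumption, not code from this file),
   configuring a DMPlex `dm` created elsewhere to use this partitioner:

     PetscPartitioner part;
     DM               dmDist = NULL;

     PetscCall(DMPlexGetPartitioner(dm, &part));
     PetscCall(PetscPartitionerSetType(part, PETSCPARTITIONERSIMPLE));
     PetscCall(PetscPartitionerSetFromOptions(part));
     PetscCall(DMPlexDistribute(dm, 0, NULL, &dmDist));

   The grid layout can then be selected at run time, e.g.
     -petscpartitioner_simple_node_grid 2,2,1 -petscpartitioner_simple_process_grid 1,2,1 */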