/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */

#include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/
#include <petscblaslapack.h>
#include <petscdm.h>
#include <petsc/private/kspimpl.h>

typedef struct {
  PetscInt   nsmooths;
  PetscInt   aggressive_coarsening_levels; // number of aggressive coarsening levels (square or MISk)
  MatCoarsen crs;
} PC_GAMG_AGG;

/*@
   PCGAMGSetNSmooths - Set the number of smoothing steps (1 is typical) used for multigrid on all the levels

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of smoothing steps

   Options Database Key:
.  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation

   Level: intermediate

.seealso: `PCMG`, `PCGAMG`
@*/
PetscErrorCode PCGAMGSetNSmooths(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  PetscTryMethod(pc, "PCGAMGSetNSmooths_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetNSmooths_AGG(PC pc, PetscInt n)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;

  PetscFunctionBegin;
  pc_gamg_agg->nsmooths = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetAggressiveLevels - Use aggressive coarsening on the first n levels

   Logically Collective

   Input Parameters:
+  pc - the preconditioner context
-  n - number of levels (0, 1, or more)

   Options Database Key:
.  -pc_gamg_aggressive_coarsening <n, default=1> - Number of aggressive coarsening (MIS-2) levels, starting from the finest

   Level: intermediate

.seealso: `PCGAMG`, `PCGAMGSetThreshold()`
@*/
PetscErrorCode PCGAMGSetAggressiveLevels(PC pc, PetscInt n)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n, 2);
  PetscTryMethod(pc, "PCGAMGSetAggressiveLevels_C", (PC, PetscInt), (pc, n));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetAggressiveLevels_AGG(PC pc, PetscInt n)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;

  PetscFunctionBegin;
  pc_gamg_agg->aggressive_coarsening_levels = n;
  PetscFunctionReturn(0);
}
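/* Example (a sketch, not part of this file): configuring these two options from
   application code. Assumes a KSP `ksp` whose operators are already set; the
   options-database equivalent is
     -pc_type gamg -pc_gamg_agg_nsmooths 2 -pc_gamg_aggressive_coarsening 2 */
#if 0
  PC pc;
  PetscCall(KSPGetPC(ksp, &pc));
  PetscCall(PCSetType(pc, PCGAMG));
  PetscCall(PCGAMGSetNSmooths(pc, 2));         /* two prolongator smoothing steps */
  PetscCall(PCGAMGSetAggressiveLevels(pc, 2)); /* MIS-2 coarsening on the two finest levels */
#endif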
static PetscErrorCode PCSetFromOptions_GAMG_AGG(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "GAMG-AGG options");
  {
    PetscBool flg;
    PetscCall(PetscOptionsInt("-pc_gamg_agg_nsmooths", "smoothing steps for smoothed aggregation, usually 1", "PCGAMGSetNSmooths", pc_gamg_agg->nsmooths, &pc_gamg_agg->nsmooths, NULL));
    pc_gamg_agg->aggressive_coarsening_levels = 1;
    PetscCall(PetscOptionsInt("-pc_gamg_square_graph", "Number of aggressive coarsening (MIS-2) levels from finest (alias for -pc_gamg_aggressive_coarsening, deprecated)", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &flg));
    if (!flg) {
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, NULL));
    } else {
      PetscCall(PetscOptionsInt("-pc_gamg_aggressive_coarsening", "Number of aggressive coarsening (MIS-2) levels from finest", "PCGAMGSetAggressiveLevels", pc_gamg_agg->aggressive_coarsening_levels, &pc_gamg_agg->aggressive_coarsening_levels, &flg));
      if (flg) PetscCall(PetscInfo(pc, "Warning: both -pc_gamg_square_graph and -pc_gamg_aggressive_coarsening are used. -pc_gamg_square_graph is deprecated; number of aggressive levels is %d\n", (int)pc_gamg_agg->aggressive_coarsening_levels));
    }
  }
  PetscOptionsHeadEnd();
  PetscFunctionReturn(0);
}
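/* Note on the precedence above: the deprecated -pc_gamg_square_graph is read first;
   if it was given, -pc_gamg_aggressive_coarsening is read again afterwards, so when
   both options appear the latter wins (with an informational warning). */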
static PetscErrorCode PCDestroy_GAMG_AGG(PC pc)
{
  PC_MG   *mg      = (PC_MG *)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;

  PetscFunctionBegin;
  PetscCall(PetscFree(pc_gamg->subctx));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
  PetscFunctionReturn(0);
}

/*
   PCSetCoordinates_AGG
     - collective

   Input Parameters:
+  pc - the preconditioner context
.  ndm - dimension of the data (used for dof/vertex for Stokes)
.  a_nloc - number of local vertices
-  coords - [a_nloc][ndm] - interleaved coordinate data: {x_0, y_0, z_0, x_1, y_1, ...}
*/

static PetscErrorCode PCSetCoordinates_AGG(PC pc, PetscInt ndm, PetscInt a_nloc, PetscReal *coords)
{
  PC_MG   *mg      = (PC_MG *)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG *)mg->innerctx;
  PetscInt arrsz, kk, ii, jj, nloc, ndatarows, ndf;
  Mat      mat = pc->pmat;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(mat, MAT_CLASSID, 1);
  nloc = a_nloc;

  /* SA: null space vectors */
  PetscCall(MatGetBlockSize(mat, &ndf));               /* this does not work for Stokes */
  if (coords && ndf == 1) pc_gamg->data_cell_cols = 1; /* scalar w/ coords and SA (not needed) */
  else if (coords) {
    PetscCheck(ndm <= ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "degrees of motion %" PetscInt_FMT " > block size %" PetscInt_FMT, ndm, ndf);
    pc_gamg->data_cell_cols = (ndm == 2 ? 3 : 6); /* displacement elasticity */
    if (ndm != ndf) PetscCheck(pc_gamg->data_cell_cols == ndf, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Don't know how to create null space for ndm=%" PetscInt_FMT ", ndf=%" PetscInt_FMT ". Use MatSetNearNullSpace().", ndm, ndf);
  } else pc_gamg->data_cell_cols = ndf; /* no data, force SA with constant null space vectors */
  pc_gamg->data_cell_rows = ndatarows = ndf;
  PetscCheck(pc_gamg->data_cell_cols > 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "pc_gamg->data_cell_cols %" PetscInt_FMT " <= 0", pc_gamg->data_cell_cols);
  arrsz = nloc * pc_gamg->data_cell_rows * pc_gamg->data_cell_cols;

  if (!pc_gamg->data || (pc_gamg->data_sz != arrsz)) {
    PetscCall(PetscFree(pc_gamg->data));
    PetscCall(PetscMalloc1(arrsz + 1, &pc_gamg->data));
  }
  /* copy data in - column oriented */
  for (kk = 0; kk < nloc; kk++) {
    const PetscInt M    = nloc * pc_gamg->data_cell_rows; /* stride into data */
    PetscReal     *data = &pc_gamg->data[kk * ndatarows]; /* start of cell */
    if (pc_gamg->data_cell_cols == 1) *data = 1.0;
    else {
      /* translational modes */
      for (ii = 0; ii < ndatarows; ii++) {
        for (jj = 0; jj < ndatarows; jj++) {
          if (ii == jj) data[ii * M + jj] = 1.0;
          else data[ii * M + jj] = 0.0;
        }
      }

      /* rotational modes */
      if (coords) {
        if (ndm == 2) {
          data += 2 * M;
          data[0] = -coords[2 * kk + 1];
          data[1] = coords[2 * kk];
        } else {
          data += 3 * M;
          data[0]         = 0.0;
          data[M + 0]     = coords[3 * kk + 2];
          data[2 * M + 0] = -coords[3 * kk + 1];
          data[1]         = -coords[3 * kk + 2];
          data[M + 1]     = 0.0;
          data[2 * M + 1] = coords[3 * kk];
          data[2]         = coords[3 * kk + 1];
          data[M + 2]     = -coords[3 * kk];
          data[2 * M + 2] = 0.0;
        }
      }
    }
  }
  pc_gamg->data_sz = arrsz;
  PetscFunctionReturn(0);
}
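/* Illustration (comment only): for ndm = ndf = 2 the six values stored per node kk
   above are the three rigid body modes evaluated at (x_kk, y_kk), laid out
   column-major with column stride M = nloc*ndf:

        trans-x  trans-y  rotation
      [   1        0       -y_kk  ]   <- x dof
      [   0        1        x_kk  ]   <- y dof

   In 3-D (ndm = ndf = 3) the analogous layout has three translations and three
   rotations, giving data_cell_cols = 6. */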
/*
   PCSetData_AGG - called if data is not set with PCSetCoordinates.
     Looks in the Mat for a near null space.
     Does not work for Stokes.

   Input Parameters:
+  pc - the preconditioner context
-  a_A - matrix to get the (near) null space from
*/
static PetscErrorCode PCSetData_AGG(PC pc, Mat a_A)
{
  PC_MG       *mg      = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
  MatNullSpace mnull;

  PetscFunctionBegin;
  PetscCall(MatGetNearNullSpace(a_A, &mnull));
  if (!mnull) {
    DM dm;
    PetscCall(PCGetDM(pc, &dm));
    if (!dm) PetscCall(MatGetDM(a_A, &dm));
    if (dm) {
      PetscObject deformation;
      PetscInt    Nf;

      PetscCall(DMGetNumFields(dm, &Nf));
      if (Nf) {
        PetscCall(DMGetField(dm, 0, NULL, &deformation));
        PetscCall(PetscObjectQuery((PetscObject)deformation, "nearnullspace", (PetscObject *)&mnull));
        if (!mnull) PetscCall(PetscObjectQuery((PetscObject)deformation, "nullspace", (PetscObject *)&mnull));
      }
    }
  }

  if (!mnull) {
    PetscInt bs, NN, MM;
    PetscCall(MatGetBlockSize(a_A, &bs));
    PetscCall(MatGetLocalSize(a_A, &MM, &NN));
    PetscCheck(MM % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "MM %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, MM, bs);
    PetscCall(PCSetCoordinates_AGG(pc, bs, MM / bs, NULL));
  } else {
    PetscReal         *nullvec;
    PetscBool          has_const;
    PetscInt           i, j, mlocal, nvec, bs;
    const Vec         *vecs;
    const PetscScalar *v;

    PetscCall(MatGetLocalSize(a_A, &mlocal, NULL));
    PetscCall(MatNullSpaceGetVecs(mnull, &has_const, &nvec, &vecs));
    for (i = 0; i < nvec; i++) {
      PetscCall(VecGetLocalSize(vecs[i], &j));
      PetscCheck(j == mlocal, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Attached null space vector size %" PetscInt_FMT " != matrix size %" PetscInt_FMT, j, mlocal);
    }
    pc_gamg->data_sz = (nvec + !!has_const) * mlocal;
    PetscCall(PetscMalloc1((nvec + !!has_const) * mlocal, &nullvec));
    if (has_const)
      for (i = 0; i < mlocal; i++) nullvec[i] = 1.0;
    for (i = 0; i < nvec; i++) {
      PetscCall(VecGetArrayRead(vecs[i], &v));
      for (j = 0; j < mlocal; j++) nullvec[(i + !!has_const) * mlocal + j] = PetscRealPart(v[j]);
      PetscCall(VecRestoreArrayRead(vecs[i], &v));
    }
    pc_gamg->data           = nullvec;
    pc_gamg->data_cell_cols = (nvec + !!has_const);
    PetscCall(MatGetBlockSize(a_A, &bs));
    pc_gamg->data_cell_rows = bs;
  }
  PetscFunctionReturn(0);
}
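/* Example (a sketch, not part of this file): attaching rigid body modes to the
   operator so the MatGetNearNullSpace() branch above finds them, instead of
   falling back to constant vectors. Assumes `A` is an elasticity operator and
   `coords` is a Vec of node coordinates blocked by the spatial dimension. */
#if 0
  MatNullSpace nns;
  PetscCall(MatNullSpaceCreateRigidBody(coords, &nns));
  PetscCall(MatSetNearNullSpace(A, nns));
  PetscCall(MatNullSpaceDestroy(&nns));
#endif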
/*
   formProl0 - collect the null space data for each aggregate, do QR, put R in the coarse grid data and Q in P_0

   Input Parameters:
+  agg_llists - aggregate lists; each selected vertex's list holds the unselected vertices of its aggregate
.  bs - row block size
.  nSAvec - column block size of the new P
.  my0crs - global index of the start of the local coarse grid
.  data_stride - bs*(nloc nodes + ghost nodes); data is laid out [data_stride][nSAvec]
.  data_in[data_stride*nSAvec] - local data on the fine grid
-  flid_fgid[data_stride/bs] - map of local to global IDs, includes ghosts in 'locals_llist'

   Output Parameters:
+  a_data_out - in with fine grid data (w/ghosts), out with coarse grid data
-  a_Prol - prolongation operator
*/
static PetscErrorCode formProl0(PetscCoarsenData *agg_llists, PetscInt bs, PetscInt nSAvec, PetscInt my0crs, PetscInt data_stride, PetscReal data_in[], const PetscInt flid_fgid[], PetscReal **a_data_out, Mat a_Prol)
{
  PetscInt        Istart, my0, Iend, nloc, clid, flid = 0, aggID, kk, jj, ii, mm, nSelected, minsz, nghosts, out_data_stride;
  MPI_Comm        comm;
  PetscReal      *out_data;
  PetscCDIntNd   *pos;
  PCGAMGHashTable fgid_flid;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)a_Prol, &comm));
  PetscCall(MatGetOwnershipRange(a_Prol, &Istart, &Iend));
  nloc = (Iend - Istart) / bs;
  my0  = Istart / bs;
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT " must be divisible by bs %" PetscInt_FMT, Iend, Istart, bs);
  Iend /= bs;
  nghosts = data_stride / bs - nloc;

  PetscCall(PCGAMGHashTableCreate(2 * nghosts + 1, &fgid_flid));
  for (kk = 0; kk < nghosts; kk++) PetscCall(PCGAMGHashTableAdd(&fgid_flid, flid_fgid[nloc + kk], nloc + kk));

  /* count selected -- same as the number of columns of P */
  for (nSelected = mm = 0; mm < nloc; mm++) {
    PetscBool ise;
    PetscCall(PetscCDEmptyAt(agg_llists, mm, &ise));
    if (!ise) nSelected++;
  }
  PetscCall(MatGetOwnershipRangeColumn(a_Prol, &ii, &jj));
  PetscCheck((ii / nSAvec) == my0crs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "ii %" PetscInt_FMT " /nSAvec %" PetscInt_FMT " != my0crs %" PetscInt_FMT, ii, nSAvec, my0crs);
  PetscCheck(nSelected == (jj - ii) / nSAvec, PETSC_COMM_SELF, PETSC_ERR_PLIB, "nSelected %" PetscInt_FMT " != (jj %" PetscInt_FMT " - ii %" PetscInt_FMT ")/nSAvec %" PetscInt_FMT, nSelected, jj, ii, nSAvec);

  /* allocate space for coarse point data (output) */
  out_data_stride = nSelected * nSAvec;

  PetscCall(PetscMalloc1(out_data_stride * nSAvec, &out_data));
  for (ii = 0; ii < out_data_stride * nSAvec; ii++) out_data[ii] = PETSC_MAX_REAL;
  *a_data_out = out_data; /* output - stride nSelected*nSAvec */

  /* find points and set prolongation */
  minsz = 100;
  for (mm = clid = 0; mm < nloc; mm++) {
    PetscCall(PetscCDSizeAt(agg_llists, mm, &jj));
    if (jj > 0) {
      const PetscInt lid = mm, cgid = my0crs + clid;
      PetscInt       cids[100]; /* max bs */
      PetscBLASInt   asz = jj, M = asz * bs, N = nSAvec, INFO;
      PetscBLASInt   Mdata = M + ((N - M > 0) ? N - M : 0), LDA = Mdata, LWORK = N * bs;
      PetscScalar   *qqc, *qqr, *TAU, *WORK;
      PetscInt      *fids;
      PetscReal     *data;
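      /* Note: Mdata = max(M, N); when an aggregate is so small that M = asz*bs < nSAvec,
         zero rows are appended below ("pad with zeros") so that the LAPACK QR
         factorization sees at least as many rows as columns. */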
      /* count agg */
      if (asz < minsz) minsz = asz;

      /* get block */
      PetscCall(PetscMalloc5(Mdata * N, &qqc, M * N, &qqr, N, &TAU, LWORK, &WORK, M, &fids));

      aggID = 0;
      PetscCall(PetscCDGetHeadPos(agg_llists, lid, &pos));
      while (pos) {
        PetscInt gid1;
        PetscCall(PetscCDIntNdGetID(pos, &gid1));
        PetscCall(PetscCDGetNextPos(agg_llists, lid, &pos));

        if (gid1 >= my0 && gid1 < Iend) flid = gid1 - my0;
        else {
          PetscCall(PCGAMGHashTableFind(&fgid_flid, gid1, &flid));
          PetscCheck(flid >= 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "Cannot find gid1 in table");
        }
        /* copy in B_i matrix - column oriented */
        data = &data_in[flid * bs];
        for (ii = 0; ii < bs; ii++) {
          for (jj = 0; jj < N; jj++) {
            PetscReal d = data[jj * data_stride + ii];
            qqc[jj * Mdata + aggID * bs + ii] = d;
          }
        }
        /* set fine IDs */
        for (kk = 0; kk < bs; kk++) fids[aggID * bs + kk] = flid_fgid[flid] * bs + kk;
        aggID++;
      }

      /* pad with zeros */
      for (ii = asz * bs; ii < Mdata; ii++) {
        for (jj = 0; jj < N; jj++) qqc[jj * Mdata + ii] = 0.0;
      }

      /* QR */
      PetscCall(PetscFPTrapPush(PETSC_FP_TRAP_OFF));
      PetscCallBLAS("LAPACKgeqrf", LAPACKgeqrf_(&Mdata, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCall(PetscFPTrapPop());
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xGEQRF error");
      /* get R - column oriented - output B_{i+1} */
      {
        PetscReal *data = &out_data[clid * nSAvec];
        for (jj = 0; jj < nSAvec; jj++) {
          for (ii = 0; ii < nSAvec; ii++) {
            PetscCheck(data[jj * out_data_stride + ii] == PETSC_MAX_REAL, PETSC_COMM_SELF, PETSC_ERR_PLIB, "data[jj*out_data_stride + ii] != %e", (double)PETSC_MAX_REAL);
            if (ii <= jj) data[jj * out_data_stride + ii] = PetscRealPart(qqc[jj * Mdata + ii]);
            else data[jj * out_data_stride + ii] = 0.;
          }
        }
      }

      /* get Q - row oriented */
      PetscCallBLAS("LAPACKorgqr", LAPACKorgqr_(&Mdata, &N, &N, qqc, &LDA, TAU, WORK, &LWORK, &INFO));
      PetscCheck(INFO == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "xORGQR error arg %" PetscBLASInt_FMT, -INFO);

      for (ii = 0; ii < M; ii++) {
        for (jj = 0; jj < N; jj++) qqr[N * ii + jj] = qqc[jj * Mdata + ii];
      }

      /* add diagonal block of P0 */
      for (kk = 0; kk < N; kk++) cids[kk] = N * cgid + kk; /* global col IDs in P0 */
      PetscCall(MatSetValues(a_Prol, M, fids, N, cids, qqr, INSERT_VALUES));
      PetscCall(PetscFree5(qqc, qqr, TAU, WORK, fids));
      clid++;
    } /* coarse agg */
  }   /* for all fine nodes */
  PetscCall(MatAssemblyBegin(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(a_Prol, MAT_FINAL_ASSEMBLY));
  PetscCall(PCGAMGHashTableDestroy(&fgid_flid));
  PetscFunctionReturn(0);
}
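/* Why QR (illustration): each aggregate's fine-grid null space block B_i
   ((asz*bs) x nSAvec) is factored as B_i = Q_i R_i above. Q_i, with orthonormal
   columns, becomes the aggregate's diagonal block of P_0, and R_i becomes that
   aggregate's coarse-grid data, so the block column of P_0 times R_i reproduces
   B_i: the coarse problem inherits the fine near null space exactly. */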
static PetscErrorCode PCView_GAMG_AGG(PC pc, PetscViewer viewer)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;

  PetscFunctionBegin;
  PetscCall(PetscViewerASCIIPrintf(viewer, "    AGG specific options\n"));
  PetscCall(PetscViewerASCIIPrintf(viewer, "      Number of levels of aggressive coarsening %d\n", (int)pc_gamg_agg->aggressive_coarsening_levels));
  PetscCall(PetscViewerASCIIPrintf(viewer, "      Number of smoothing steps %d\n", (int)pc_gamg_agg->nsmooths));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGCreateGraph_AGG(PC pc, Mat Amat, Mat *a_Gmat)
{
  PC_MG          *mg          = (PC_MG *)pc->data;
  PC_GAMG        *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG    *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  const PetscReal vfilter     = pc_gamg->threshold[pc_gamg->current_level];
  PetscBool       ishem;
  const char     *prefix;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
  /* Note: depending on the algorithm that will be used for computing the coarse grid points this should pass PETSC_TRUE or PETSC_FALSE as the first argument */

  /* MATCOARSENHEM requires numerical weights for edges so ensure they are computed */
  PetscCall(MatCoarsenCreate(PetscObjectComm((PetscObject)pc), &pc_gamg_agg->crs));
  PetscCall(PetscObjectGetOptionsPrefix((PetscObject)pc, &prefix));
  PetscCall(PetscObjectSetOptionsPrefix((PetscObject)pc_gamg_agg->crs, prefix));
  PetscCall(MatCoarsenSetFromOptions(pc_gamg_agg->crs));
  PetscCall(PetscObjectTypeCompare((PetscObject)pc_gamg_agg->crs, MATCOARSENHEM, &ishem));

  PetscCall(MatCreateGraph(Amat, PETSC_TRUE, (vfilter >= 0 || ishem) ? PETSC_TRUE : PETSC_FALSE, vfilter, a_Gmat));
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_GRAPH], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}

/*
   PCGAMGCoarsen_AGG - supports squaring the graph (deprecated) and the new graph for
     communication of QR data used with HEM and MISk coarsening

   Input Parameter:
.  a_pc - this

   Input/Output Parameter:
.  a_Gmat1 - graph to coarsen (in); graph with off-processor edges for the QR gather/scatter (out)

   Output Parameter:
.  agg_lists - list of aggregates
*/
static PetscErrorCode PCGAMGCoarsen_AGG(PC a_pc, Mat *a_Gmat1, PetscCoarsenData **agg_lists)
{
  PC_MG       *mg          = (PC_MG *)a_pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  Mat          mat, Gmat1 = *a_Gmat1; /* aggressive graph */
  IS           perm;
  PetscInt     Istart, Iend, Ii, nloc, bs, nn;
  PetscInt    *permute, *degree;
  PetscBool   *bIndexSet;
  PetscReal    hashfact;
  PetscInt     iSwapIndex;
  PetscRandom  random;

  PetscFunctionBegin;
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscCall(MatGetLocalSize(Gmat1, &nn, NULL));
  PetscCall(MatGetBlockSize(Gmat1, &bs));
  PetscCheck(bs == 1, PETSC_COMM_SELF, PETSC_ERR_PLIB, "bs %" PetscInt_FMT " must be 1", bs);
  nloc = nn / bs;

  /* get MIS aggs - randomize */
  PetscCall(PetscMalloc2(nloc, &permute, nloc, &degree));
  PetscCall(PetscCalloc1(nloc, &bIndexSet));
  for (Ii = 0; Ii < nloc; Ii++) permute[Ii] = Ii;
  PetscCall(PetscRandomCreate(PETSC_COMM_SELF, &random));
  PetscCall(MatGetOwnershipRange(Gmat1, &Istart, &Iend));
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscInt nc;
    PetscCall(MatGetRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
    degree[Ii] = nc;
    PetscCall(MatRestoreRow(Gmat1, Istart + Ii, &nc, NULL, NULL));
  }
  for (Ii = 0; Ii < nloc; Ii++) {
    PetscCall(PetscRandomGetValueReal(random, &hashfact));
    iSwapIndex = (PetscInt)(hashfact * nloc) % nloc;
    if (!bIndexSet[iSwapIndex] && iSwapIndex != Ii) {
      PetscInt iTemp        = permute[iSwapIndex];
      permute[iSwapIndex]   = permute[Ii];
      permute[Ii]           = iTemp;
      iTemp                 = degree[iSwapIndex];
      degree[iSwapIndex]    = degree[Ii];
      degree[Ii]            = iTemp;
      bIndexSet[iSwapIndex] = PETSC_TRUE;
    }
  }
  // create minimum degree ordering
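  // (the random swaps above leave equal-degree vertices in random order, so the greedy
  //  coarsener visits low-degree vertices first with ties broken randomly rather than
  //  by the natural grid ordering)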
  PetscCall(PetscSortIntWithArray(nloc, degree, permute));

  PetscCall(PetscFree(bIndexSet));
  PetscCall(PetscRandomDestroy(&random));
  PetscCall(ISCreateGeneral(PETSC_COMM_SELF, nloc, permute, PETSC_USE_POINTER, &perm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));
  PetscCall(MatCoarsenSetGreedyOrdering(pc_gamg_agg->crs, perm));
  PetscCall(MatCoarsenSetAdjacency(pc_gamg_agg->crs, Gmat1));
  PetscCall(MatCoarsenSetStrictAggs(pc_gamg_agg->crs, PETSC_TRUE));
  if (pc_gamg->current_level < pc_gamg_agg->aggressive_coarsening_levels) PetscCall(MatCoarsenMISKSetDistance(pc_gamg_agg->crs, 2)); // hardwire to MIS-2
  else PetscCall(MatCoarsenMISKSetDistance(pc_gamg_agg->crs, 1)); // MIS
  PetscCall(MatCoarsenApply(pc_gamg_agg->crs));
  PetscCall(MatCoarsenViewFromOptions(pc_gamg_agg->crs, NULL, "-mat_coarsen_view"));
  PetscCall(MatCoarsenGetData(pc_gamg_agg->crs, agg_lists)); /* output */
  PetscCall(MatCoarsenDestroy(&pc_gamg_agg->crs));

  PetscCall(ISDestroy(&perm));
  PetscCall(PetscFree2(permute, degree));
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_MIS], 0, 0, 0, 0));

  {
    PetscCoarsenData *llist = *agg_lists;
    /* see if we have a matrix that takes precedence (returned from MatCoarsenApply) */
    PetscCall(PetscCDGetMat(llist, &mat));
    if (mat) {
      PetscCall(MatDestroy(&Gmat1));
      *a_Gmat1 = mat; /* output */
    }
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_COARSEN], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}
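/* Note (illustration): a distance-2 MIS of a graph G is equivalent to a distance-1
   MIS of G^2, so selecting MIS-2 above coarsens as aggressively as the deprecated
   explicit graph squaring (-pc_gamg_square_graph) without ever forming the squared
   matrix. */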
/*
   PCGAMGProlongator_AGG

   Input Parameters:
+  pc - this
.  Amat - matrix on this fine level
.  Gmat - graph, used to get ghost data for the nodes
-  agg_lists - list of aggregates

   Output Parameter:
.  a_P_out - prolongation operator to the next level
*/
static PetscErrorCode PCGAMGProlongator_AGG(PC pc, Mat Amat, Mat Gmat, PetscCoarsenData *agg_lists, Mat *a_P_out)
{
  PC_MG         *mg      = (PC_MG *)pc->data;
  PC_GAMG       *pc_gamg = (PC_GAMG *)mg->innerctx;
  const PetscInt col_bs  = pc_gamg->data_cell_cols;
  PetscInt       Istart, Iend, nloc, ii, jj, kk, my0, nLocalSelected, bs;
  Mat            Prol;
  PetscMPIInt    size;
  MPI_Comm       comm;
  PetscReal     *data_w_ghost;
  PetscInt       myCrs0, nbnodes = 0, *flid_fgid;
  MatType        mtype;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCheck(col_bs >= 1, comm, PETSC_ERR_PLIB, "Column bs cannot be less than 1");
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  PetscCall(MatGetOwnershipRange(Amat, &Istart, &Iend));
  PetscCall(MatGetBlockSize(Amat, &bs));
  nloc = (Iend - Istart) / bs;
  my0  = Istart / bs;
  PetscCheck((Iend - Istart) % bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(Iend %" PetscInt_FMT " - Istart %" PetscInt_FMT ") not divisible by bs %" PetscInt_FMT, Iend, Istart, bs);

  /* get 'nLocalSelected' */
  for (ii = 0, nLocalSelected = 0; ii < nloc; ii++) {
    PetscBool ise;
    /* filter out singletons 0 or 1? */
    PetscCall(PetscCDEmptyAt(agg_lists, ii, &ise));
    if (!ise) nLocalSelected++;
  }

  /* create prolongator, create P matrix */
  PetscCall(MatGetType(Amat, &mtype));
  PetscCall(MatCreate(comm, &Prol));
  PetscCall(MatSetSizes(Prol, nloc * bs, nLocalSelected * col_bs, PETSC_DETERMINE, PETSC_DETERMINE));
  PetscCall(MatSetBlockSizes(Prol, bs, col_bs));
  PetscCall(MatSetType(Prol, mtype));
  PetscCall(MatSeqAIJSetPreallocation(Prol, col_bs, NULL));
  PetscCall(MatMPIAIJSetPreallocation(Prol, col_bs, NULL, col_bs, NULL));
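  /* Each fine node belongs to at most one aggregate, so each block row of P0 holds a
     single bs x col_bs dense block; col_bs nonzeros per row is therefore a tight
     preallocation (allotted to both diagonal and off-diagonal parts in parallel,
     since the owning rank of an aggregate's coarse column is not known here). */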
  /* can get all points "removed" */
  PetscCall(MatGetSize(Prol, &kk, &ii));
  if (!ii) {
    PetscCall(PetscInfo(pc, "%s: No selected points on coarse grid\n", ((PetscObject)pc)->prefix));
    PetscCall(MatDestroy(&Prol));
    *a_P_out = NULL; /* out */
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
    PetscFunctionReturn(0);
  }
  PetscCall(PetscInfo(pc, "%s: New grid %" PetscInt_FMT " nodes\n", ((PetscObject)pc)->prefix, ii / col_bs));
  PetscCall(MatGetOwnershipRangeColumn(Prol, &myCrs0, &kk));

  PetscCheck((kk - myCrs0) % col_bs == 0, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT " -myCrs0 %" PetscInt_FMT ") not divisible by col_bs %" PetscInt_FMT, kk, myCrs0, col_bs);
  myCrs0 = myCrs0 / col_bs;
  PetscCheck((kk / col_bs - myCrs0) == nLocalSelected, PETSC_COMM_SELF, PETSC_ERR_PLIB, "(kk %" PetscInt_FMT "/col_bs %" PetscInt_FMT " - myCrs0 %" PetscInt_FMT ") != nLocalSelected %" PetscInt_FMT, kk, col_bs, myCrs0, nLocalSelected);

  /* create global vector of data in 'data_w_ghost' */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
  if (size > 1) { /* get ghost null space data */
    PetscReal *tmp_gdata, *tmp_ldata, *tp2;
    PetscCall(PetscMalloc1(nloc, &tmp_ldata));
    for (jj = 0; jj < col_bs; jj++) {
      for (kk = 0; kk < bs; kk++) {
        PetscInt         ii, stride;
        const PetscReal *tp = pc_gamg->data + jj * bs * nloc + kk;
        for (ii = 0; ii < nloc; ii++, tp += bs) tmp_ldata[ii] = *tp;

        PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, tmp_ldata, &stride, &tmp_gdata));

        if (!jj && !kk) { /* now I know how many total nodes - allocate TODO: move below and do in one 'col_bs' call */
          PetscCall(PetscMalloc1(stride * bs * col_bs, &data_w_ghost));
          nbnodes = bs * stride;
        }
        tp2 = data_w_ghost + jj * bs * stride + kk;
        for (ii = 0; ii < stride; ii++, tp2 += bs) *tp2 = tmp_gdata[ii];
        PetscCall(PetscFree(tmp_gdata));
      }
    }
    PetscCall(PetscFree(tmp_ldata));
  } else {
    nbnodes      = bs * nloc;
    data_w_ghost = (PetscReal *)pc_gamg->data;
  }

  /* get 'flid_fgid' TODO - move up to get 'stride' and do get null space data above in one step (jj loop) */
  if (size > 1) {
    PetscReal *fid_glid_loc, *fiddata;
    PetscInt   stride;

    PetscCall(PetscMalloc1(nloc, &fid_glid_loc));
    for (kk = 0; kk < nloc; kk++) fid_glid_loc[kk] = (PetscReal)(my0 + kk);
    PetscCall(PCGAMGGetDataWithGhosts(Gmat, 1, fid_glid_loc, &stride, &fiddata));
    PetscCall(PetscMalloc1(stride, &flid_fgid)); /* copy real data to int */
    for (kk = 0; kk < stride; kk++) flid_fgid[kk] = (PetscInt)fiddata[kk];
    PetscCall(PetscFree(fiddata));

    PetscCheck(stride == nbnodes / bs, PETSC_COMM_SELF, PETSC_ERR_PLIB, "stride %" PetscInt_FMT " != nbnodes %" PetscInt_FMT "/bs %" PetscInt_FMT, stride, nbnodes, bs);
    PetscCall(PetscFree(fid_glid_loc));
  } else {
    PetscCall(PetscMalloc1(nloc, &flid_fgid));
    for (kk = 0; kk < nloc; kk++) flid_fgid[kk] = my0 + kk;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLA], 0, 0, 0, 0));
  /* get P0 */
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
  {
    PetscReal *data_out = NULL;
    PetscCall(formProl0(agg_lists, bs, col_bs, myCrs0, nbnodes, data_w_ghost, flid_fgid, &data_out, Prol));
    PetscCall(PetscFree(pc_gamg->data));

    pc_gamg->data           = data_out;
    pc_gamg->data_cell_rows = col_bs;
    pc_gamg->data_sz        = col_bs * col_bs * nLocalSelected;
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROLB], 0, 0, 0, 0));
  if (size > 1) PetscCall(PetscFree(data_w_ghost));
  PetscCall(PetscFree(flid_fgid));

  *a_P_out = Prol; /* out */

  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_PROL], 0, 0, 0, 0));
  PetscFunctionReturn(0);
}

/*
   PCGAMGOptProlongator_AGG

   Input Parameters:
+  pc - this
-  Amat - matrix on this fine level

   Input/Output Parameter:
.  a_P - prolongation operator to the next level
*/
static PetscErrorCode PCGAMGOptProlongator_AGG(PC pc, Mat Amat, Mat *a_P)
{
  PC_MG       *mg          = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg     = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg = (PC_GAMG_AGG *)pc_gamg->subctx;
  PetscInt     jj;
  Mat          Prol = *a_P;
  MPI_Comm     comm;
  KSP          eksp;
  Vec          bb, xx;
  PC           epc;
  PetscReal    alpha, emax, emin;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)Amat, &comm));
  PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));

  /* compute maximum singular value of operator to be used in smoother */
  if (0 < pc_gamg_agg->nsmooths) {
    /* get eigen estimates */
    if (pc_gamg->emax > 0) {
      emin = pc_gamg->emin;
      emax = pc_gamg->emax;
    } else {
      const char *prefix;

      PetscCall(MatCreateVecs(Amat, &bb, NULL));
      PetscCall(MatCreateVecs(Amat, &xx, NULL));
      PetscCall(KSPSetNoisy_Private(bb));

      PetscCall(KSPCreate(comm, &eksp));
      PetscCall(PCGetOptionsPrefix(pc, &prefix));
      PetscCall(KSPSetOptionsPrefix(eksp, prefix));
      PetscCall(KSPAppendOptionsPrefix(eksp, "pc_gamg_esteig_"));
      {
        PetscBool isset, sflg;
        PetscCall(MatIsSPDKnown(Amat, &isset, &sflg));
        if (isset && sflg) PetscCall(KSPSetType(eksp, KSPCG));
      }
      PetscCall(KSPSetErrorIfNotConverged(eksp, pc->erroriffailure));
      PetscCall(KSPSetNormType(eksp, KSP_NORM_NONE));

      PetscCall(KSPSetInitialGuessNonzero(eksp, PETSC_FALSE));
      PetscCall(KSPSetOperators(eksp, Amat, Amat));

      PetscCall(KSPGetPC(eksp, &epc));
      PetscCall(PCSetType(epc, PCJACOBI)); /* smoother in smoothed agg. */
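      /* The solve below is used only to harvest estimates of the extreme singular
         values of the Jacobi-preconditioned operator (via
         KSPComputeExtremeSingularValues), hence no norm computation and a small
         fixed iteration count suffice. */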
      PetscCall(KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10)); // 10 is safer, but 5 is often fine; can override with -pc_gamg_esteig_ksp_max_it, -mg_levels_ksp_chebyshev_esteig 0,0.25,0,1.2

      PetscCall(KSPSetFromOptions(eksp));
      PetscCall(KSPSetComputeSingularValues(eksp, PETSC_TRUE));
      PetscCall(KSPSolve(eksp, bb, xx));
      PetscCall(KSPCheckSolve(eksp, pc, xx));

      PetscCall(KSPComputeExtremeSingularValues(eksp, &emax, &emin));
      PetscCall(PetscInfo(pc, "%s: Smooth P0: max eigen=%e min=%e PC=%s\n", ((PetscObject)pc)->prefix, (double)emax, (double)emin, PCJACOBI));
      PetscCall(VecDestroy(&xx));
      PetscCall(VecDestroy(&bb));
      PetscCall(KSPDestroy(&eksp));
    }
    if (pc_gamg->use_sa_esteig) {
      mg->min_eigen_DinvA[pc_gamg->current_level] = emin;
      mg->max_eigen_DinvA[pc_gamg->current_level] = emax;
      PetscCall(PetscInfo(pc, "%s: Smooth P0: level %" PetscInt_FMT ", cache spectra %g %g\n", ((PetscObject)pc)->prefix, pc_gamg->current_level, (double)emin, (double)emax));
    } else {
      mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
      mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
    }
  } else {
    mg->min_eigen_DinvA[pc_gamg->current_level] = 0;
    mg->max_eigen_DinvA[pc_gamg->current_level] = 0;
  }

  /* smooth P0 */
  for (jj = 0; jj < pc_gamg_agg->nsmooths; jj++) {
    Mat tMat;
    Vec diag;

    PetscCall(PetscLogEventBegin(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));

    /* smooth P1 := (I - omega/lam D^{-1}A)P0 */
    PetscCall(PetscLogEventBegin(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
    PetscCall(MatMatMult(Amat, Prol, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &tMat));
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_matmat_events[pc_gamg->current_level][2], 0, 0, 0, 0));
    PetscCall(MatProductClear(tMat));
    PetscCall(MatCreateVecs(Amat, &diag, NULL));
    PetscCall(MatGetDiagonal(Amat, diag)); /* effectively PCJACOBI */
    PetscCall(VecReciprocal(diag));
    PetscCall(MatDiagonalScale(tMat, diag, NULL));
    PetscCall(VecDestroy(&diag));

    /* TODO: Set a PCFailedReason and exit the building of the AMG preconditioner */
    PetscCheck(emax != 0.0, PetscObjectComm((PetscObject)pc), PETSC_ERR_PLIB, "Computed maximum singular value as zero");
    /* TODO: Document the 1.4 and don't hardwire it in this routine */
    alpha = -1.4 / emax;

    PetscCall(MatAYPX(tMat, alpha, Prol, SUBSET_NONZERO_PATTERN));
    PetscCall(MatDestroy(&Prol));
    Prol = tMat;
    PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPTSM], 0, 0, 0, 0));
  }
  PetscCall(PetscLogEventEnd(petsc_gamg_setup_events[GAMG_OPT], 0, 0, 0, 0));
  *a_P = Prol;
  PetscFunctionReturn(0);
}
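/* Smoothed aggregation recap (illustration): with the hardwired omega = 1.4 above
   and emax estimating lambda_max(D^{-1}A), each pass of the loop computes

       P_{k+1} = (I - (omega/lambda_max) D^{-1} A) P_k

   i.e. one damped-Jacobi sweep applied to the columns of the tentative prolongator.
   The classical smoothed aggregation analysis (Vanek et al.) suggests omega = 4/3
   for a single sweep; the 1.4 used here is slightly larger (see the TODO above). */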
/*
   PCCreateGAMG_AGG

   Input Parameter:
.  pc - the preconditioner context
*/
PetscErrorCode PCCreateGAMG_AGG(PC pc)
{
  PC_MG       *mg      = (PC_MG *)pc->data;
  PC_GAMG     *pc_gamg = (PC_GAMG *)mg->innerctx;
  PC_GAMG_AGG *pc_gamg_agg;

  PetscFunctionBegin;
  /* create sub context for SA */
  PetscCall(PetscNew(&pc_gamg_agg));
  pc_gamg->subctx = pc_gamg_agg;

  pc_gamg->ops->setfromoptions = PCSetFromOptions_GAMG_AGG;
  pc_gamg->ops->destroy        = PCDestroy_GAMG_AGG;
  /* reset does not do anything; setup not virtual */

  /* set internal function pointers */
  pc_gamg->ops->creategraph       = PCGAMGCreateGraph_AGG;
  pc_gamg->ops->coarsen           = PCGAMGCoarsen_AGG;
  pc_gamg->ops->prolongator       = PCGAMGProlongator_AGG;
  pc_gamg->ops->optprolongator    = PCGAMGOptProlongator_AGG;
  pc_gamg->ops->createdefaultdata = PCSetData_AGG;
  pc_gamg->ops->view              = PCView_GAMG_AGG;

  pc_gamg_agg->aggressive_coarsening_levels = 0;
  pc_gamg_agg->nsmooths                     = 1;

  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetNSmooths_C", PCGAMGSetNSmooths_AGG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCGAMGSetAggressiveLevels_C", PCGAMGSetAggressiveLevels_AGG));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_AGG));
  PetscFunctionReturn(0);
}
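/* Note: PCCreateGAMG_AGG is the constructor registered for the (default) "agg"
   PCGAMGType; it is typically reached via PCGAMGSetType(pc, PCGAMGAGG) or the
   options database, e.g. -pc_type gamg -pc_gamg_type agg. */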