/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include <petsc/private/matimpl.h>
#include <../src/ksp/pc/impls/gamg/gamg.h>       /*I "petscpc.h" I*/
#include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGraph_AGG;
PetscLogEvent PC_GAMGGraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptProlongator_AGG;
#endif

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[PETSC_GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n");
  pc_gamg->data_sz = 0;
  ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCGAMGCreateLevel_GAMG: create the coarse operator with RAP; repartition and/or reduce the
      number of active processes.

   Input Parameters:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   In/Output Parameters:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   . a_Amat_crs - coarse matrix that is created (k-1)
*/
static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS *Pcolumnperm,PetscBool is_last)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Cmat,Pold=*a_P_inout;
  MPI_Comm       comm;
  PetscMPIInt    rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt       ncrs_eq,ncrs,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs' (nodes), 'ncrs_eq' (equations) */
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);
  if (pc_gamg->data_cell_rows>0) {
    ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  } else {
    PetscInt bs;
    ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr);
    ncrs = ncrs_eq/bs;
  }

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  if (is_last && !pc_gamg->use_parallel_coarse_grid_solver) new_size = 1;
  else {
    PetscInt ncrs_eq_glob;
    ierr = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwired minimum number of eq/proc */
    if (!new_size) new_size = 1; /* not likely, possible? */
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
  }

  if (Pcolumnperm) *Pcolumnperm = NULL;

  if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old;
    IS       is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr);
    if (pc_gamg->repart) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, %D local equations\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr);

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;
        MatType           mtype;

        ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs;
          if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs;
        }

        ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr);
        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,mtype);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          ierr = PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);CHKERRQ(ierr);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }
        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find factor */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (!(size%kk)) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* does this happen? (a prime) */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        ierr        = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
        ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
        PetscFunctionReturn(0);
      }

      ierr     = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    ncrs_new    = ncrs_eq_new/cr_bs; /* eqs */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxiliary data into some complex abstracted thing */
    {
      Vec            src_crd, dest_crd;
      const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
      VecScatter     vecscat;
      PetscScalar    *array;
      IS             isscat;

      /* move data (for primal equations only) */
      /* Create a vector to contain the newly ordered element information */
      ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
      ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr);
      ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */
      /*
        There are 'ndata_rows*ndata_cols' data items per node; one can think of the vectors as having
        a block size of 'node_data_sz'. Note, ISs are expanded into equation space by 'cr_bs'.
      */
      ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr);
      ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      for (ii=0,jj=0; ii<ncrs; ii++) {
        PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
        for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
      }
      ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
      ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
      ierr = PetscFree(tidx);CHKERRQ(ierr);
      /*
        Create a vector to contain the original vertex information for each element
      */
      ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        const PetscInt stride0 = ncrs*pc_gamg->data_cell_rows;
        for (ii=0; ii<ncrs; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
            PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
            ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
      ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
      ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
      /*
        Scatter the element vertex information (still in the original vertex ordering)
        to the correct processor
      */
      ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
      ierr = ISDestroy(&isscat);CHKERRQ(ierr);
      ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
      ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
      ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
      /*
        Put the element vertex data into a new allocation of the gdata->ele
      */
      ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
      ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr);

      pc_gamg->data_sz = node_data_sz*ncrs_new;
      strideNew        = ncrs_new*ndata_rows;

      ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
      for (jj=0; jj<ndata_cols; jj++) {
        for (ii=0; ii<ncrs_new; ii++) {
          for (kk=0; kk<ndata_rows; kk++) {
            PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
            pc_gamg->data[ix] = PetscRealPart(array[jx]);
          }
        }
      }
      ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
      ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);
    }
    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatCreateSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    if (Pcolumnperm) {
      ierr         = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr);
      *Pcolumnperm = new_eq_indices;
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatCreateSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;

      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatCreateSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }
  PetscFunctionReturn(0);
}
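/*
   For reference, a minimal sketch (not part of the build; the names 'A', 'P', and 'Ac'
   are illustrative) of the Galerkin triple product this routine is built around: the
   coarse operator is A_{k-1} = P^T A_k P, computed with MatPtAP() as above.

     Mat A, P, Ac;                                            // fine operator and prolongator, assumed assembled
     ierr = MatPtAP(A, P, MAT_INITIAL_MATRIX, 2.0, &Ac);CHKERRQ(ierr); // Ac = P' * A * P; 2.0 is the fill estimate
     // ... use Ac as the next-level operator ...
     ierr = MatDestroy(&Ac);CHKERRQ(ierr);
*/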
/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
                  by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

*/
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,N,qq,lidx,nASMBlocksArr[PETSC_GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[PETSC_GAMG_MAXLEVELS],Parr[PETSC_GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[PETSC_GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      is_last = PETSC_FALSE;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->setup_count++ > 0) {
    if ((PetscBool)(!pc_gamg->reuse_prol)) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>=0; level--) {
          /* the first time through the matrix structure has changed from repartitioning */
          if (pc_gamg->setup_count==2) {
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);

            mglevels[level]->A = B;
          } else {
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) {
    ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  ierr = MatGetSize(Pmat, &M, &N);CHKERRQ(ierr);

  ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */
  nnz0   = info.nz_used;
  nnztot = info.nz_used;
  ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr);

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) {
    pc_gamg->current_level = level;
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11;

      ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (pc_gamg->ops->optprolongator) {
          /* smooth */
          ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr);
        }

        Parr[level1] = Prol11;
      } else Parr[level1] = NULL; /* failed to coarsen */

      if (pc_gamg->use_aggs_in_asm && Prol11) { /* skip when coarsening failed and Prol11 is NULL */
        PetscInt bs;
        ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr);
        ierr = PetscCDGetASMBlocks(agg_lists, bs, Gmat, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) { /* failed to coarsen */
      ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES
      ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Parr[level1], &M, &N);CHKERRQ(ierr); /* N is the next M, a loop test variable */
    if (is_last) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_last is already set: should not be coarsening further");
    if (N <= pc_gamg->coarse_eq_limit) is_last = PETSC_TRUE;
    if (level1 == pc_gamg->Nlevels-1) is_last = PETSC_TRUE;
    ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs, &Parr[level1], &Aarr[level1], &nactivepe, NULL, is_last);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &N);CHKERRQ(ierr); /* M is the loop test variable */
    ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
    nnztot += info.nz_used;
    ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr);

#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
    /* stop if one node or one proc -- could pull back for singular problems */
    if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) {
      ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D. Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr);
      level++;
      break;
    }
  } /* levels */
  ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);

  ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* set blocks for ASM smoother that uses the 'aggregates' */
      if (pc_gamg->use_aggs_in_asm) {
        PetscInt sz;
        IS       *iss;

        sz  = nASMBlocksArr[level];
        iss = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCASM);CHKERRQ(ierr);
        ierr = PCASMSetOverlap(subpc, 0);CHKERRQ(ierr);
        ierr = PCASMSetType(subpc,PC_ASM_BASIC);CHKERRQ(ierr);
        if (!sz) {
          IS is;
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 0, NULL, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCASMSetLocalSubdomains(subpc, 1, NULL, &is);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCASMSetLocalSubdomains(subpc, sz, NULL, iss);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&iss[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(iss);CHKERRQ(ierr);
        }
        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
      } else {
        ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr);
      if (!pc_gamg->use_parallel_coarse_grid_solver) {
        ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
        ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
        ierr = PCSetUp(subpc);CHKERRQ(ierr);
        ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
        if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
        ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
        ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
        ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
        ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr);
        ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr);
        /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in
         * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to
         * view every subdomain as though they were different. */
        ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE;
      }
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else {
    KSP smoother;
    ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
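/*
   A minimal sketch (not part of the build; 'A', 'b', 'x', and 'ksp' are illustrative
   names) of the user-level calls that end up invoking PCSetUp_GAMG(): PCSetUp() is
   triggered from KSPSetUp()/KSPSolve().

     Mat A;  Vec b, x;                   // assumed assembled by the application
     KSP ksp;  PC pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = KSPSetUp(ksp);CHKERRQ(ierr);  // builds the GAMG hierarchy via PCSetUp_GAMG()
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/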
/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for per process on the coarse grids via processor reduction.

   Logically Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations

   Options Database Key:
.  -pc_gamg_process_eq_limit <limit>

   Notes:
   GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
   equations on each process that has degrees of freedom

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetCoarseEqLim()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
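/*
   A minimal usage sketch (the values 1000 and 200 are arbitrary illustrations):
   aim for ~1000 equations per active process on coarse grids, and stop reducing
   once the coarsest grid has at most ~200 equations.

     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetProcEqLim(pc,1000);CHKERRQ(ierr);
     ierr = PCGAMGSetCoarseEqLim(pc,200);CHKERRQ(ierr);
*/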
/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - maximum number of equations to aim for

   Options Database Key:
.  -pc_gamg_coarse_eq_limit <limit>

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetProcEqLim()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetRepartition - Repartition the degrees of freedom across the processors on the coarser grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_repartition <true,false>

   Notes:
   This will generally improve the load balancing of the work on each level

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartition(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartition_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetRepartition_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding the algebraic multigrid preconditioner

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE or PETSC_FALSE

   Options Database Key:
.  -pc_gamg_reuse_interpolation <true,false>

   Level: intermediate

   Notes:
   This may negatively affect the convergence rate of the method on new matrices if the matrix entries change a great deal, but allows
   rebuilding the preconditioner quicker.

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}
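/*
   A minimal sketch of the reuse workflow ('ksp', 'Anew', 'b', and 'x' are illustrative
   names): with reuse enabled, a later KSPSetOperators()/KSPSolve() rebuilds only the
   Galerkin coarse operators and keeps the existing prolongators.

     ierr = PCGAMGSetReuseInterpolation(pc,PETSC_TRUE);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);               // first solve builds the full hierarchy
     ierr = KSPSetOperators(ksp,Anew,Anew);CHKERRQ(ierr);  // new values, same nonzero structure assumed
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);               // refreshes coarse grids, reuses prolongation
*/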
/*@
   PCGAMGASMSetUseAggs - Have the PCGAMG smoother on each level use the aggregates defined by the coarsening process as the subdomains for the additive Schwarz preconditioner.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to use aggregates, PETSC_FALSE to not

   Options Database Key:
.  -pc_gamg_asm_use_agg

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGASMSetUseAggs(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGASMSetUseAggs_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGASMSetUseAggs_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_asm = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetUseParallelCoarseGridSolve - allow a parallel coarse grid solver

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  flg - PETSC_TRUE to not force coarse grid onto one processor

   Options Database Key:
.  -pc_gamg_use_parallel_coarse_grid_solver

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve(PC pc, PetscBool flg)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseParallelCoarseGridSolve_C",(PC,PetscBool),(pc,flg));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetUseParallelCoarseGridSolve_GAMG(PC pc, PetscBool flg)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_parallel_coarse_grid_solver = flg;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner
-  n - the maximum number of levels to use

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}

/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
.  v - array of threshold values, one per level starting with the finest; 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph
-  n - number of threshold values provided in v; levels beyond these are filled in using the threshold scale factor

   Options Database Key:
.  -pc_gamg_threshold <threshold>

   Notes:
   Before aggregating the graph, GAMG removes small values from it, reducing the coupling in the graph and giving a different
   (perhaps better) coarser set of points.

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGFilterGraph()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal v[], PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal[],PetscInt),(pc,v,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal v[], PetscInt n)
{
  PC_MG    *mg      = (PC_MG*)pc->data;
  PC_GAMG  *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscInt i;

  PetscFunctionBegin;
  for (i=0;i<n;i++) pc_gamg->threshold[i] = v[i];
  /* fill the remaining levels by scaling the last given value; the i>0 guard leaves the defaults untouched when n==0 */
  for (;i>0 && i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;
  PetscFunctionReturn(0);
}
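/*
   A minimal sketch of per-level thresholds (the values 0.02, 0.01, and 0.5 are
   arbitrary illustrations): drop edges below 0.02 on the finest graph and 0.01 on
   the next level; coarser levels are filled in using the threshold scale factor.

     PetscReal th[2] = {0.02, 0.01};
     ierr = PCGAMGSetThreshold(pc,th,2);CHKERRQ(ierr);
     ierr = PCGAMGSetThresholdScale(pc,0.5);CHKERRQ(ierr); // halve the threshold on each unspecified level
*/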
/*@
   PCGAMGSetThresholdScale - Relative threshold reduction at each level

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  scale - the threshold value reduction, usually < 1.0

   Options Database Key:
.  -pc_gamg_threshold_scale <v>

   Level: advanced

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThresholdScale(PC pc, PetscReal v)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThresholdScale_C",(PC,PetscReal),(pc,v));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetThresholdScale_GAMG(PC pc, PetscReal v)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold_scale = v;
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGSetType - Set the solution method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL

   Options Database Key:
.  -pc_gamg_type <agg,geo,classical> - type of algebraic multigrid to apply

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGGetType(), PCGAMG, PCGAMGType
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGGetType - Get the solution method

   Collective on PC

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  type - the type of algorithm used

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: PCGAMGSetType(), PCGAMGType
@*/
PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  *type = pc_gamg->type;
  PetscFunctionReturn(0);
}

static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->type = type;
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
    pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;
    /* cleaning up common data in pc_gamg - this should disappear someday */
    pc_gamg->data_cell_cols      = 0;
    pc_gamg->data_cell_rows      = 0;
    pc_gamg->orig_data_cell_cols = 0;
    pc_gamg->orig_data_cell_rows = 0;
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data_sz = 0;
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
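/*
   A minimal usage sketch: select the smoothed-aggregation implementation
   programmatically (equivalent to -pc_gamg_type agg on the command line).

     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
*/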
static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscInt       i;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PetscViewerASCIIPrintf(viewer," GAMG specific options\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Threshold for dropping small values in graph on each level =");CHKERRQ(ierr);
  for (i=0;i<pc_gamg->current_level;i++) {
    ierr = PetscViewerASCIIPrintf(viewer," %g",(double)pc_gamg->threshold[i]);CHKERRQ(ierr);
  }
  ierr = PetscViewerASCIIPrintf(viewer,"\n");CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer," Threshold scaling factor for each level not specified = %g\n",(double)pc_gamg->threshold_scale);CHKERRQ(ierr);
  if (pc_gamg->use_aggs_in_asm) {
    ierr = PetscViewerASCIIPrintf(viewer," Using aggregates from coarsening process to define subdomains for PCASM\n");CHKERRQ(ierr);
  }
  if (pc_gamg->use_parallel_coarse_grid_solver) {
    ierr = PetscViewerASCIIPrintf(viewer," Using parallel coarse grid solver (all coarse grid equations not put on one process)\n");CHKERRQ(ierr);
  }
  if (pc_gamg->ops->view) {
    ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  MPI_Comm       comm;
  char           prefix[256];
  PetscInt       i,n;
  const char     *pcpre;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr);
  {
    char tname[256];
    ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr);
    }
    ierr = PetscOptionsBool("-pc_gamg_repartition","Repartition coarse grids","PCGAMGSetRepartition",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGSetReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_asm_use_agg","Use aggregation aggregates for ASM smoother","PCGAMGASMSetUseAggs",pc_gamg->use_aggs_in_asm,&pc_gamg->use_aggs_in_asm,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-pc_gamg_use_parallel_coarse_grid_solver","Use parallel coarse grid solver (otherwise put last grid on one process)","PCGAMGSetUseParallelCoarseGridSolve",pc_gamg->use_parallel_coarse_grid_solver,&pc_gamg->use_parallel_coarse_grid_solver,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr);
    ierr = PetscOptionsReal("-pc_gamg_threshold_scale","Scaling of threshold for each level not specified","PCGAMGSetThresholdScale",pc_gamg->threshold_scale,&pc_gamg->threshold_scale,NULL);CHKERRQ(ierr);
    n = PETSC_GAMG_MAXLEVELS;
    ierr = PetscOptionsRealArray("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&n,&flag);CHKERRQ(ierr);
    if (!flag || n < PETSC_GAMG_MAXLEVELS) {
      if (!flag) n = 1;
      i = n;
      do {pc_gamg->threshold[i] = pc_gamg->threshold[i-1]*pc_gamg->threshold_scale;} while (++i<PETSC_GAMG_MAXLEVELS);
    }
    ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);}
  }
  ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
  ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioner

   Options Database Keys:
+   -pc_gamg_type <type> - one of agg, geo, or classical
.   -pc_gamg_repartition <true,default=false> - repartition the degrees of freedom across the coarse grids as they are determined
.   -pc_gamg_reuse_interpolation <true,default=false> - when rebuilding the algebraic multigrid preconditioner reuse the previously computed interpolations
.   -pc_gamg_asm_use_agg <true,default=false> - use the aggregates from the coarsening process to define the subdomains on each level for the PCASM smoother
.   -pc_gamg_process_eq_limit <limit, default=50> - GAMG will reduce the number of MPI processes used directly on the coarse grids so that there are around <limit>
                                                    equations on each process that has degrees of freedom
.   -pc_gamg_coarse_eq_limit <limit, default=50> - Set maximum number of equations on coarsest grid to aim for.
.   -pc_gamg_threshold[] <thresh,default=0> - Before aggregating the graph GAMG will remove small values from the graph on each level
-   -pc_gamg_threshold_scale <scale,default=1> - Scaling of threshold on each coarser grid if not specified

   Options Database Keys for default Aggregation:
+  -pc_gamg_agg_nsmooths <nsmooth, default=1> - number of smoothing steps to use with smoothed aggregation
.  -pc_gamg_sym_graph <true,default=false> - symmetrize the graph before computing the aggregation
-  -pc_gamg_square_graph <n,default=1> - number of levels to square the graph before aggregating it

   Multigrid options (inherited):
+  -pc_mg_cycles <v> - v or w (PCMGSetCycleType())
.  -pc_mg_smoothup <1> - Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.  -pc_mg_smoothdown <1> - Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
.  -pc_mg_type <multiplicative> - (one of) additive multiplicative full kaskade
-  -pc_mg_levels <levels> - Number of levels of multigrid to use.

   Notes:
   In order to obtain good performance for PCGAMG for vector valued problems you must
$       Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point
$       Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator
$       See the Users Manual Chapter 4 for more details

   Level: intermediate

   Concepts: algebraic multigrid

.seealso: PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(),
          PCGAMGSetCoarseEqLim(), PCGAMGSetRepartition(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGASMSetUseAggs(), PCGAMGSetUseParallelCoarseGridSolve(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType()
M*/
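/*
   A minimal usage sketch for the notes above (matrix and vector names, 'nloc', and
   'coords' are illustrative and would come from the application): vector-valued 3D
   elasticity with block size 3 and the near null space supplied via coordinates.

     Mat A;  Vec b, x;                      // assumed assembled, with MatSetBlockSize(A,3)
     PetscInt nloc;  PetscReal *coords;     // local nodes and their x,y,z coordinates
     KSP ksp;  PC pc;
     ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
     ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
     ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
     ierr = PCSetType(pc,PCGAMG);CHKERRQ(ierr);
     ierr = PCSetCoordinates(pc,3,nloc,coords);CHKERRQ(ierr); // or MatSetNearNullSpace() on A
     ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
     ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/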
PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PetscInt       i;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;

  PetscFunctionBegin;
  /* register AMG type */
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);

  /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr);
  ierr = PCMGSetGalerkin(pc,PC_MG_GALERKIN_EXTERNAL);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;
  mg->view                = PCView_GAMG;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartition_C",PCGAMGSetRepartition_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGASMSetUseAggs_C",PCGAMGASMSetUseAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseParallelCoarseGridSolve_C",PCGAMGSetUseParallelCoarseGridSolve_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThresholdScale_C",PCGAMGSetThresholdScale_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr);
  pc_gamg->repart                          = PETSC_FALSE;
  pc_gamg->reuse_prol                      = PETSC_FALSE;
  pc_gamg->use_aggs_in_asm                 = PETSC_FALSE;
  pc_gamg->use_parallel_coarse_grid_solver = PETSC_FALSE;
  pc_gamg->min_eq_proc                     = 50;
  pc_gamg->coarse_eq_limit                 = 50;
  for (i=0;i<PETSC_GAMG_MAXLEVELS;i++) pc_gamg->threshold[i] = 0.;
  pc_gamg->threshold_scale  = 1.;
  pc_gamg->Nlevels          = PETSC_GAMG_MAXLEVELS;
  pc_gamg->current_level    = 0; /* don't need to init really */
  pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG;

  /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */
  ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
   from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
   when using static libraries.

   Level: developer

.keywords: PC, PCGAMG, initialize, package
.seealso: PetscInitialize()
@*/
PetscErrorCode PCGAMGInitializePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (PCGAMGPackageInitialized) PetscFunctionReturn(0);
  PCGAMGPackageInitialized = PETSC_TRUE;
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr);
  ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);

  /* general events */
  ierr = PetscLogEventRegister("PCGAMGGraph_AGG", PC_CLASSID, &PC_GAMGGraph_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
  ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr);
  /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */
  /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */
  /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */
  ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr);
  ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr);

  /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */
  /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */
  /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */
  /* create timer stages */
#if defined GAMG_STAGES
  {
    char     str[32];
    PetscInt lidx;
    sprintf(str,"MG Level %d (finest)",0);
    ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr);
    for (lidx=1; lidx<PETSC_GAMG_MAXLEVELS; lidx++) { /* stay within the bounds of gamg_stages[] */
      sprintf(str,"MG Level %d",(int)lidx);
      ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr);
    }
  }
#endif
#endif
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is
   called from PetscFinalize() automatically.

   Level: developer

.keywords: Petsc, destroy, package
.seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PCGAMGPackageInitialized = PETSC_FALSE;
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCGAMGRegister - Register a PCGAMG implementation.

   Input Parameters:
+  type - string that will be used as the name of the GAMG type.
-  create - function for creating the gamg context.

   Level: advanced

.seealso: PCGAMGType, PCGAMG, PCGAMGSetType()
@*/
PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
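/*
   A minimal sketch of registering a custom GAMG flavor ('PCCreateGAMG_My' and the
   type name "my" are hypothetical): after registration the type can be selected
   with PCGAMGSetType(pc,"my") or -pc_gamg_type my.

     extern PetscErrorCode PCCreateGAMG_My(PC);  // user-provided constructor
     ierr = PCGAMGRegister("my", PCCreateGAMG_My);CHKERRQ(ierr);
*/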