1 /* 2 GAMG geometric-algebric multigrid PC - Mark Adams 2011 3 */ 4 #include <petsc/private/matimpl.h> 5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/ 6 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */ 7 8 #if defined PETSC_GAMG_USE_LOG 9 PetscLogEvent petsc_gamg_setup_events[NUM_SET]; 10 #endif 11 12 #if defined PETSC_USE_LOG 13 PetscLogEvent PC_GAMGGraph_AGG; 14 PetscLogEvent PC_GAMGGraph_GEO; 15 PetscLogEvent PC_GAMGCoarsen_AGG; 16 PetscLogEvent PC_GAMGCoarsen_GEO; 17 PetscLogEvent PC_GAMGProlongator_AGG; 18 PetscLogEvent PC_GAMGProlongator_GEO; 19 PetscLogEvent PC_GAMGOptProlongator_AGG; 20 #endif 21 22 #define GAMG_MAXLEVELS 30 23 24 /* #define GAMG_STAGES */ 25 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 26 static PetscLogStage gamg_stages[GAMG_MAXLEVELS]; 27 #endif 28 29 static PetscFunctionList GAMGList = 0; 30 static PetscBool PCGAMGPackageInitialized; 31 32 /* ----------------------------------------------------------------------------- */ 33 #undef __FUNCT__ 34 #define __FUNCT__ "PCReset_GAMG" 35 PetscErrorCode PCReset_GAMG(PC pc) 36 { 37 PetscErrorCode ierr; 38 PC_MG *mg = (PC_MG*)pc->data; 39 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 40 41 PetscFunctionBegin; 42 if (pc_gamg->data) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_PLIB,"This should not happen, cleaned up in SetUp\n"); 43 pc_gamg->data_sz = 0; 44 ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr); 45 PetscFunctionReturn(0); 46 } 47 48 /* -------------------------------------------------------------------------- */ 49 /* 50 PCGAMGCreateLevel_GAMG: create coarse op with RAP. repartition and/or reduce number 51 of active processors. 52 53 Input Parameter: 54 . pc - parameters + side effect: coarse data in 'pc_gamg->data' and 55 'pc_gamg->data_sz' are changed via repartitioning/reduction. 56 . Amat_fine - matrix on this fine (k) level 57 . cr_bs - coarse block size 58 In/Output Parameter: 59 . a_P_inout - prolongation operator to the next level (k-->k-1) 60 . a_nactive_proc - number of active procs 61 Output Parameter: 62 . a_Amat_crs - coarse matrix that is created (k-1) 63 */ 64 65 #undef __FUNCT__ 66 #define __FUNCT__ "PCGAMGCreateLevel_GAMG" 67 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc,IS * Pcolumnperm) 68 { 69 PetscErrorCode ierr; 70 PC_MG *mg = (PC_MG*)pc->data; 71 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 72 Mat Cmat,Pold=*a_P_inout; 73 MPI_Comm comm; 74 PetscMPIInt rank,size,new_size,nactive=*a_nactive_proc; 75 PetscInt ncrs_eq,ncrs,f_bs; 76 77 PetscFunctionBegin; 78 ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr); 79 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 80 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 81 ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr); 82 ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr); 83 84 /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/ 85 ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr); 86 if (pc_gamg->data_cell_rows>0) { 87 ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows; 88 } else { 89 PetscInt bs; 90 ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr); 91 ncrs = ncrs_eq/bs; 92 } 93 94 /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */ 95 { 96 PetscInt ncrs_eq_glob; 97 ierr = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr); 98 new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */ 99 if (!new_size) new_size = 1; /* not likely, posible? */ 100 else if (new_size >= nactive) new_size = nactive; /* no change, rare */ 101 } 102 103 if (Pcolumnperm) *Pcolumnperm = NULL; 104 105 if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */ 106 else { 107 PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old; 108 IS is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices; 109 110 nloc_old = ncrs_eq/cr_bs; 111 if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs); 112 #if defined PETSC_GAMG_USE_LOG 113 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 114 #endif 115 /* make 'is_eq_newproc' */ 116 ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr); 117 if (pc_gamg->repart) { 118 /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates of primal part accordingly */ 119 Mat adj; 120 121 ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, neq = %D\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr); 122 123 /* get 'adj' */ 124 if (cr_bs == 1) { 125 ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 126 } else { 127 /* make a scalar matrix to partition (no Stokes here) */ 128 Mat tMat; 129 PetscInt Istart_crs,Iend_crs,ncols,jj,Ii; 130 const PetscScalar *vals; 131 const PetscInt *idx; 132 PetscInt *d_nnz, *o_nnz, M, N; 133 static PetscInt llev = 0; 134 MatType mtype; 135 136 ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr); 137 ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr); 138 ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr); 139 for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) { 140 ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 141 d_nnz[jj] = ncols/cr_bs; 142 o_nnz[jj] = ncols/cr_bs; 143 ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 144 if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs; 145 if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs; 146 } 147 148 ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr); 149 ierr = MatCreate(comm, &tMat);CHKERRQ(ierr); 150 ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr); 151 ierr = MatSetType(tMat,mtype);CHKERRQ(ierr); 152 ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr); 153 ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 154 ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 155 156 for (ii = Istart_crs; ii < Iend_crs; ii++) { 157 PetscInt dest_row = ii/cr_bs; 158 ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 159 for (jj = 0; jj < ncols; jj++) { 160 PetscInt dest_col = idx[jj]/cr_bs; 161 PetscScalar v = 1.0; 162 ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr); 163 } 164 ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 165 } 166 ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 167 ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 168 169 if (llev++ == -1) { 170 PetscViewer viewer; char fname[32]; 171 ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr); 172 PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer); 173 ierr = MatView(tMat, viewer);CHKERRQ(ierr); 174 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 175 } 176 ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 177 ierr = MatDestroy(&tMat);CHKERRQ(ierr); 178 } /* create 'adj' */ 179 180 { /* partition: get newproc_idx */ 181 char prefix[256]; 182 const char *pcpre; 183 const PetscInt *is_idx; 184 MatPartitioning mpart; 185 IS proc_is; 186 PetscInt targetPE; 187 188 ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr); 189 ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr); 190 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 191 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 192 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 193 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 194 ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr); 195 ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr); 196 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 197 198 /* collect IS info */ 199 ierr = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr); 200 ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr); 201 targetPE = 1; /* bring to "front" of machine */ 202 /*targetPE = size/new_size;*/ /* spread partitioning across machine */ 203 for (kk = jj = 0 ; kk < nloc_old ; kk++) { 204 for (ii = 0 ; ii < cr_bs ; ii++, jj++) { 205 newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */ 206 } 207 } 208 ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr); 209 ierr = ISDestroy(&proc_is);CHKERRQ(ierr); 210 } 211 ierr = MatDestroy(&adj);CHKERRQ(ierr); 212 213 ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr); 214 ierr = PetscFree(newproc_idx);CHKERRQ(ierr); 215 } else { /* simple aggreagtion of parts -- 'is_eq_newproc' */ 216 PetscInt rfactor,targetPE; 217 218 /* find factor */ 219 if (new_size == 1) rfactor = size; /* easy */ 220 else { 221 PetscReal best_fact = 0.; 222 jj = -1; 223 for (kk = 1 ; kk <= size ; kk++) { 224 if (!(size%kk)) { /* a candidate */ 225 PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size; 226 if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */ 227 if (fact > best_fact) { 228 best_fact = fact; jj = kk; 229 } 230 } 231 } 232 if (jj != -1) rfactor = jj; 233 else rfactor = 1; /* does this happen .. a prime */ 234 } 235 new_size = size/rfactor; 236 237 if (new_size==nactive) { 238 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */ 239 ierr = PetscFree(counts);CHKERRQ(ierr); 240 ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr); 241 #if defined PETSC_GAMG_USE_LOG 242 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 243 #endif 244 PetscFunctionReturn(0); 245 } 246 247 ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr); 248 targetPE = rank/rfactor; 249 ierr = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr); 250 } /* end simple 'is_eq_newproc' */ 251 252 /* 253 Create an index set from the is_eq_newproc index set to indicate the mapping TO 254 */ 255 ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr); 256 is_eq_num_prim = is_eq_num; 257 /* 258 Determine how many equations/vertices are assigned to each processor 259 */ 260 ierr = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr); 261 ncrs_eq_new = counts[rank]; 262 ierr = ISDestroy(&is_eq_newproc);CHKERRQ(ierr); 263 ncrs_new = ncrs_eq_new/cr_bs; /* eqs */ 264 265 ierr = PetscFree(counts);CHKERRQ(ierr); 266 #if defined PETSC_GAMG_USE_LOG 267 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 268 #endif 269 /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxilary data into some complex abstracted thing */ 270 { 271 Vec src_crd, dest_crd; 272 const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols; 273 VecScatter vecscat; 274 PetscScalar *array; 275 IS isscat; 276 277 /* move data (for primal equations only) */ 278 /* Create a vector to contain the newly ordered element information */ 279 ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr); 280 ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr); 281 ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */ 282 /* 283 There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having 284 a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'. 285 */ 286 ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr); 287 ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 288 for (ii=0,jj=0; ii<ncrs; ii++) { 289 PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */ 290 for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk; 291 } 292 ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 293 ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr); 294 ierr = PetscFree(tidx);CHKERRQ(ierr); 295 /* 296 Create a vector to contain the original vertex information for each element 297 */ 298 ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr); 299 for (jj=0; jj<ndata_cols; jj++) { 300 const PetscInt stride0=ncrs*pc_gamg->data_cell_rows; 301 for (ii=0; ii<ncrs; ii++) { 302 for (kk=0; kk<ndata_rows; kk++) { 303 PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj; 304 PetscScalar tt = (PetscScalar)pc_gamg->data[ix]; 305 ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr); 306 } 307 } 308 } 309 ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr); 310 ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr); 311 /* 312 Scatter the element vertex information (still in the original vertex ordering) 313 to the correct processor 314 */ 315 ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr); 316 ierr = ISDestroy(&isscat);CHKERRQ(ierr); 317 ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 318 ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 319 ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr); 320 ierr = VecDestroy(&src_crd);CHKERRQ(ierr); 321 /* 322 Put the element vertex data into a new allocation of the gdata->ele 323 */ 324 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 325 ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr); 326 327 pc_gamg->data_sz = node_data_sz*ncrs_new; 328 strideNew = ncrs_new*ndata_rows; 329 330 ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr); 331 for (jj=0; jj<ndata_cols; jj++) { 332 for (ii=0; ii<ncrs_new; ii++) { 333 for (kk=0; kk<ndata_rows; kk++) { 334 PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj; 335 pc_gamg->data[ix] = PetscRealPart(array[jx]); 336 } 337 } 338 } 339 ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr); 340 ierr = VecDestroy(&dest_crd);CHKERRQ(ierr); 341 } 342 /* move A and P (columns) with new layout */ 343 #if defined PETSC_GAMG_USE_LOG 344 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 345 #endif 346 347 /* 348 Invert for MatGetSubMatrix 349 */ 350 ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr); 351 ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */ 352 ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr); 353 if (is_eq_num != is_eq_num_prim) { 354 ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */ 355 } 356 if (Pcolumnperm) { 357 ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr); 358 *Pcolumnperm = new_eq_indices; 359 } 360 ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr); 361 #if defined PETSC_GAMG_USE_LOG 362 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 363 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 364 #endif 365 /* 'a_Amat_crs' output */ 366 { 367 Mat mat; 368 ierr = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr); 369 *a_Amat_crs = mat; 370 } 371 ierr = MatDestroy(&Cmat);CHKERRQ(ierr); 372 373 #if defined PETSC_GAMG_USE_LOG 374 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 375 #endif 376 /* prolongator */ 377 { 378 IS findices; 379 PetscInt Istart,Iend; 380 Mat Pnew; 381 382 ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr); 383 #if defined PETSC_GAMG_USE_LOG 384 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 385 #endif 386 ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr); 387 ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr); 388 ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr); 389 ierr = ISDestroy(&findices);CHKERRQ(ierr); 390 391 #if defined PETSC_GAMG_USE_LOG 392 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 393 #endif 394 ierr = MatDestroy(a_P_inout);CHKERRQ(ierr); 395 396 /* output - repartitioned */ 397 *a_P_inout = Pnew; 398 } 399 ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr); 400 401 *a_nactive_proc = new_size; /* output */ 402 } 403 PetscFunctionReturn(0); 404 } 405 406 /* -------------------------------------------------------------------------- */ 407 /* 408 PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner 409 by setting data structures and options. 410 411 Input Parameter: 412 . pc - the preconditioner context 413 414 */ 415 #undef __FUNCT__ 416 #define __FUNCT__ "PCSetUp_GAMG" 417 PetscErrorCode PCSetUp_GAMG(PC pc) 418 { 419 PetscErrorCode ierr; 420 PC_MG *mg = (PC_MG*)pc->data; 421 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 422 Mat Pmat = pc->pmat; 423 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 424 MPI_Comm comm; 425 PetscMPIInt rank,size,nactivepe; 426 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 427 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 428 PetscLogDouble nnz0=0.,nnztot=0.; 429 MatInfo info; 430 431 PetscFunctionBegin; 432 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 433 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 434 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 435 436 if (pc_gamg->setup_count++ > 0) { 437 if ((PetscBool)(!pc_gamg->reuse_prol)) { 438 /* reset everything */ 439 ierr = PCReset_MG(pc);CHKERRQ(ierr); 440 pc->setupcalled = 0; 441 } else { 442 PC_MG_Levels **mglevels = mg->levels; 443 /* just do Galerkin grids */ 444 Mat B,dA,dB; 445 446 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 447 if (pc_gamg->Nlevels > 1) { 448 /* currently only handle case where mat and pmat are the same on coarser levels */ 449 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr); 450 /* (re)set to get dirty flag */ 451 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr); 452 453 for (level=pc_gamg->Nlevels-2; level>=0; level--) { 454 /* the first time through the matrix structure has changed from repartitioning */ 455 if (pc_gamg->setup_count==2) { 456 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 457 ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 458 459 mglevels[level]->A = B; 460 } else { 461 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr); 462 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 463 } 464 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr); 465 dB = B; 466 } 467 } 468 469 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 470 PetscFunctionReturn(0); 471 } 472 } 473 474 if (!pc_gamg->data) { 475 if (pc_gamg->orig_data) { 476 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 477 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 478 479 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 480 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 481 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 482 483 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr); 484 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 485 } else { 486 if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data"); 487 ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr); 488 } 489 } 490 491 /* cache original data for reuse */ 492 if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) { 493 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr); 494 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 495 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 496 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 497 } 498 499 /* get basic dims */ 500 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 501 ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr); 502 503 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */ 504 nnz0 = info.nz_used; 505 nnztot = info.nz_used; 506 ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,(int)(nnz0/(PetscReal)M+0.5),size);CHKERRQ(ierr); 507 508 /* Get A_i and R_i */ 509 for (level=0, Aarr[0]=Pmat, nactivepe = size; level < (pc_gamg->Nlevels-1) && (!level || M>pc_gamg->coarse_eq_limit); level++) { 510 pc_gamg->current_level = level; 511 level1 = level + 1; 512 #if defined PETSC_GAMG_USE_LOG 513 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 514 #if (defined GAMG_STAGES) 515 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 516 #endif 517 #endif 518 { /* construct prolongator */ 519 Mat Gmat; 520 PetscCoarsenData *agg_lists; 521 Mat Prol11; 522 523 ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr); 524 ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 525 ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr); 526 527 /* could have failed to create new level */ 528 if (Prol11) { 529 /* get new block size of coarse matrices */ 530 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 531 532 if (pc_gamg->ops->optprolongator) { 533 /* smooth */ 534 ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr); 535 } 536 537 Parr[level1] = Prol11; 538 } else Parr[level1] = NULL; 539 540 if (pc_gamg->use_aggs_in_gasm) { 541 PetscInt bs; 542 ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr); 543 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 544 } 545 546 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 547 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 548 } /* construct prolongator scope */ 549 #if defined PETSC_GAMG_USE_LOG 550 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 551 #endif 552 if (!level) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 553 if (!Parr[level1]) { 554 ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr); 555 #if defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES 556 ierr = PetscLogStagePop();CHKERRQ(ierr); 557 #endif 558 break; 559 } 560 #if defined PETSC_GAMG_USE_LOG 561 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 562 #endif 563 564 ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs,&Parr[level1], &Aarr[level1], &nactivepe, NULL);CHKERRQ(ierr); 565 566 #if defined PETSC_GAMG_USE_LOG 567 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 568 #endif 569 ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr); 570 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 571 nnztot += info.nz_used; 572 ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr); 573 574 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 575 ierr = PetscLogStagePop();CHKERRQ(ierr); 576 #endif 577 /* stop if one node or one proc -- could pull back for singular problems */ 578 if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) { 579 ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D. Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr); 580 level++; 581 break; 582 } 583 } /* levels */ 584 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 585 586 ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr); 587 pc_gamg->Nlevels = level + 1; 588 fine_level = level; 589 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 590 591 if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 592 /* set default smoothers & set operators */ 593 for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) { 594 KSP smoother; 595 PC subpc; 596 597 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 598 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 599 600 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 601 /* set ops */ 602 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr); 603 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 604 605 /* set defaults */ 606 ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 607 608 /* set blocks for GASM smoother that uses the 'aggregates' */ 609 if (pc_gamg->use_aggs_in_gasm) { 610 PetscInt sz; 611 IS *is; 612 613 sz = nASMBlocksArr[level]; 614 is = ASMLocalIDsArr[level]; 615 ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr); 616 ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr); 617 if (!sz) { 618 IS is; 619 PetscInt my0,kk; 620 ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr); 621 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 622 ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr); 623 ierr = ISDestroy(&is);CHKERRQ(ierr); 624 } else { 625 PetscInt kk; 626 ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr); 627 for (kk=0; kk<sz; kk++) { 628 ierr = ISDestroy(&is[kk]);CHKERRQ(ierr); 629 } 630 ierr = PetscFree(is);CHKERRQ(ierr); 631 } 632 ASMLocalIDsArr[level] = NULL; 633 nASMBlocksArr[level] = 0; 634 ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr); 635 } else { 636 ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr); 637 } 638 } 639 { 640 /* coarse grid */ 641 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 642 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 643 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 644 ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr); 645 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 646 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 647 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 648 ierr = PCSetUp(subpc);CHKERRQ(ierr); 649 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 650 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 651 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 652 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 653 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 654 ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr); 655 ierr = KSPSetType(k2[0], KSPPREONLY);CHKERRQ(ierr); 656 /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in 657 * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to 658 * view every subdomain as though they were different. */ 659 ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE; 660 } 661 662 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 663 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 664 ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr); 665 ierr = PetscOptionsEnd();CHKERRQ(ierr); 666 if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators."); 667 if (mg->galerkin == 1) mg->galerkin = 2; 668 669 /* clean up */ 670 for (level=1; level<pc_gamg->Nlevels; level++) { 671 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 672 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 673 } 674 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 675 } else { 676 KSP smoother; 677 ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr); 678 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 679 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr); 680 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 681 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 682 } 683 PetscFunctionReturn(0); 684 } 685 686 /* ------------------------------------------------------------------------- */ 687 /* 688 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 689 that was created with PCCreate_GAMG(). 690 691 Input Parameter: 692 . pc - the preconditioner context 693 694 Application Interface Routine: PCDestroy() 695 */ 696 #undef __FUNCT__ 697 #define __FUNCT__ "PCDestroy_GAMG" 698 PetscErrorCode PCDestroy_GAMG(PC pc) 699 { 700 PetscErrorCode ierr; 701 PC_MG *mg = (PC_MG*)pc->data; 702 PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx; 703 704 PetscFunctionBegin; 705 ierr = PCReset_GAMG(pc);CHKERRQ(ierr); 706 if (pc_gamg->ops->destroy) { 707 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 708 } 709 ierr = PetscRandomDestroy(&pc_gamg->random);CHKERRQ(ierr); 710 ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr); 711 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 712 ierr = PetscFree(pc_gamg);CHKERRQ(ierr); 713 ierr = PCDestroy_MG(pc);CHKERRQ(ierr); 714 PetscFunctionReturn(0); 715 } 716 717 #undef __FUNCT__ 718 #define __FUNCT__ "PCGAMGSetProcEqLim" 719 /*@ 720 PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via processor reduction. 721 722 Logically Collective on PC 723 724 Input Parameters: 725 + pc - the preconditioner context 726 - n - the number of equations 727 728 729 Options Database Key: 730 . -pc_gamg_process_eq_limit <limit> 731 732 Level: intermediate 733 734 Concepts: Unstructured multigrid preconditioner 735 736 .seealso: PCGAMGSetCoarseEqLim() 737 @*/ 738 PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n) 739 { 740 PetscErrorCode ierr; 741 742 PetscFunctionBegin; 743 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 744 ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 745 PetscFunctionReturn(0); 746 } 747 748 #undef __FUNCT__ 749 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG" 750 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n) 751 { 752 PC_MG *mg = (PC_MG*)pc->data; 753 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 754 755 PetscFunctionBegin; 756 if (n>0) pc_gamg->min_eq_proc = n; 757 PetscFunctionReturn(0); 758 } 759 760 #undef __FUNCT__ 761 #define __FUNCT__ "PCGAMGSetCoarseEqLim" 762 /*@ 763 PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids. 764 765 Collective on PC 766 767 Input Parameters: 768 + pc - the preconditioner context 769 - n - maximum number of equations to aim for 770 771 Options Database Key: 772 . -pc_gamg_coarse_eq_limit <limit> 773 774 Level: intermediate 775 776 Concepts: Unstructured multigrid preconditioner 777 778 .seealso: PCGAMGSetProcEqLim() 779 @*/ 780 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n) 781 { 782 PetscErrorCode ierr; 783 784 PetscFunctionBegin; 785 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 786 ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 787 PetscFunctionReturn(0); 788 } 789 790 #undef __FUNCT__ 791 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG" 792 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n) 793 { 794 PC_MG *mg = (PC_MG*)pc->data; 795 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 796 797 PetscFunctionBegin; 798 if (n>0) pc_gamg->coarse_eq_limit = n; 799 PetscFunctionReturn(0); 800 } 801 802 #undef __FUNCT__ 803 #define __FUNCT__ "PCGAMGSetRepartitioning" 804 /*@ 805 PCGAMGSetRepartitioning - Repartition the coarse grids 806 807 Collective on PC 808 809 Input Parameters: 810 + pc - the preconditioner context 811 - n - PETSC_TRUE or PETSC_FALSE 812 813 Options Database Key: 814 . -pc_gamg_repartition <true,false> 815 816 Level: intermediate 817 818 Concepts: Unstructured multigrid preconditioner 819 820 .seealso: () 821 @*/ 822 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n) 823 { 824 PetscErrorCode ierr; 825 826 PetscFunctionBegin; 827 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 828 ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 829 PetscFunctionReturn(0); 830 } 831 832 #undef __FUNCT__ 833 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG" 834 static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n) 835 { 836 PC_MG *mg = (PC_MG*)pc->data; 837 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 838 839 PetscFunctionBegin; 840 pc_gamg->repart = n; 841 PetscFunctionReturn(0); 842 } 843 844 #undef __FUNCT__ 845 #define __FUNCT__ "PCGAMGSetReuseInterpolation" 846 /*@ 847 PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner 848 849 Collective on PC 850 851 Input Parameters: 852 + pc - the preconditioner context 853 - n - PETSC_TRUE or PETSC_FALSE 854 855 Options Database Key: 856 . -pc_gamg_reuse_interpolation <true,false> 857 858 Level: intermediate 859 860 Concepts: Unstructured multigrid preconditioner 861 862 .seealso: () 863 @*/ 864 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n) 865 { 866 PetscErrorCode ierr; 867 868 PetscFunctionBegin; 869 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 870 ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 871 PetscFunctionReturn(0); 872 } 873 874 #undef __FUNCT__ 875 #define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG" 876 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n) 877 { 878 PC_MG *mg = (PC_MG*)pc->data; 879 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 880 881 PetscFunctionBegin; 882 pc_gamg->reuse_prol = n; 883 PetscFunctionReturn(0); 884 } 885 886 #undef __FUNCT__ 887 #define __FUNCT__ "PCGAMGSetUseASMAggs" 888 /*@ 889 PCGAMGSetUseASMAggs - 890 891 Collective on PC 892 893 Input Parameters: 894 . pc - the preconditioner context 895 896 897 Options Database Key: 898 . -pc_gamg_use_agg_gasm 899 900 Level: intermediate 901 902 Concepts: Unstructured multigrid preconditioner 903 904 .seealso: () 905 @*/ 906 PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n) 907 { 908 PetscErrorCode ierr; 909 910 PetscFunctionBegin; 911 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 912 ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 913 PetscFunctionReturn(0); 914 } 915 916 #undef __FUNCT__ 917 #define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG" 918 static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n) 919 { 920 PC_MG *mg = (PC_MG*)pc->data; 921 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 922 923 PetscFunctionBegin; 924 pc_gamg->use_aggs_in_gasm = n; 925 PetscFunctionReturn(0); 926 } 927 928 #undef __FUNCT__ 929 #define __FUNCT__ "PCGAMGSetNlevels" 930 /*@ 931 PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use 932 933 Not collective on PC 934 935 Input Parameters: 936 + pc - the preconditioner 937 - n - the maximum number of levels to use 938 939 Options Database Key: 940 . -pc_mg_levels 941 942 Level: intermediate 943 944 Concepts: Unstructured multigrid preconditioner 945 946 .seealso: () 947 @*/ 948 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n) 949 { 950 PetscErrorCode ierr; 951 952 PetscFunctionBegin; 953 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 954 ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 955 PetscFunctionReturn(0); 956 } 957 958 #undef __FUNCT__ 959 #define __FUNCT__ "PCGAMGSetNlevels_GAMG" 960 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n) 961 { 962 PC_MG *mg = (PC_MG*)pc->data; 963 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 964 965 PetscFunctionBegin; 966 pc_gamg->Nlevels = n; 967 PetscFunctionReturn(0); 968 } 969 970 #undef __FUNCT__ 971 #define __FUNCT__ "PCGAMGSetThreshold" 972 /*@ 973 PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph 974 975 Not collective on PC 976 977 Input Parameters: 978 + pc - the preconditioner context 979 - threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph 980 981 Options Database Key: 982 . -pc_gamg_threshold <threshold> 983 984 Level: intermediate 985 986 Concepts: Unstructured multigrid preconditioner 987 988 .seealso: () 989 @*/ 990 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n) 991 { 992 PetscErrorCode ierr; 993 994 PetscFunctionBegin; 995 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 996 ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr); 997 PetscFunctionReturn(0); 998 } 999 1000 #undef __FUNCT__ 1001 #define __FUNCT__ "PCGAMGSetThreshold_GAMG" 1002 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n) 1003 { 1004 PC_MG *mg = (PC_MG*)pc->data; 1005 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1006 1007 PetscFunctionBegin; 1008 pc_gamg->threshold = n; 1009 PetscFunctionReturn(0); 1010 } 1011 1012 #undef __FUNCT__ 1013 #define __FUNCT__ "PCGAMGSetType" 1014 /*@ 1015 PCGAMGSetType - Set solution method 1016 1017 Collective on PC 1018 1019 Input Parameters: 1020 + pc - the preconditioner context 1021 - type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL 1022 1023 Options Database Key: 1024 . -pc_gamg_type <agg,geo,classical> 1025 1026 Level: intermediate 1027 1028 Concepts: Unstructured multigrid preconditioner 1029 1030 .seealso: PCGAMGGetType(), PCGAMG 1031 @*/ 1032 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) 1033 { 1034 PetscErrorCode ierr; 1035 1036 PetscFunctionBegin; 1037 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1038 ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr); 1039 PetscFunctionReturn(0); 1040 } 1041 1042 #undef __FUNCT__ 1043 #define __FUNCT__ "PCGAMGGetType" 1044 /*@ 1045 PCGAMGGetType - Get solution method 1046 1047 Collective on PC 1048 1049 Input Parameter: 1050 . pc - the preconditioner context 1051 1052 Output Parameter: 1053 . type - the type of algorithm used 1054 1055 Level: intermediate 1056 1057 Concepts: Unstructured multigrid preconditioner 1058 1059 .seealso: PCGAMGSetType(), PCGAMGType 1060 @*/ 1061 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type) 1062 { 1063 PetscErrorCode ierr; 1064 1065 PetscFunctionBegin; 1066 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1067 ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr); 1068 PetscFunctionReturn(0); 1069 } 1070 1071 #undef __FUNCT__ 1072 #define __FUNCT__ "PCGAMGGetType_GAMG" 1073 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type) 1074 { 1075 PC_MG *mg = (PC_MG*)pc->data; 1076 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1077 1078 PetscFunctionBegin; 1079 *type = pc_gamg->type; 1080 PetscFunctionReturn(0); 1081 } 1082 1083 #undef __FUNCT__ 1084 #define __FUNCT__ "PCGAMGSetType_GAMG" 1085 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type) 1086 { 1087 PetscErrorCode ierr,(*r)(PC); 1088 PC_MG *mg = (PC_MG*)pc->data; 1089 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1090 1091 PetscFunctionBegin; 1092 pc_gamg->type = type; 1093 ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr); 1094 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type); 1095 if (pc_gamg->ops->destroy) { 1096 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 1097 ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr); 1098 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1099 /* cleaning up common data in pc_gamg - this should disapear someday */ 1100 pc_gamg->data_cell_cols = 0; 1101 pc_gamg->data_cell_rows = 0; 1102 pc_gamg->orig_data_cell_cols = 0; 1103 pc_gamg->orig_data_cell_rows = 0; 1104 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 1105 pc_gamg->data_sz = 0; 1106 } 1107 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 1108 ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr); 1109 ierr = (*r)(pc);CHKERRQ(ierr); 1110 PetscFunctionReturn(0); 1111 } 1112 1113 #undef __FUNCT__ 1114 #define __FUNCT__ "PCView_GAMG" 1115 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer) 1116 { 1117 PetscErrorCode ierr; 1118 PC_MG *mg = (PC_MG*)pc->data; 1119 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1120 1121 PetscFunctionBegin; 1122 ierr = PetscViewerASCIIPrintf(viewer," GAMG specific options\n");CHKERRQ(ierr); 1123 ierr = PetscViewerASCIIPrintf(viewer," Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr); 1124 if (pc_gamg->ops->view) { 1125 ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr); 1126 } 1127 PetscFunctionReturn(0); 1128 } 1129 1130 #undef __FUNCT__ 1131 #define __FUNCT__ "PCSetFromOptions_GAMG" 1132 PetscErrorCode PCSetFromOptions_GAMG(PetscOptionItems *PetscOptionsObject,PC pc) 1133 { 1134 PetscErrorCode ierr; 1135 PC_MG *mg = (PC_MG*)pc->data; 1136 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1137 PetscBool flag; 1138 MPI_Comm comm; 1139 char prefix[256]; 1140 const char *pcpre; 1141 1142 PetscFunctionBegin; 1143 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1144 ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr); 1145 { 1146 char tname[256]; 1147 ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr); 1148 if (flag) { 1149 ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr); 1150 } 1151 ierr = PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr); 1152 ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr); 1153 ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation agragates for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);CHKERRQ(ierr); 1154 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr); 1155 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr); 1156 ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr); 1157 ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 1158 1159 /* set options for subtype */ 1160 if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);} 1161 } 1162 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 1163 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 1164 ierr = PetscObjectSetOptionsPrefix((PetscObject)pc_gamg->random,prefix);CHKERRQ(ierr); 1165 ierr = PetscRandomSetFromOptions(pc_gamg->random);CHKERRQ(ierr); 1166 ierr = PetscOptionsTail();CHKERRQ(ierr); 1167 PetscFunctionReturn(0); 1168 } 1169 1170 /* -------------------------------------------------------------------------- */ 1171 /*MC 1172 PCGAMG - Geometric algebraic multigrid (AMG) preconditioner 1173 1174 Options Database Keys: 1175 Multigrid options(inherited) 1176 + -pc_mg_cycles <v>: v or w (PCMGSetCycleType()) 1177 . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp) 1178 . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown) 1179 - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade 1180 1181 1182 Notes: In order to obtain good performance for PCGAMG for vector valued problems you must 1183 $ Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point 1184 $ Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator 1185 $ See the Users Manual Chapter 4 for more details 1186 1187 Level: intermediate 1188 1189 Concepts: algebraic multigrid 1190 1191 .seealso: PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 1192 PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType() 1193 M*/ 1194 1195 #undef __FUNCT__ 1196 #define __FUNCT__ "PCCreate_GAMG" 1197 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc) 1198 { 1199 PetscErrorCode ierr; 1200 PC_GAMG *pc_gamg; 1201 PC_MG *mg; 1202 1203 PetscFunctionBegin; 1204 /* register AMG type */ 1205 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1206 1207 /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */ 1208 ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); 1209 ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr); 1210 1211 /* create a supporting struct and attach it to pc */ 1212 ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr); 1213 mg = (PC_MG*)pc->data; 1214 mg->galerkin = 2; /* Use Galerkin, but it is computed externally from PCMG by GAMG code */ 1215 mg->innerctx = pc_gamg; 1216 1217 ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr); 1218 1219 pc_gamg->setup_count = 0; 1220 /* these should be in subctx but repartitioning needs simple arrays */ 1221 pc_gamg->data_sz = 0; 1222 pc_gamg->data = 0; 1223 1224 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1225 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1226 pc->ops->setup = PCSetUp_GAMG; 1227 pc->ops->reset = PCReset_GAMG; 1228 pc->ops->destroy = PCDestroy_GAMG; 1229 mg->view = PCView_GAMG; 1230 1231 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr); 1232 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr); 1233 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr); 1234 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr); 1235 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr); 1236 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr); 1237 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr); 1238 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr); 1239 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr); 1240 pc_gamg->repart = PETSC_FALSE; 1241 pc_gamg->reuse_prol = PETSC_FALSE; 1242 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1243 pc_gamg->min_eq_proc = 50; 1244 pc_gamg->coarse_eq_limit = 50; 1245 pc_gamg->threshold = 0.; 1246 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1247 pc_gamg->current_level = 0; /* don't need to init really */ 1248 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1249 1250 ierr = PetscRandomCreate(PetscObjectComm((PetscObject)pc),&pc_gamg->random);CHKERRQ(ierr); 1251 1252 /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */ 1253 ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr); 1254 PetscFunctionReturn(0); 1255 } 1256 1257 #undef __FUNCT__ 1258 #define __FUNCT__ "PCGAMGInitializePackage" 1259 /*@C 1260 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called 1261 from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG() 1262 when using static libraries. 1263 1264 Level: developer 1265 1266 .keywords: PC, PCGAMG, initialize, package 1267 .seealso: PetscInitialize() 1268 @*/ 1269 PetscErrorCode PCGAMGInitializePackage(void) 1270 { 1271 PetscErrorCode ierr; 1272 1273 PetscFunctionBegin; 1274 if (PCGAMGPackageInitialized) PetscFunctionReturn(0); 1275 PCGAMGPackageInitialized = PETSC_TRUE; 1276 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr); 1277 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr); 1278 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr); 1279 ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr); 1280 1281 /* general events */ 1282 ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr); 1283 ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr); 1284 ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr); 1285 ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr); 1286 ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr); 1287 ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr); 1288 ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr); 1289 1290 #if defined PETSC_GAMG_USE_LOG 1291 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1292 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1293 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1294 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1295 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1296 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1297 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1298 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1299 ierr = PetscLogEventRegister(" search-set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1300 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1301 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1302 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1303 ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1304 ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1305 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1306 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1307 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1308 1309 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1310 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1311 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1312 /* create timer stages */ 1313 #if defined GAMG_STAGES 1314 { 1315 char str[32]; 1316 PetscInt lidx; 1317 sprintf(str,"MG Level %d (finest)",0); 1318 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1319 for (lidx=1; lidx<9; lidx++) { 1320 sprintf(str,"MG Level %d",lidx); 1321 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 1322 } 1323 } 1324 #endif 1325 #endif 1326 PetscFunctionReturn(0); 1327 } 1328 1329 #undef __FUNCT__ 1330 #define __FUNCT__ "PCGAMGFinalizePackage" 1331 /*@C 1332 PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is 1333 called from PetscFinalize() automatically. 1334 1335 Level: developer 1336 1337 .keywords: Petsc, destroy, package 1338 .seealso: PetscFinalize() 1339 @*/ 1340 PetscErrorCode PCGAMGFinalizePackage(void) 1341 { 1342 PetscErrorCode ierr; 1343 1344 PetscFunctionBegin; 1345 PCGAMGPackageInitialized = PETSC_FALSE; 1346 ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr); 1347 PetscFunctionReturn(0); 1348 } 1349 1350 #undef __FUNCT__ 1351 #define __FUNCT__ "PCGAMGRegister" 1352 /*@C 1353 PCGAMGRegister - Register a PCGAMG implementation. 1354 1355 Input Parameters: 1356 + type - string that will be used as the name of the GAMG type. 1357 - create - function for creating the gamg context. 1358 1359 Level: advanced 1360 1361 .seealso: PCGAMGType, PCGAMG, PCGAMGSetType() 1362 @*/ 1363 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC)) 1364 { 1365 PetscErrorCode ierr; 1366 1367 PetscFunctionBegin; 1368 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1369 ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr); 1370 PetscFunctionReturn(0); 1371 } 1372 1373