1 /* 2 GAMG geometric-algebric multigrid PC - Mark Adams 2011 3 */ 4 #include <petsc/private/matimpl.h> 5 #include <../src/ksp/pc/impls/gamg/gamg.h> /*I "petscpc.h" I*/ 6 #include <petsc/private/kspimpl.h> 7 #include <../src/ksp/pc/impls/bjacobi/bjacobi.h> /* Hack to access same_local_solves */ 8 9 #if defined PETSC_GAMG_USE_LOG 10 PetscLogEvent petsc_gamg_setup_events[NUM_SET]; 11 #endif 12 13 #if defined PETSC_USE_LOG 14 PetscLogEvent PC_GAMGGraph_AGG; 15 PetscLogEvent PC_GAMGGraph_GEO; 16 PetscLogEvent PC_GAMGCoarsen_AGG; 17 PetscLogEvent PC_GAMGCoarsen_GEO; 18 PetscLogEvent PC_GAMGProlongator_AGG; 19 PetscLogEvent PC_GAMGProlongator_GEO; 20 PetscLogEvent PC_GAMGOptProlongator_AGG; 21 #endif 22 23 #define GAMG_MAXLEVELS 30 24 25 /* #define GAMG_STAGES */ 26 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 27 static PetscLogStage gamg_stages[GAMG_MAXLEVELS]; 28 #endif 29 30 static PetscFunctionList GAMGList = 0; 31 static PetscBool PCGAMGPackageInitialized; 32 33 /* ----------------------------------------------------------------------------- */ 34 #undef __FUNCT__ 35 #define __FUNCT__ "PCReset_GAMG" 36 PetscErrorCode PCReset_GAMG(PC pc) 37 { 38 PetscErrorCode ierr; 39 PC_MG *mg = (PC_MG*)pc->data; 40 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 41 42 PetscFunctionBegin; 43 if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */ 44 PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__); 45 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 46 } 47 pc_gamg->data_sz = 0; 48 ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr); 49 PetscFunctionReturn(0); 50 } 51 52 /* -------------------------------------------------------------------------- */ 53 /* 54 PCGAMGCreateLevel_GAMG: create coarse op with RAP. repartition and/or reduce number 55 of active processors. 56 57 Input Parameter: 58 . pc - parameters + side effect: coarse data in 'pc_gamg->data' and 59 'pc_gamg->data_sz' are changed via repartitioning/reduction. 60 . Amat_fine - matrix on this fine (k) level 61 . cr_bs - coarse block size 62 In/Output Parameter: 63 . a_P_inout - prolongation operator to the next level (k-->k-1) 64 . a_nactive_proc - number of active procs 65 Output Parameter: 66 . a_Amat_crs - coarse matrix that is created (k-1) 67 */ 68 69 #undef __FUNCT__ 70 #define __FUNCT__ "PCGAMGCreateLevel_GAMG" 71 static PetscErrorCode PCGAMGCreateLevel_GAMG(PC pc,Mat Amat_fine,PetscInt cr_bs, 72 Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc, 73 IS * Pcolumnperm) 74 { 75 PetscErrorCode ierr; 76 PC_MG *mg = (PC_MG*)pc->data; 77 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 78 Mat Cmat,Pold=*a_P_inout; 79 MPI_Comm comm; 80 PetscMPIInt rank,size,new_size,nactive=*a_nactive_proc; 81 PetscInt ncrs_eq,ncrs,f_bs; 82 83 PetscFunctionBegin; 84 ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr); 85 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 86 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 87 ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr); 88 ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr); 89 90 /* set 'ncrs' (nodes), 'ncrs_eq' (equations)*/ 91 ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr); 92 if (pc_gamg->data_cell_rows>0) { 93 ncrs = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows; 94 } else { 95 PetscInt bs; 96 ierr = MatGetBlockSize(Cmat, &bs);CHKERRQ(ierr); 97 ncrs = ncrs_eq/bs; 98 } 99 100 /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */ 101 { 102 PetscInt ncrs_eq_glob; 103 ierr = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr); 104 new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)pc_gamg->min_eq_proc + 0.5); /* hardwire min. number of eq/proc */ 105 if (new_size == 0) new_size = 1; /* not likely, posible? */ 106 else if (new_size >= nactive) new_size = nactive; /* no change, rare */ 107 } 108 109 if (Pcolumnperm) *Pcolumnperm = NULL; 110 111 if (!pc_gamg->repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */ 112 else { 113 PetscInt *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_new,ncrs_eq_new,nloc_old; 114 IS is_eq_newproc,is_eq_num,is_eq_num_prim,new_eq_indices; 115 116 nloc_old = ncrs_eq/cr_bs; 117 if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs); 118 #if defined PETSC_GAMG_USE_LOG 119 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 120 #endif 121 /* make 'is_eq_newproc' */ 122 ierr = PetscMalloc1(size, &counts);CHKERRQ(ierr); 123 if (pc_gamg->repart) { 124 /* Repartition Cmat_{k} and move colums of P^{k}_{k-1} and coordinates of primal part accordingly */ 125 Mat adj; 126 127 ierr = PetscInfo3(pc,"Repartition: size (active): %D --> %D, neq = %D\n",*a_nactive_proc,new_size,ncrs_eq);CHKERRQ(ierr); 128 129 /* get 'adj' */ 130 if (cr_bs == 1) { 131 ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 132 } else { 133 /* make a scalar matrix to partition (no Stokes here) */ 134 Mat tMat; 135 PetscInt Istart_crs,Iend_crs,ncols,jj,Ii; 136 const PetscScalar *vals; 137 const PetscInt *idx; 138 PetscInt *d_nnz, *o_nnz, M, N; 139 static PetscInt llev = 0; 140 MatType mtype; 141 142 ierr = PetscMalloc2(ncrs, &d_nnz,ncrs, &o_nnz);CHKERRQ(ierr); 143 ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr); 144 ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr); 145 for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) { 146 ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 147 d_nnz[jj] = ncols/cr_bs; 148 o_nnz[jj] = ncols/cr_bs; 149 ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr); 150 if (d_nnz[jj] > ncrs) d_nnz[jj] = ncrs; 151 if (o_nnz[jj] > (M/cr_bs-ncrs)) o_nnz[jj] = M/cr_bs-ncrs; 152 } 153 154 ierr = MatGetType(Amat_fine,&mtype);CHKERRQ(ierr); 155 ierr = MatCreate(comm, &tMat);CHKERRQ(ierr); 156 ierr = MatSetSizes(tMat, ncrs, ncrs,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr); 157 ierr = MatSetType(tMat,mtype);CHKERRQ(ierr); 158 ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr); 159 ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 160 ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 161 162 for (ii = Istart_crs; ii < Iend_crs; ii++) { 163 PetscInt dest_row = ii/cr_bs; 164 ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 165 for (jj = 0; jj < ncols; jj++) { 166 PetscInt dest_col = idx[jj]/cr_bs; 167 PetscScalar v = 1.0; 168 ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr); 169 } 170 ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr); 171 } 172 ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 173 ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 174 175 if (llev++ == -1) { 176 PetscViewer viewer; char fname[32]; 177 ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr); 178 PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer); 179 ierr = MatView(tMat, viewer);CHKERRQ(ierr); 180 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 181 } 182 183 ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr); 184 185 ierr = MatDestroy(&tMat);CHKERRQ(ierr); 186 } /* create 'adj' */ 187 188 { /* partition: get newproc_idx */ 189 char prefix[256]; 190 const char *pcpre; 191 const PetscInt *is_idx; 192 MatPartitioning mpart; 193 IS proc_is; 194 PetscInt targetPE; 195 196 ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr); 197 ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr); 198 ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr); 199 ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr); 200 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 201 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 202 ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr); 203 ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr); 204 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 205 206 /* collect IS info */ 207 ierr = PetscMalloc1(ncrs_eq, &newproc_idx);CHKERRQ(ierr); 208 ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr); 209 targetPE = 1; /* bring to "front" of machine */ 210 /*targetPE = size/new_size;*/ /* spread partitioning across machine */ 211 for (kk = jj = 0 ; kk < nloc_old ; kk++) { 212 for (ii = 0 ; ii < cr_bs ; ii++, jj++) { 213 newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */ 214 } 215 } 216 ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr); 217 ierr = ISDestroy(&proc_is);CHKERRQ(ierr); 218 } 219 ierr = MatDestroy(&adj);CHKERRQ(ierr); 220 221 ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr); 222 ierr = PetscFree(newproc_idx);CHKERRQ(ierr); 223 } else { /* simple aggreagtion of parts -- 'is_eq_newproc' */ 224 225 PetscInt rfactor,targetPE; 226 /* find factor */ 227 if (new_size == 1) rfactor = size; /* easy */ 228 else { 229 PetscReal best_fact = 0.; 230 jj = -1; 231 for (kk = 1 ; kk <= size ; kk++) { 232 if (size%kk==0) { /* a candidate */ 233 PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size; 234 if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */ 235 if (fact > best_fact) { 236 best_fact = fact; jj = kk; 237 } 238 } 239 } 240 if (jj != -1) rfactor = jj; 241 else rfactor = 1; /* does this happen .. a prime */ 242 } 243 new_size = size/rfactor; 244 245 if (new_size==nactive) { 246 *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */ 247 ierr = PetscFree(counts);CHKERRQ(ierr); 248 ierr = PetscInfo2(pc,"Aggregate processors noop: new_size=%D, neq(loc)=%D\n",new_size,ncrs_eq);CHKERRQ(ierr); 249 PetscFunctionReturn(0); 250 } 251 252 ierr = PetscInfo1(pc,"Number of equations (loc) %D with simple aggregation\n",ncrs_eq);CHKERRQ(ierr); 253 targetPE = rank/rfactor; 254 ierr = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr); 255 } /* end simple 'is_eq_newproc' */ 256 257 /* 258 Create an index set from the is_eq_newproc index set to indicate the mapping TO 259 */ 260 ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr); 261 is_eq_num_prim = is_eq_num; 262 /* 263 Determine how many equations/vertices are assigned to each processor 264 */ 265 ierr = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr); 266 ncrs_eq_new = counts[rank]; 267 ierr = ISDestroy(&is_eq_newproc);CHKERRQ(ierr); 268 ncrs_new = ncrs_eq_new/cr_bs; /* eqs */ 269 270 ierr = PetscFree(counts);CHKERRQ(ierr); 271 #if defined PETSC_GAMG_USE_LOG 272 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr); 273 #endif 274 /* data movement scope -- this could be moved to subclasses so that we don't try to cram all auxilary data into some complex abstracted thing */ 275 { 276 Vec src_crd, dest_crd; 277 const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols; 278 VecScatter vecscat; 279 PetscScalar *array; 280 IS isscat; 281 282 /* move data (for primal equations only) */ 283 /* Create a vector to contain the newly ordered element information */ 284 ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr); 285 ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_new, PETSC_DECIDE);CHKERRQ(ierr); 286 ierr = VecSetType(dest_crd,VECSTANDARD);CHKERRQ(ierr); /* this is needed! */ 287 /* 288 There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having 289 a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'. 290 */ 291 ierr = PetscMalloc1(ncrs*node_data_sz, &tidx);CHKERRQ(ierr); 292 ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 293 for (ii=0,jj=0; ii<ncrs; ii++) { 294 PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */ 295 for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk; 296 } 297 ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr); 298 ierr = ISCreateGeneral(comm, node_data_sz*ncrs, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr); 299 ierr = PetscFree(tidx);CHKERRQ(ierr); 300 /* 301 Create a vector to contain the original vertex information for each element 302 */ 303 ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs, &src_crd);CHKERRQ(ierr); 304 for (jj=0; jj<ndata_cols; jj++) { 305 const PetscInt stride0=ncrs*pc_gamg->data_cell_rows; 306 for (ii=0; ii<ncrs; ii++) { 307 for (kk=0; kk<ndata_rows; kk++) { 308 PetscInt ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj; 309 PetscScalar tt = (PetscScalar)pc_gamg->data[ix]; 310 ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr); 311 } 312 } 313 } 314 ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr); 315 ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr); 316 /* 317 Scatter the element vertex information (still in the original vertex ordering) 318 to the correct processor 319 */ 320 ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr); 321 ierr = ISDestroy(&isscat);CHKERRQ(ierr); 322 ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 323 ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 324 ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr); 325 ierr = VecDestroy(&src_crd);CHKERRQ(ierr); 326 /* 327 Put the element vertex data into a new allocation of the gdata->ele 328 */ 329 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 330 ierr = PetscMalloc1(node_data_sz*ncrs_new, &pc_gamg->data);CHKERRQ(ierr); 331 332 pc_gamg->data_sz = node_data_sz*ncrs_new; 333 strideNew = ncrs_new*ndata_rows; 334 335 ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr); 336 for (jj=0; jj<ndata_cols; jj++) { 337 for (ii=0; ii<ncrs_new; ii++) { 338 for (kk=0; kk<ndata_rows; kk++) { 339 PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj; 340 pc_gamg->data[ix] = PetscRealPart(array[jx]); 341 } 342 } 343 } 344 ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr); 345 ierr = VecDestroy(&dest_crd);CHKERRQ(ierr); 346 } 347 /* move A and P (columns) with new layout */ 348 #if defined PETSC_GAMG_USE_LOG 349 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 350 #endif 351 352 /* 353 Invert for MatGetSubMatrix 354 */ 355 ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr); 356 ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */ 357 ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr); 358 if (is_eq_num != is_eq_num_prim) { 359 ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */ 360 } 361 if (Pcolumnperm) { 362 ierr = PetscObjectReference((PetscObject)new_eq_indices);CHKERRQ(ierr); 363 *Pcolumnperm = new_eq_indices; 364 } 365 ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr); 366 #if defined PETSC_GAMG_USE_LOG 367 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr); 368 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 369 #endif 370 /* 'a_Amat_crs' output */ 371 { 372 Mat mat; 373 ierr = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr); 374 *a_Amat_crs = mat; 375 376 if (!PETSC_TRUE) { 377 PetscInt cbs, rbs; 378 ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr); 379 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 380 ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr); 381 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr); 382 } 383 } 384 ierr = MatDestroy(&Cmat);CHKERRQ(ierr); 385 386 #if defined PETSC_GAMG_USE_LOG 387 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr); 388 #endif 389 /* prolongator */ 390 { 391 IS findices; 392 PetscInt Istart,Iend; 393 Mat Pnew; 394 ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr); 395 #if defined PETSC_GAMG_USE_LOG 396 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 397 #endif 398 ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr); 399 ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr); 400 ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr); 401 ierr = ISDestroy(&findices);CHKERRQ(ierr); 402 403 if (!PETSC_TRUE) { 404 PetscInt cbs, rbs; 405 ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr); 406 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 407 ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr); 408 ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr); 409 } 410 #if defined PETSC_GAMG_USE_LOG 411 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr); 412 #endif 413 ierr = MatDestroy(a_P_inout);CHKERRQ(ierr); 414 415 /* output - repartitioned */ 416 *a_P_inout = Pnew; 417 } 418 ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr); 419 420 *a_nactive_proc = new_size; /* output */ 421 } 422 423 /* outout matrix data */ 424 if (!PETSC_TRUE) { 425 PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs; 426 if (llev==0) { 427 sprintf(fname,"Cmat_%d.m",llev++); 428 PetscViewerASCIIOpen(comm,fname,&viewer); 429 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr); 430 ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr); 431 ierr = PetscViewerDestroy(&viewer); 432 } 433 sprintf(fname,"Cmat_%d.m",llev++); 434 PetscViewerASCIIOpen(comm,fname,&viewer); 435 ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr); 436 ierr = MatView(Cmat, viewer);CHKERRQ(ierr); 437 ierr = PetscViewerDestroy(&viewer); 438 } 439 PetscFunctionReturn(0); 440 } 441 442 /* -------------------------------------------------------------------------- */ 443 /* 444 PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner 445 by setting data structures and options. 446 447 Input Parameter: 448 . pc - the preconditioner context 449 450 */ 451 #undef __FUNCT__ 452 #define __FUNCT__ "PCSetUp_GAMG" 453 PetscErrorCode PCSetUp_GAMG(PC pc) 454 { 455 PetscErrorCode ierr; 456 PC_MG *mg = (PC_MG*)pc->data; 457 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 458 Mat Pmat = pc->pmat; 459 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 460 MPI_Comm comm; 461 PetscMPIInt rank,size,nactivepe; 462 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 463 PetscReal emaxs[GAMG_MAXLEVELS]; 464 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 465 PetscLogDouble nnz0=0.,nnztot=0.; 466 MatInfo info; 467 468 PetscFunctionBegin; 469 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 470 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 471 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 472 473 if (pc_gamg->setup_count++ > 0) { 474 if ((PetscBool)(!pc_gamg->reuse_prol)) { 475 /* reset everything */ 476 ierr = PCReset_MG(pc);CHKERRQ(ierr); 477 pc->setupcalled = 0; 478 } else { 479 PC_MG_Levels **mglevels = mg->levels; 480 /* just do Galerkin grids */ 481 Mat B,dA,dB; 482 483 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 484 if (pc_gamg->Nlevels > 1) { 485 /* currently only handle case where mat and pmat are the same on coarser levels */ 486 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB);CHKERRQ(ierr); 487 /* (re)set to get dirty flag */ 488 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB);CHKERRQ(ierr); 489 490 for (level=pc_gamg->Nlevels-2; level>=0; level--) { 491 /* the first time through the matrix structure has changed from repartitioning */ 492 if (pc_gamg->setup_count==2) { 493 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 494 ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 495 496 mglevels[level]->A = B; 497 } else { 498 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B);CHKERRQ(ierr); 499 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 500 } 501 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B);CHKERRQ(ierr); 502 dB = B; 503 } 504 } 505 506 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 507 508 PetscFunctionReturn(0); 509 } 510 } 511 512 if (!pc_gamg->data) { 513 if (pc_gamg->orig_data) { 514 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 515 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 516 517 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 518 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 519 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 520 521 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->data);CHKERRQ(ierr); 522 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 523 } else { 524 if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data"); 525 ierr = pc_gamg->ops->createdefaultdata(pc,Pmat);CHKERRQ(ierr); 526 } 527 } 528 529 /* cache original data for reuse */ 530 if (!pc_gamg->orig_data && (PetscBool)(!pc_gamg->reuse_prol)) { 531 ierr = PetscMalloc1(pc_gamg->data_sz, &pc_gamg->orig_data);CHKERRQ(ierr); 532 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 533 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 534 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 535 } 536 537 /* get basic dims */ 538 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 539 ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr); 540 541 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); /* global reduction */ 542 nnz0 = info.nz_used; 543 nnztot = info.nz_used; 544 ierr = PetscInfo6(pc,"level %d) N=%D, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n", 545 0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols, 546 (int)(nnz0/(PetscReal)M+0.5),size); 547 CHKERRQ(ierr); 548 549 /* Get A_i and R_i */ 550 for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */ 551 level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); 552 level++) { 553 pc_gamg->current_level = level; 554 level1 = level + 1; 555 #if defined PETSC_GAMG_USE_LOG 556 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 557 #if (defined GAMG_STAGES) 558 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 559 #endif 560 #endif 561 { /* construct prolongator */ 562 Mat Gmat; 563 PetscCoarsenData *agg_lists; 564 Mat Prol11; 565 566 ierr = pc_gamg->ops->graph(pc,Aarr[level], &Gmat);CHKERRQ(ierr); 567 ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 568 ierr = pc_gamg->ops->prolongator(pc,Aarr[level],Gmat,agg_lists,&Prol11);CHKERRQ(ierr); 569 570 /* could have failed to create new level */ 571 if (Prol11) { 572 /* get new block size of coarse matrices */ 573 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 574 575 if (pc_gamg->ops->optprolongator) { 576 /* smooth */ 577 ierr = pc_gamg->ops->optprolongator(pc, Aarr[level], &Prol11);CHKERRQ(ierr); 578 } 579 580 Parr[level1] = Prol11; 581 } else Parr[level1] = NULL; 582 583 if (pc_gamg->use_aggs_in_gasm) { 584 PetscInt bs; 585 ierr = MatGetBlockSizes(Prol11, &bs, NULL);CHKERRQ(ierr); 586 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 587 } 588 589 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 590 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 591 } /* construct prolongator scope */ 592 #if defined PETSC_GAMG_USE_LOG 593 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 594 #endif 595 /* cache eigen estimate */ 596 if (pc_gamg->emax_id != -1) { 597 PetscBool flag; 598 ierr = PetscObjectComposedDataGetReal((PetscObject)Aarr[level], pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr); 599 if (!flag) emaxs[level] = -1.; 600 } else emaxs[level] = -1.; 601 if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 602 if (!Parr[level1]) { 603 ierr = PetscInfo1(pc,"Stop gridding, level %D\n",level);CHKERRQ(ierr); 604 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 605 ierr = PetscLogStagePop();CHKERRQ(ierr); 606 #endif 607 break; 608 } 609 #if defined PETSC_GAMG_USE_LOG 610 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 611 #endif 612 613 ierr = pc_gamg->ops->createlevel(pc, Aarr[level], bs,&Parr[level1], &Aarr[level1], &nactivepe, NULL);CHKERRQ(ierr); 614 615 #if defined PETSC_GAMG_USE_LOG 616 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 617 #endif 618 ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr); 619 620 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 621 nnztot += info.nz_used; 622 ierr = PetscInfo5(pc,"%d) N=%D, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",level1,M,pc_gamg->data_cell_cols,(int)(info.nz_used/(PetscReal)M),nactivepe);CHKERRQ(ierr); 623 624 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 625 ierr = PetscLogStagePop();CHKERRQ(ierr); 626 #endif 627 /* stop if one node or one proc -- could pull back for singular problems */ 628 if ( (pc_gamg->data_cell_cols && M/pc_gamg->data_cell_cols < 2) || (!pc_gamg->data_cell_cols && M/bs < 2) ) { 629 ierr = PetscInfo2(pc,"HARD stop of coarsening on level %D. Grid too small: %D block nodes\n",level,M/bs);CHKERRQ(ierr); 630 level++; 631 break; 632 } 633 } /* levels */ 634 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 635 636 ierr = PetscInfo2(pc,"%D levels, grid complexity = %g\n",level+1,nnztot/nnz0);CHKERRQ(ierr); 637 pc_gamg->Nlevels = level + 1; 638 fine_level = level; 639 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 640 641 /* simple setup */ 642 if (!PETSC_TRUE) { 643 PC_MG_Levels **mglevels = mg->levels; 644 for (lidx=0,level=pc_gamg->Nlevels-1; 645 lidx<fine_level; 646 lidx++, level--) { 647 ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr); 648 ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level]);CHKERRQ(ierr); 649 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 650 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 651 } 652 ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0]);CHKERRQ(ierr); 653 654 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 655 } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 656 /* set default smoothers & set operators */ 657 for (lidx = 1, level = pc_gamg->Nlevels-2; 658 lidx <= fine_level; 659 lidx++, level--) { 660 KSP smoother; 661 PC subpc; 662 663 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 664 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 665 666 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 667 /* set ops */ 668 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level]);CHKERRQ(ierr); 669 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 670 671 /* set defaults */ 672 ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 673 674 /* set blocks for GASM smoother that uses the 'aggregates' */ 675 if (pc_gamg->use_aggs_in_gasm) { 676 PetscInt sz; 677 IS *is; 678 679 sz = nASMBlocksArr[level]; 680 is = ASMLocalIDsArr[level]; 681 ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr); 682 ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr); 683 if (sz==0) { 684 IS is; 685 PetscInt my0,kk; 686 ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr); 687 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 688 ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr); 689 ierr = ISDestroy(&is);CHKERRQ(ierr); 690 } else { 691 PetscInt kk; 692 ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr); 693 for (kk=0; kk<sz; kk++) { 694 ierr = ISDestroy(&is[kk]);CHKERRQ(ierr); 695 } 696 ierr = PetscFree(is);CHKERRQ(ierr); 697 } 698 ASMLocalIDsArr[level] = NULL; 699 nASMBlocksArr[level] = 0; 700 ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr); 701 } else { 702 ierr = PCSetType(subpc, PCSOR);CHKERRQ(ierr); 703 } 704 } 705 { 706 /* coarse grid */ 707 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 708 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 709 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 710 ierr = KSPSetOperators(smoother, Lmat, Lmat);CHKERRQ(ierr); 711 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 712 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 713 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 714 ierr = PCSetUp(subpc);CHKERRQ(ierr); 715 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 716 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 717 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 718 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 719 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 720 ierr = KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr); 721 /* This flag gets reset by PCBJacobiGetSubKSP(), but our BJacobi really does the same algorithm everywhere (and in 722 * fact, all but one process will have zero dofs), so we reset the flag to avoid having PCView_BJacobi attempt to 723 * view every subdomain as though they were different. */ 724 ((PC_BJacobi*)subpc->data)->same_local_solves = PETSC_TRUE; 725 } 726 727 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 728 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 729 ierr = PCSetFromOptions_MG(PetscOptionsObject,pc);CHKERRQ(ierr); 730 ierr = PetscOptionsEnd();CHKERRQ(ierr); 731 if (!mg->galerkin) SETERRQ(comm,PETSC_ERR_USER,"PCGAMG must use Galerkin for coarse operators."); 732 if (mg->galerkin == 1) mg->galerkin = 2; 733 734 /* create cheby smoothers */ 735 for (lidx = 1, level = pc_gamg->Nlevels-2; lidx <= fine_level; lidx++, level--) { 736 KSP smoother; 737 PetscBool flag,flag2; 738 PC subpc; 739 740 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 741 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 742 743 /* do my own cheby */ 744 ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr); 745 if (0 && flag) { 746 PetscReal emax, emin; 747 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr); 748 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCSOR, &flag2);CHKERRQ(ierr); 749 /* eigen estimate only for diagnal PC but lets acccept SOR because it is close and safe (always lower) */ 750 if ((flag||flag2) && (emax=emaxs[level]) > 0.0) { 751 PetscInt N1, N0; 752 emax=emaxs[level]; 753 ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr); 754 ierr = MatGetSize(Aarr[level+1], &N0, NULL);CHKERRQ(ierr); 755 emin = emax * pc_gamg->eigtarget[0]; 756 emax *= pc_gamg->eigtarget[1]; 757 ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); 758 } 759 } /* setup checby flag */ 760 } /* non-coarse levels */ 761 762 /* clean up */ 763 for (level=1; level<pc_gamg->Nlevels; level++) { 764 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 765 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 766 } 767 768 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 769 } else { 770 KSP smoother; 771 ierr = PetscInfo(pc,"One level solver used (system is seen as DD). Using default solver.\n");CHKERRQ(ierr); 772 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 773 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0]);CHKERRQ(ierr); 774 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 775 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 776 } 777 PetscFunctionReturn(0); 778 } 779 780 /* ------------------------------------------------------------------------- */ 781 /* 782 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 783 that was created with PCCreate_GAMG(). 784 785 Input Parameter: 786 . pc - the preconditioner context 787 788 Application Interface Routine: PCDestroy() 789 */ 790 #undef __FUNCT__ 791 #define __FUNCT__ "PCDestroy_GAMG" 792 PetscErrorCode PCDestroy_GAMG(PC pc) 793 { 794 PetscErrorCode ierr; 795 PC_MG *mg = (PC_MG*)pc->data; 796 PC_GAMG *pc_gamg= (PC_GAMG*)mg->innerctx; 797 798 PetscFunctionBegin; 799 ierr = PCReset_GAMG(pc);CHKERRQ(ierr); 800 if (pc_gamg->ops->destroy) { 801 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 802 } 803 ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr); 804 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 805 ierr = PetscFree(pc_gamg);CHKERRQ(ierr); 806 ierr = PCDestroy_MG(pc);CHKERRQ(ierr); 807 PetscFunctionReturn(0); 808 } 809 810 811 #undef __FUNCT__ 812 #define __FUNCT__ "PCGAMGSetProcEqLim" 813 /*@ 814 PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via processor reduction. 815 816 Logically Collective on PC 817 818 Input Parameters: 819 + pc - the preconditioner context 820 - n - the number of equations 821 822 823 Options Database Key: 824 . -pc_gamg_process_eq_limit <limit> 825 826 Level: intermediate 827 828 Concepts: Unstructured multigrid preconditioner 829 830 .seealso: PCGAMGSetCoarseEqLim() 831 @*/ 832 PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n) 833 { 834 PetscErrorCode ierr; 835 836 PetscFunctionBegin; 837 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 838 ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 839 PetscFunctionReturn(0); 840 } 841 842 #undef __FUNCT__ 843 #define __FUNCT__ "PCGAMGSetProcEqLim_GAMG" 844 static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n) 845 { 846 PC_MG *mg = (PC_MG*)pc->data; 847 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 848 849 PetscFunctionBegin; 850 if (n>0) pc_gamg->min_eq_proc = n; 851 PetscFunctionReturn(0); 852 } 853 854 #undef __FUNCT__ 855 #define __FUNCT__ "PCGAMGSetCoarseEqLim" 856 /*@ 857 PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids. 858 859 Collective on PC 860 861 Input Parameters: 862 + pc - the preconditioner context 863 - n - maximum number of equations to aim for 864 865 Options Database Key: 866 . -pc_gamg_coarse_eq_limit <limit> 867 868 Level: intermediate 869 870 Concepts: Unstructured multigrid preconditioner 871 872 .seealso: PCGAMGSetProcEqLim() 873 @*/ 874 PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n) 875 { 876 PetscErrorCode ierr; 877 878 PetscFunctionBegin; 879 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 880 ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 881 PetscFunctionReturn(0); 882 } 883 884 #undef __FUNCT__ 885 #define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG" 886 static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n) 887 { 888 PC_MG *mg = (PC_MG*)pc->data; 889 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 890 891 PetscFunctionBegin; 892 if (n>0) pc_gamg->coarse_eq_limit = n; 893 PetscFunctionReturn(0); 894 } 895 896 #undef __FUNCT__ 897 #define __FUNCT__ "PCGAMGSetRepartitioning" 898 /*@ 899 PCGAMGSetRepartitioning - Repartition the coarse grids 900 901 Collective on PC 902 903 Input Parameters: 904 + pc - the preconditioner context 905 - n - PETSC_TRUE or PETSC_FALSE 906 907 Options Database Key: 908 . -pc_gamg_repartition <true,false> 909 910 Level: intermediate 911 912 Concepts: Unstructured multigrid preconditioner 913 914 .seealso: () 915 @*/ 916 PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n) 917 { 918 PetscErrorCode ierr; 919 920 PetscFunctionBegin; 921 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 922 ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 923 PetscFunctionReturn(0); 924 } 925 926 #undef __FUNCT__ 927 #define __FUNCT__ "PCGAMGSetRepartitioning_GAMG" 928 static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n) 929 { 930 PC_MG *mg = (PC_MG*)pc->data; 931 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 932 933 PetscFunctionBegin; 934 pc_gamg->repart = n; 935 PetscFunctionReturn(0); 936 } 937 938 #undef __FUNCT__ 939 #define __FUNCT__ "PCGAMGSetReuseInterpolation" 940 /*@ 941 PCGAMGSetReuseInterpolation - Reuse prolongation when rebuilding preconditioner 942 943 Collective on PC 944 945 Input Parameters: 946 + pc - the preconditioner context 947 - n - PETSC_TRUE or PETSC_FALSE 948 949 Options Database Key: 950 . -pc_gamg_reuse_interpolation <true,false> 951 952 Level: intermediate 953 954 Concepts: Unstructured multigrid preconditioner 955 956 .seealso: () 957 @*/ 958 PetscErrorCode PCGAMGSetReuseInterpolation(PC pc, PetscBool n) 959 { 960 PetscErrorCode ierr; 961 962 PetscFunctionBegin; 963 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 964 ierr = PetscTryMethod(pc,"PCGAMGSetReuseInterpolation_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 965 PetscFunctionReturn(0); 966 } 967 968 #undef __FUNCT__ 969 #define __FUNCT__ "PCGAMGSetReuseInterpolation_GAMG" 970 static PetscErrorCode PCGAMGSetReuseInterpolation_GAMG(PC pc, PetscBool n) 971 { 972 PC_MG *mg = (PC_MG*)pc->data; 973 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 974 975 PetscFunctionBegin; 976 pc_gamg->reuse_prol = n; 977 PetscFunctionReturn(0); 978 } 979 980 #undef __FUNCT__ 981 #define __FUNCT__ "PCGAMGSetUseASMAggs" 982 /*@ 983 PCGAMGSetUseASMAggs - 984 985 Collective on PC 986 987 Input Parameters: 988 . pc - the preconditioner context 989 990 991 Options Database Key: 992 . -pc_gamg_use_agg_gasm 993 994 Level: intermediate 995 996 Concepts: Unstructured multigrid preconditioner 997 998 .seealso: () 999 @*/ 1000 PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n) 1001 { 1002 PetscErrorCode ierr; 1003 1004 PetscFunctionBegin; 1005 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1006 ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr); 1007 PetscFunctionReturn(0); 1008 } 1009 1010 #undef __FUNCT__ 1011 #define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG" 1012 static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n) 1013 { 1014 PC_MG *mg = (PC_MG*)pc->data; 1015 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1016 1017 PetscFunctionBegin; 1018 pc_gamg->use_aggs_in_gasm = n; 1019 PetscFunctionReturn(0); 1020 } 1021 1022 #undef __FUNCT__ 1023 #define __FUNCT__ "PCGAMGSetNlevels" 1024 /*@ 1025 PCGAMGSetNlevels - Sets the maximum number of levels PCGAMG will use 1026 1027 Not collective on PC 1028 1029 Input Parameters: 1030 + pc - the preconditioner 1031 - n - the maximum number of levels to use 1032 1033 Options Database Key: 1034 . -pc_mg_levels 1035 1036 Level: intermediate 1037 1038 Concepts: Unstructured multigrid preconditioner 1039 1040 .seealso: () 1041 @*/ 1042 PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n) 1043 { 1044 PetscErrorCode ierr; 1045 1046 PetscFunctionBegin; 1047 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1048 ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr); 1049 PetscFunctionReturn(0); 1050 } 1051 1052 #undef __FUNCT__ 1053 #define __FUNCT__ "PCGAMGSetNlevels_GAMG" 1054 static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n) 1055 { 1056 PC_MG *mg = (PC_MG*)pc->data; 1057 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1058 1059 PetscFunctionBegin; 1060 pc_gamg->Nlevels = n; 1061 PetscFunctionReturn(0); 1062 } 1063 1064 #undef __FUNCT__ 1065 #define __FUNCT__ "PCGAMGSetThreshold" 1066 /*@ 1067 PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph 1068 1069 Not collective on PC 1070 1071 Input Parameters: 1072 + pc - the preconditioner context 1073 - threshold - the threshold value, 0.0 means keep all nonzero entries in the graph; negative means keep even zero entries in the graph 1074 1075 Options Database Key: 1076 . -pc_gamg_threshold <threshold> 1077 1078 Level: intermediate 1079 1080 Concepts: Unstructured multigrid preconditioner 1081 1082 .seealso: () 1083 @*/ 1084 PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n) 1085 { 1086 PetscErrorCode ierr; 1087 1088 PetscFunctionBegin; 1089 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1090 ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr); 1091 PetscFunctionReturn(0); 1092 } 1093 1094 #undef __FUNCT__ 1095 #define __FUNCT__ "PCGAMGSetThreshold_GAMG" 1096 static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n) 1097 { 1098 PC_MG *mg = (PC_MG*)pc->data; 1099 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1100 1101 PetscFunctionBegin; 1102 pc_gamg->threshold = n; 1103 PetscFunctionReturn(0); 1104 } 1105 1106 #undef __FUNCT__ 1107 #define __FUNCT__ "PCGAMGSetType" 1108 /*@ 1109 PCGAMGSetType - Set solution method 1110 1111 Collective on PC 1112 1113 Input Parameters: 1114 + pc - the preconditioner context 1115 - type - PCGAMGAGG, PCGAMGGEO, or PCGAMGCLASSICAL 1116 1117 Options Database Key: 1118 . -pc_gamg_type <agg,geo,classical> 1119 1120 Level: intermediate 1121 1122 Concepts: Unstructured multigrid preconditioner 1123 1124 .seealso: PCGAMGGetType(), PCGAMG 1125 @*/ 1126 PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type) 1127 { 1128 PetscErrorCode ierr; 1129 1130 PetscFunctionBegin; 1131 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1132 ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr); 1133 PetscFunctionReturn(0); 1134 } 1135 1136 #undef __FUNCT__ 1137 #define __FUNCT__ "PCGAMGGetType" 1138 /*@ 1139 PCGAMGGetType - Get solution method 1140 1141 Collective on PC 1142 1143 Input Parameter: 1144 . pc - the preconditioner context 1145 1146 Output Parameter: 1147 . type - the type of algorithm used 1148 1149 Level: intermediate 1150 1151 Concepts: Unstructured multigrid preconditioner 1152 1153 .seealso: PCGAMGSetType(), PCGAMGType 1154 @*/ 1155 PetscErrorCode PCGAMGGetType(PC pc, PCGAMGType *type) 1156 { 1157 PetscErrorCode ierr; 1158 1159 PetscFunctionBegin; 1160 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1161 ierr = PetscUseMethod(pc,"PCGAMGGetType_C",(PC,PCGAMGType*),(pc,type));CHKERRQ(ierr); 1162 PetscFunctionReturn(0); 1163 } 1164 1165 #undef __FUNCT__ 1166 #define __FUNCT__ "PCGAMGGetType_GAMG" 1167 static PetscErrorCode PCGAMGGetType_GAMG(PC pc, PCGAMGType *type) 1168 { 1169 PC_MG *mg = (PC_MG*)pc->data; 1170 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1171 1172 PetscFunctionBegin; 1173 *type = pc_gamg->type; 1174 PetscFunctionReturn(0); 1175 } 1176 1177 #undef __FUNCT__ 1178 #define __FUNCT__ "PCGAMGSetType_GAMG" 1179 static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type) 1180 { 1181 PetscErrorCode ierr,(*r)(PC); 1182 PC_MG *mg = (PC_MG*)pc->data; 1183 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1184 1185 PetscFunctionBegin; 1186 pc_gamg->type = type; 1187 ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr); 1188 if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type); 1189 if (pc_gamg->ops->destroy) { 1190 ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr); 1191 ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr); 1192 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1193 /* cleaning up common data in pc_gamg - this should disapear someday */ 1194 pc_gamg->data_cell_cols = 0; 1195 pc_gamg->data_cell_rows = 0; 1196 pc_gamg->orig_data_cell_cols = 0; 1197 pc_gamg->orig_data_cell_rows = 0; 1198 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 1199 pc_gamg->data_sz = 0; 1200 } 1201 ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr); 1202 ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr); 1203 ierr = (*r)(pc);CHKERRQ(ierr); 1204 PetscFunctionReturn(0); 1205 } 1206 1207 #undef __FUNCT__ 1208 #define __FUNCT__ "PCView_GAMG" 1209 static PetscErrorCode PCView_GAMG(PC pc,PetscViewer viewer) 1210 { 1211 PetscErrorCode ierr; 1212 PC_MG *mg = (PC_MG*)pc->data; 1213 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1214 1215 PetscFunctionBegin; 1216 ierr = PetscViewerASCIIPrintf(viewer," GAMG specific options\n");CHKERRQ(ierr); 1217 ierr = PetscViewerASCIIPrintf(viewer," Threshold for dropping small values from graph %g\n",(double)pc_gamg->threshold);CHKERRQ(ierr); 1218 if (pc_gamg->ops->view) { 1219 ierr = (*pc_gamg->ops->view)(pc,viewer);CHKERRQ(ierr); 1220 } 1221 PetscFunctionReturn(0); 1222 } 1223 1224 #undef __FUNCT__ 1225 #define __FUNCT__ "PCSetFromOptions_GAMG" 1226 PetscErrorCode PCSetFromOptions_GAMG(PetscOptions *PetscOptionsObject,PC pc) 1227 { 1228 PetscErrorCode ierr; 1229 PC_MG *mg = (PC_MG*)pc->data; 1230 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 1231 PetscBool flag; 1232 PetscInt two = 2; 1233 MPI_Comm comm; 1234 1235 PetscFunctionBegin; 1236 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 1237 ierr = PetscOptionsHead(PetscOptionsObject,"GAMG options");CHKERRQ(ierr); 1238 { 1239 char tname[256]; 1240 ierr = PetscOptionsFList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, pc_gamg->gamg_type_name, tname, sizeof(tname), &flag);CHKERRQ(ierr); 1241 if (flag) { 1242 ierr = PCGAMGSetType(pc,tname);CHKERRQ(ierr); 1243 } 1244 ierr = PetscOptionsBool("-pc_gamg_repartition","Repartion coarse grids","PCGAMGRepartitioning",pc_gamg->repart,&pc_gamg->repart,NULL);CHKERRQ(ierr); 1245 ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation","Reuse prolongation operator","PCGAMGReuseInterpolation",pc_gamg->reuse_prol,&pc_gamg->reuse_prol,NULL);CHKERRQ(ierr); 1246 ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm","Use aggregation agragates for GASM smoother","PCGAMGUseASMAggs",pc_gamg->use_aggs_in_gasm,&pc_gamg->use_aggs_in_gasm,NULL);CHKERRQ(ierr); 1247 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit","Limit (goal) on number of equations per process on coarse grids","PCGAMGSetProcEqLim",pc_gamg->min_eq_proc,&pc_gamg->min_eq_proc,NULL);CHKERRQ(ierr); 1248 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit","Limit on number of equations for the coarse grid","PCGAMGSetCoarseEqLim",pc_gamg->coarse_eq_limit,&pc_gamg->coarse_eq_limit,NULL);CHKERRQ(ierr); 1249 ierr = PetscOptionsReal("-pc_gamg_threshold","Relative threshold to use for dropping edges in aggregation graph","PCGAMGSetThreshold",pc_gamg->threshold,&pc_gamg->threshold,&flag);CHKERRQ(ierr); 1250 ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr); 1251 ierr = PetscOptionsInt("-pc_mg_levels","Set number of MG levels","PCGAMGSetNlevels",pc_gamg->Nlevels,&pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 1252 1253 /* set options for subtype */ 1254 if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(PetscOptionsObject,pc);CHKERRQ(ierr);} 1255 } 1256 ierr = PetscOptionsTail();CHKERRQ(ierr); 1257 PetscFunctionReturn(0); 1258 } 1259 1260 /* -------------------------------------------------------------------------- */ 1261 /*MC 1262 PCGAMG - Geometric algebraic multigrid (AMG) preconditioner 1263 1264 Options Database Keys: 1265 Multigrid options(inherited) 1266 + -pc_mg_cycles <v>: v or w (PCMGSetCycleType()) 1267 . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp) 1268 . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown) 1269 - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade 1270 1271 1272 Notes: In order to obtain good performance for PCGAMG for vector valued problems you must 1273 $ Call MatSetBlockSize() to indicate the number of degrees of freedom per grid point 1274 $ Call MatSetNearNullSpace() (or PCSetCoordinates() if solving the equations of elasticity) to indicate the near null space of the operator 1275 $ See the Users Manual Chapter 4 for more details 1276 1277 Level: intermediate 1278 1279 Concepts: algebraic multigrid 1280 1281 .seealso: PCCreate(), PCSetType(), MatSetBlockSize(), PCMGType, PCSetCoordinates(), MatSetNearNullSpace(), PCGAMGSetType(), PCGAMGAGG, PCGAMGGEO, PCGAMGCLASSICAL, PCGAMGSetProcEqLim(), 1282 PCGAMGSetCoarseEqLim(), PCGAMGSetRepartitioning(), PCGAMGRegister(), PCGAMGSetReuseInterpolation(), PCGAMGSetUseASMAggs(), PCGAMGSetNlevels(), PCGAMGSetThreshold(), PCGAMGGetType() 1283 M*/ 1284 1285 #undef __FUNCT__ 1286 #define __FUNCT__ "PCCreate_GAMG" 1287 PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc) 1288 { 1289 PetscErrorCode ierr; 1290 PC_GAMG *pc_gamg; 1291 PC_MG *mg; 1292 1293 PetscFunctionBegin; 1294 /* register AMG type */ 1295 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1296 1297 /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */ 1298 ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); 1299 ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr); 1300 1301 /* create a supporting struct and attach it to pc */ 1302 ierr = PetscNewLog(pc,&pc_gamg);CHKERRQ(ierr); 1303 mg = (PC_MG*)pc->data; 1304 mg->galerkin = 2; /* Use Galerkin, but it is computed externally from PCMG by GAMG code */ 1305 mg->innerctx = pc_gamg; 1306 1307 ierr = PetscNewLog(pc,&pc_gamg->ops);CHKERRQ(ierr); 1308 1309 pc_gamg->setup_count = 0; 1310 /* these should be in subctx but repartitioning needs simple arrays */ 1311 pc_gamg->data_sz = 0; 1312 pc_gamg->data = 0; 1313 1314 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1315 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1316 pc->ops->setup = PCSetUp_GAMG; 1317 pc->ops->reset = PCReset_GAMG; 1318 pc->ops->destroy = PCDestroy_GAMG; 1319 mg->view = PCView_GAMG; 1320 1321 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr); 1322 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr); 1323 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr); 1324 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseInterpolation_C",PCGAMGSetReuseInterpolation_GAMG);CHKERRQ(ierr); 1325 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr); 1326 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr); 1327 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr); 1328 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGGetType_C",PCGAMGGetType_GAMG);CHKERRQ(ierr); 1329 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr); 1330 pc_gamg->repart = PETSC_FALSE; 1331 pc_gamg->reuse_prol = PETSC_FALSE; 1332 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1333 pc_gamg->min_eq_proc = 50; 1334 pc_gamg->coarse_eq_limit = 50; 1335 pc_gamg->threshold = 0.; 1336 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1337 pc_gamg->emax_id = -1; 1338 pc_gamg->current_level = 0; /* don't need to init really */ 1339 pc_gamg->eigtarget[0] = 0.05; 1340 pc_gamg->eigtarget[1] = 1.05; 1341 pc_gamg->ops->createlevel = PCGAMGCreateLevel_GAMG; 1342 1343 /* PCSetUp_GAMG assumes that the type has been set, so set it to the default now */ 1344 ierr = PCGAMGSetType(pc,PCGAMGAGG);CHKERRQ(ierr); 1345 PetscFunctionReturn(0); 1346 } 1347 1348 #undef __FUNCT__ 1349 #define __FUNCT__ "PCGAMGInitializePackage" 1350 /*@C 1351 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called 1352 from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG() 1353 when using static libraries. 1354 1355 Level: developer 1356 1357 .keywords: PC, PCGAMG, initialize, package 1358 .seealso: PetscInitialize() 1359 @*/ 1360 PetscErrorCode PCGAMGInitializePackage(void) 1361 { 1362 PetscErrorCode ierr; 1363 1364 PetscFunctionBegin; 1365 if (PCGAMGPackageInitialized) PetscFunctionReturn(0); 1366 PCGAMGPackageInitialized = PETSC_TRUE; 1367 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr); 1368 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr); 1369 ierr = PetscFunctionListAdd(&GAMGList,PCGAMGCLASSICAL,PCCreateGAMG_Classical);CHKERRQ(ierr); 1370 ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr); 1371 1372 /* general events */ 1373 ierr = PetscLogEventRegister("PCGAMGGraph_AGG", 0, &PC_GAMGGraph_AGG);CHKERRQ(ierr); 1374 ierr = PetscLogEventRegister("PCGAMGGraph_GEO", PC_CLASSID, &PC_GAMGGraph_GEO);CHKERRQ(ierr); 1375 ierr = PetscLogEventRegister("PCGAMGCoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr); 1376 ierr = PetscLogEventRegister("PCGAMGCoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr); 1377 ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr); 1378 ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr); 1379 ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptProlongator_AGG);CHKERRQ(ierr); 1380 1381 #if defined PETSC_GAMG_USE_LOG 1382 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1383 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1384 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1385 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1386 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1387 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1388 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1389 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1390 ierr = PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1391 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1392 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1393 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1394 ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1395 ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1396 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1397 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1398 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1399 1400 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1401 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1402 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1403 /* create timer stages */ 1404 #if defined GAMG_STAGES 1405 { 1406 char str[32]; 1407 PetscInt lidx; 1408 sprintf(str,"MG Level %d (finest)",0); 1409 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1410 for (lidx=1; lidx<9; lidx++) { 1411 sprintf(str,"MG Level %d",lidx); 1412 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 1413 } 1414 } 1415 #endif 1416 #endif 1417 PetscFunctionReturn(0); 1418 } 1419 1420 #undef __FUNCT__ 1421 #define __FUNCT__ "PCGAMGFinalizePackage" 1422 /*@C 1423 PCGAMGFinalizePackage - This function frees everything from the PCGAMG package. It is 1424 called from PetscFinalize() automatically. 1425 1426 Level: developer 1427 1428 .keywords: Petsc, destroy, package 1429 .seealso: PetscFinalize() 1430 @*/ 1431 PetscErrorCode PCGAMGFinalizePackage(void) 1432 { 1433 PetscErrorCode ierr; 1434 1435 PetscFunctionBegin; 1436 PCGAMGPackageInitialized = PETSC_FALSE; 1437 ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr); 1438 PetscFunctionReturn(0); 1439 } 1440 1441 #undef __FUNCT__ 1442 #define __FUNCT__ "PCGAMGRegister" 1443 /*@C 1444 PCGAMGRegister - Register a PCGAMG implementation. 1445 1446 Input Parameters: 1447 + type - string that will be used as the name of the GAMG type. 1448 - create - function for creating the gamg context. 1449 1450 Level: advanced 1451 1452 .seealso: PCGAMGType, PCGAMG, PCGAMGSetType() 1453 @*/ 1454 PetscErrorCode PCGAMGRegister(PCGAMGType type, PetscErrorCode (*create)(PC)) 1455 { 1456 PetscErrorCode ierr; 1457 1458 PetscFunctionBegin; 1459 ierr = PCGAMGInitializePackage();CHKERRQ(ierr); 1460 ierr = PetscFunctionListAdd(&GAMGList,type,create);CHKERRQ(ierr); 1461 PetscFunctionReturn(0); 1462 } 1463 1464