/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
 */
#include "petsc-private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>

#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
PetscLogEvent PC_GAMGKKTProl_AGG;
#endif

#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

static PetscFunctionList GAMGList = 0;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
    PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  }
  pc_gamg->data = NULL; pc_gamg->data_sz = 0;

  if (pc_gamg->orig_data) {
    ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* private 2x2 Mat Nest for Stokes */
typedef struct {
  Mat A11,A21,A12,Amat;
  IS  prim_is,constr_is;
} GAMGKKTMat;

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatCreate"
static PetscErrorCode GAMGKKTMatCreate(Mat A, PetscBool iskkt, GAMGKKTMat *out)
{
  PetscFunctionBegin;
  out->Amat = A;
  if (iskkt) {
    PetscErrorCode ierr;
    IS             is_constraint, is_prime;
    PetscInt       nmin,nmax;

    ierr = MatGetOwnershipRange(A, &nmin, &nmax);CHKERRQ(ierr);
    ierr = MatFindZeroDiagonals(A, &is_constraint);CHKERRQ(ierr);
    ierr = ISComplement(is_constraint, nmin, nmax, &is_prime);CHKERRQ(ierr);

    out->prim_is   = is_prime;
    out->constr_is = is_constraint;

    ierr = MatGetSubMatrix(A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);CHKERRQ(ierr);
  } else {
    out->A11       = A;
    out->A21       = NULL;
    out->A12       = NULL;
    out->prim_is   = NULL;
    out->constr_is = NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatDestroy"
static PetscErrorCode GAMGKKTMatDestroy(GAMGKKTMat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->A11 && mat->A11 != mat->Amat) {
    ierr = MatDestroy(&mat->A11);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&mat->A21);CHKERRQ(ierr);
  ierr = MatDestroy(&mat->A12);CHKERRQ(ierr);

  ierr = ISDestroy(&mat->prim_is);CHKERRQ(ierr);
  ierr = ISDestroy(&mat->constr_is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   createLevel: create the coarse operator with RAP, then repartition and/or reduce the
   number of active processors.

   Input Parameters:
.  pc - parameters, plus a side effect: the coarse data in 'pc_gamg->data' and
       'pc_gamg->data_sz' are changed via repartitioning/reduction
.  Amat_fine - matrix on this fine (k) level
.  cr_bs - coarse block size
.  isLast - this is the coarsest level (forces reduction to one process)
.  stokes - the system is a Stokes/KKT system
   In/Output Parameters:
.  a_P_inout - prolongation operator to the next level (k-->k-1)
.  a_nactive_proc - number of active procs
   Output Parameter:
.  a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "createLevel"
static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,const PetscBool stokes,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg         = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg    = (PC_GAMG*)mg->innerctx;
  const PetscBool repart      = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs_prim,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  /* RAP */
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations) */
  ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  if (pc_gamg->data_sz % (pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_sz %D not divisible by (pc_gamg->data_cell_cols %D *pc_gamg->data_cell_rows %D)",pc_gamg->data_sz,pc_gamg->data_cell_cols,pc_gamg->data_cell_rows);
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);

  /* get number of PEs to make active, 'new_size'; the reduction can be to any integer 1..P */
  {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwired minimum number of equations per process */
    if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
    if (isLast) new_size = 1;
  }
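
  /*
     Illustrative arithmetic (not from the original source): with the default
     -pc_gamg_process_eq_limit of 50, a coarse grid with 10,000 global equations
     requests roughly 10000/50 = 200 active processes; that request is then clamped
     to the current number of active processes, forced to 1 when the global size
     drops below -pc_gamg_coarse_eq_limit, and forced to 1 on the last level.
  */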

  if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
    VecScatter     vecscat;
    PetscScalar    *array;
    Vec            src_crd, dest_crd;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc' */
    ierr = PetscMalloc(size*sizeof(PetscInt), &counts);CHKERRQ(ierr);
    if (repart && !stokes) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of the primal part accordingly */
      Mat adj;

      if (pc_gamg->verbose>0) {
        if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
        else {
          PetscInt n;
          ierr = MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
          PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
        }
      }

      /* get 'adj' */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;

        ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &d_nnz);CHKERRQ(ierr);
        ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs_prim) d_nnz[jj] = ncrs_prim;
          if (o_nnz[jj] > (M/cr_bs-ncrs_prim)) o_nnz[jj] = M/cr_bs-ncrs_prim;
        }

        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs_prim, ncrs_prim, PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,MATAIJ);CHKERRQ(ierr);
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree(d_nnz);CHKERRQ(ierr);
        ierr = PetscFree(o_nnz);CHKERRQ(ierr);

        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) {
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }

        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);

        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info */
        ierr     = PetscMalloc(ncrs_eq*sizeof(PetscInt), &newproc_idx);CHKERRQ(ierr);
        ierr     = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0; kk < nloc_old; kk++) {
          for (ii = 0; ii < cr_bs; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      if (newproc_idx != 0) {
        ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
      }
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;

      /* find factor */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1; kk <= size; kk++) {
          if (size%kk==0) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* does this happen? .. size is a prime */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction; bail out because nested here */
        ierr        = PetscFree(counts);CHKERRQ(ierr);
        if (pc_gamg->verbose>0) {
          PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
        }
        PetscFunctionReturn(0);
      }

      if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
      targetPE = rank/rfactor;
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);

      if (stokes) {
        ierr = ISCreateStride(comm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim);CHKERRQ(ierr);
      }
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    if (stokes) {
      ierr = ISPartitioningToNumbering(is_eq_newproc_prim, &is_eq_num_prim);CHKERRQ(ierr);
    } else is_eq_num_prim = is_eq_num;
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    if (stokes) {
      ierr          = ISPartitioningCount(is_eq_newproc_prim, size, counts);CHKERRQ(ierr);
      ierr          = ISDestroy(&is_eq_newproc_prim);CHKERRQ(ierr);
      ncrs_prim_new = counts[rank]/cr_bs; /* nodes */
    } else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
    ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetFromOptions(dest_crd);CHKERRQ(ierr); /* this is needed! */
    /*
      There are 'ndata_rows*ndata_cols' data items per node (one can think of the vectors as having
      a block size of that). Note, ISs are expanded into equation space by 'cr_bs'.
    */
    ierr = PetscMalloc((ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx);CHKERRQ(ierr);
    ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    for (ii=0,jj=0; ii<ncrs_prim; ii++) {
      PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
      for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
    ierr = PetscFree(tidx);CHKERRQ(ierr);
    /*
      Create a vector to contain the original vertex information for each element
    */
    ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      const PetscInt stride0 = ncrs_prim*pc_gamg->data_cell_rows;
      for (ii=0; ii<ncrs_prim; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
    ierr = ISDestroy(&isscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
    ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    ierr = PetscMalloc(node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr);

    pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
    strideNew        = ncrs_prim_new*ndata_rows;

    ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      for (ii=0; ii<ncrs_prim_new; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
    ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);

    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;

      if (!PETSC_TRUE) {
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr);
      }
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;
      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

      if (!PETSC_TRUE) {
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
      }
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }

  /* output matrix data */
  if (!PETSC_TRUE) {
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if (llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      PetscViewerASCIIOpen(comm,fname,&viewer);
      ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    PetscViewerASCIIOpen(comm,fname,&viewer);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
   by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
*/
#undef __FUNCT__
#define __FUNCT__ "PCSetUp_GAMG"
PetscErrorCode PCSetUp_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  Mat            Pmat     = pc->pmat;
  PetscInt       fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS];
  MPI_Comm       comm;
  PetscMPIInt    rank,size,nactivepe;
  Mat            Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS];
  PetscReal      emaxs[GAMG_MAXLEVELS];
  IS             *ASMLocalIDsArr[GAMG_MAXLEVELS];
  GAMGKKTMat     kktMatsArr[GAMG_MAXLEVELS];
  PetscLogDouble nnz0=0.,nnztot=0.;
  MatInfo        info;
  PetscBool      stokes = PETSC_FALSE, redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol);

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled);

  if (pc_gamg->setup_count++ > 0) {
    if (redo_mesh_setup) {
      /* reset everything */
      ierr = PCReset_MG(pc);CHKERRQ(ierr);
      pc->setupcalled = 0;
    } else {
      PC_MG_Levels **mglevels = mg->levels;
      /* just do Galerkin grids */
      Mat B,dA,dB;

      if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet");
      if (pc_gamg->Nlevels > 1) {
        /* currently only handle case where mat and pmat are the same on coarser levels */
        ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,NULL);CHKERRQ(ierr);
        /* (re)set to get dirty flag */
        ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr);

        for (level=pc_gamg->Nlevels-2; level>-1; level--) {
          /* the first time through the matrix structure has changed from repartitioning */
          if (pc_gamg->setup_count==2 /*&& (pc_gamg->repart || level==0)*/) {
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr);
            ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr);

            mglevels[level]->A = B;
          } else {
            ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B,NULL);CHKERRQ(ierr);
            ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr);
          }
          ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
          dB   = B;
        }
      }

      ierr = PCSetUp_MG(pc);CHKERRQ(ierr);

      /* PCSetUp_MG seems to insist on setting this to GMRES */
      ierr = KSPSetType(mglevels[0]->smoothd, KSPPREONLY);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    }
  }

  ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,NULL);CHKERRQ(ierr);

  ierr = GAMGKKTMatCreate(Pmat, stokes, &kktMatsArr[0]);CHKERRQ(ierr);

  if (!pc_gamg->data) {
    if (pc_gamg->orig_data) {
      ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr);

      pc_gamg->data_sz        = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols;
      pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows;
      pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols;

      ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr);
      for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq];
    } else {
      if (!pc_gamg->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) need to support NULL data");
      if (stokes) SETERRQ(comm,PETSC_ERR_PLIB,"Need data (eg, PCSetCoordinates) for Stokes problems");
      ierr = pc_gamg->createdefaultdata(pc, kktMatsArr[0].A11);CHKERRQ(ierr);
    }
  }

  /* cache original data for reuse */
  if (!pc_gamg->orig_data && redo_mesh_setup) {
    ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data);CHKERRQ(ierr);
    for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq];
    pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows;
    pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols;
  }

  /* get basic dims */
  if (stokes) bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */
  else {
    ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr);
  }

  ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr);
  if (pc_gamg->verbose) {
    PetscInt NN = M;
    if (pc_gamg->verbose==1) {
      ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Pmat, &NN, &qq);CHKERRQ(ierr);
    } else {
      ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr);
    }
    nnz0   = info.nz_used;
    nnztot = info.nz_used;
    ierr = PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n",
                       rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols,
                       (int)(nnz0/(PetscReal)NN),size);CHKERRQ(ierr);
  }

  /* Get A_i and R_i */
  for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */
       level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (size==1 || nactivepe>1); */
       level++) {
    level1 = level + 1;
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#if (defined GAMG_STAGES)
    ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr);
#endif
#endif
    /* deal with Stokes, get sub matrices */
    if (level > 0) {
      ierr = GAMGKKTMatCreate(Aarr[level], stokes, &kktMatsArr[level]);CHKERRQ(ierr);
    }
    { /* construct prolongator */
      Mat              Gmat;
      PetscCoarsenData *agg_lists;
      Mat              Prol11,Prol22;

      ierr = pc_gamg->graph(pc,kktMatsArr[level].A11, &Gmat);CHKERRQ(ierr);
      ierr = pc_gamg->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr);
      ierr = pc_gamg->prolongator(pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11);CHKERRQ(ierr);

      /* could have failed to create new level */
      if (Prol11) {
        /* get new block size of coarse matrices */
        ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr);

        if (stokes) {
          if (!pc_gamg->formkktprol) SETERRQ(comm,PETSC_ERR_USER,"Stokes not supported by this AMG method.");
          /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */
          ierr = pc_gamg->formkktprol(pc, Prol11, kktMatsArr[level].A21, &Prol22);CHKERRQ(ierr);
        }

        if (pc_gamg->optprol) {
          /* smooth */
          ierr = pc_gamg->optprol(pc, kktMatsArr[level].A11, &Prol11);CHKERRQ(ierr);
        }

        if (stokes) {
          IS  is_row[2];
          Mat a[4];

          is_row[0] = kktMatsArr[level].prim_is; is_row[1] = kktMatsArr[level].constr_is;
          a[0]      = Prol11; a[1] = NULL; a[2] = NULL; a[3] = Prol22;
          ierr      = MatCreateNest(comm, 2, is_row, 2, is_row, a, &Parr[level1]);CHKERRQ(ierr);
        } else Parr[level1] = Prol11;
      } else Parr[level1] = NULL;

      if (pc_gamg->use_aggs_in_gasm) {
        ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr);
      }

      ierr = MatDestroy(&Gmat);CHKERRQ(ierr);
      ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr);
    } /* construct prolongator scope */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr);
#endif
    /* cache eigen estimate */
    if (pc_gamg->emax_id != -1) {
      PetscBool flag;
      ierr = PetscObjectComposedDataGetReal((PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr);
      if (!flag) emaxs[level] = -1.;
    } else emaxs[level] = -1.;
    if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */
    if (!Parr[level1]) {
      if (pc_gamg->verbose) {
        ierr = PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);CHKERRQ(ierr);
      }
      break;
    }
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif

    ierr = createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2),
                       stokes, &Parr[level1], &Aarr[level1], &nactivepe);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr);
#endif
    ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr);

    if (pc_gamg->verbose > 0) {
      PetscInt NN = M;
      if (pc_gamg->verbose==1) {
        ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);CHKERRQ(ierr);
        ierr = MatGetLocalSize(Aarr[level1], &NN, &qq);CHKERRQ(ierr);
      } else {
        ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr);
      }

      nnztot += info.nz_used;
      ierr = PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n",
                         rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols,
                         (int)(info.nz_used/(PetscReal)NN), nactivepe);CHKERRQ(ierr);
    }

    /* stop if one node -- could pull back for singular problems */
    if (M/pc_gamg->data_cell_cols < 2) {
      level++;
      break;
    }
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
    ierr = PetscLogStagePop();CHKERRQ(ierr);
#endif
  } /* levels */

  if (pc_gamg->data) {
    ierr          = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    pc_gamg->data = NULL;
  }

  if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = %g\n",0,__FUNCT__,level+1,nnztot/nnz0);
  pc_gamg->Nlevels = level + 1;
  fine_level       = level;
  ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr);

  /* simple setup */
  if (!PETSC_TRUE) {
    PC_MG_Levels **mglevels = mg->levels;
    for (lidx=0,level=pc_gamg->Nlevels-1;
         lidx<fine_level;
         lidx++, level--) {
      ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr);
      ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }
    ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr);

    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */
    /* set default smoothers & set operators */
    for (lidx = 1, level = pc_gamg->Nlevels-2;
         lidx <= fine_level;
         lidx++, level--) {
      KSP smoother;
      PC  subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      /* set ops */
      ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr);

      /* create field split PC, get subsmoother */
      if (stokes) {
        KSP      *ksps;
        PetscInt nn;
        ierr     = PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);CHKERRQ(ierr);
        ierr     = PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);CHKERRQ(ierr);
        ierr     = PCFieldSplitGetSubKSP(subpc,&nn,&ksps);CHKERRQ(ierr);
        smoother = ksps[0];
        ierr     = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr     = PetscFree(ksps);CHKERRQ(ierr);
      }
      ierr = GAMGKKTMatDestroy(&kktMatsArr[level]);CHKERRQ(ierr);

      /* set defaults */
      ierr = KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr);

      /* override defaults and command line args (!) */
      if (pc_gamg->use_aggs_in_gasm) {
        PetscInt sz;
        IS       *is;

        sz   = nASMBlocksArr[level];
        is   = ASMLocalIDsArr[level];
        ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr);
        if (sz==0) {
          IS       is;
          PetscInt my0,kk;
          ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr);
          ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr);
          ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr);
          ierr = ISDestroy(&is);CHKERRQ(ierr);
        } else {
          PetscInt kk;
          ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr);
          for (kk=0; kk<sz; kk++) {
            ierr = ISDestroy(&is[kk]);CHKERRQ(ierr);
          }
          ierr = PetscFree(is);CHKERRQ(ierr);
        }
        ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr);

        ASMLocalIDsArr[level] = NULL;
        nASMBlocksArr[level]  = 0;
        ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr);
      } else {
        ierr = PCSetType(subpc, PCJACOBI);CHKERRQ(ierr);
      }
    }
    {
      /* coarse grid */
      KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first;
      Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0;
      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPSetOperators(smoother, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr);
      ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
      ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr);
      ierr = PCSetUp(subpc);CHKERRQ(ierr);
      ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr);
      if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii);
      ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr);
      ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr);
      ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr);
    }

    /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */
    ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr);
    ierr = PCSetFromOptions_MG(pc);CHKERRQ(ierr);
    ierr = PetscOptionsEnd();CHKERRQ(ierr);
    if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used.");

    /* create cheby smoothers */
    for (lidx = 1, level = pc_gamg->Nlevels-2;
         lidx <= fine_level;
         lidx++, level--) {
      KSP       smoother;
      PetscBool flag;
      PC        subpc;

      ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr);
      ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);

      /* create field split PC, get subsmoother */
      if (stokes) {
        KSP      *ksps;
        PetscInt nn;
        ierr     = PCFieldSplitGetSubKSP(subpc,&nn,&ksps);CHKERRQ(ierr);
        smoother = ksps[0];
        ierr     = KSPGetPC(smoother, &subpc);CHKERRQ(ierr);
        ierr     = PetscFree(ksps);CHKERRQ(ierr);
      }

      /* do my own cheby */
      ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr);
      if (flag) {
        PetscReal emax, emin;
        ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr);
        if (flag && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagonal PC */
        else { /* eigen estimate 'emax' */
          KSP eksp;
          Mat Lmat = Aarr[level];
          Vec bb, xx;

          ierr = MatGetVecs(Lmat, &bb, 0);CHKERRQ(ierr);
          ierr = MatGetVecs(Lmat, &xx, 0);CHKERRQ(ierr);
          {
            PetscRandom rctx;
            ierr = PetscRandomCreate(comm,&rctx);CHKERRQ(ierr);
            ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr);
            ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr);
            ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr);
          }

          /* zeroing out BC rows -- needed for crazy matrices */
          {
            PetscInt    Istart,Iend,ncols,jj,Ii;
            PetscScalar zero = 0.0;
            ierr = MatGetOwnershipRange(Lmat, &Istart, &Iend);CHKERRQ(ierr);
            for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) {
              ierr = MatGetRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr);
              if (ncols <= 1) {
                ierr = VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);CHKERRQ(ierr);
              }
              ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr);
            }
            ierr = VecAssemblyBegin(bb);CHKERRQ(ierr);
            ierr = VecAssemblyEnd(bb);CHKERRQ(ierr);
          }

          ierr = KSPCreate(comm, &eksp);CHKERRQ(ierr);
          ierr = KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);CHKERRQ(ierr);
          ierr = KSPSetNormType(eksp, KSP_NORM_NONE);CHKERRQ(ierr);
          ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr);
          ierr = KSPAppendOptionsPrefix(eksp, "gamg_est_");CHKERRQ(ierr);
          ierr = KSPSetFromOptions(eksp);CHKERRQ(ierr);

          ierr = KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);CHKERRQ(ierr);
          ierr = KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr);
          ierr = KSPSetComputeSingularValues(eksp,PETSC_TRUE);CHKERRQ(ierr);

          /* set PC type to be same as smoother */
          ierr = KSPSetPC(eksp, subpc);CHKERRQ(ierr);

          /* solve - keep stuff out of logging */
          ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr);
          ierr = KSPSolve(eksp, bb, xx);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr);
          ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr);

          ierr = KSPComputeExtremeSingularValues(eksp, &emax, &emin);CHKERRQ(ierr);

          ierr = VecDestroy(&xx);CHKERRQ(ierr);
          ierr = VecDestroy(&bb);CHKERRQ(ierr);
          ierr = KSPDestroy(&eksp);CHKERRQ(ierr);

          if (pc_gamg->verbose > 0) {
            PetscInt N1, tt;
            ierr = MatGetSize(Aarr[level], &N1, &tt);CHKERRQ(ierr);
            PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1);
          }
        }
        {
          PetscInt N1, N0;
          ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr);
          ierr = MatGetSize(Aarr[level+1], &N0, NULL);CHKERRQ(ierr);
          /* heuristic - is this crap? */
          /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */
          emin  = emax * pc_gamg->eigtarget[0];
          emax *= pc_gamg->eigtarget[1];
        }
        ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr);
      } /* setup cheby flag */
    } /* non-coarse levels */

    /* clean up */
    for (level=1; level<pc_gamg->Nlevels; level++) {
      ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr);
      ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr);
    }

    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);

    if (PETSC_TRUE) {
      KSP smoother;  /* PCSetUp_MG seems to insist on setting this to GMRES on the coarse grid */
      ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
      ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    }
  } else {
    KSP smoother;
    if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__);
    ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr);
    ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr);
    ierr = PCSetUp_MG(pc);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ------------------------------------------------------------------------- */
/*
   PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner
   that was created with PCCreate_GAMG().

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set the number of equations to aim for per process on coarse grids
   via processor reduction.

   Not Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the number of equations per process to aim for

   Options Database Key:
.  -pc_gamg_process_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
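
/*
   Illustrative usage sketch (not part of the original source): the process-reduction
   target can be set from code, assuming 'pc' has already been set to PCGAMG,

     ierr = PCGAMGSetProcEqLim(pc,1000);CHKERRQ(ierr);

   which aims for roughly 1000 equations per process on coarse grids before reducing
   the number of active processes, or from the options database with
   -pc_gamg_process_eq_limit 1000.
*/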

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set the maximum number of equations on the coarsest grid.

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of equations

   Options Database Key:
.  -pc_gamg_coarse_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to repartition the coarse grids

   Options Database Key:
.  -pc_gamg_repartition

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseProl"
/*@
   PCGAMGSetReuseProl - Reuse the prolongation operator when the preconditioner is rebuilt

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to reuse the prolongation

   Options Database Key:
.  -pc_gamg_reuse_interpolation

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseProl_GAMG"
PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END
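
/*
   Illustrative usage sketch (not part of the original source): when the matrix values
   change but the nonzero structure and mesh do not (e.g. inside a nonlinear or
   time-stepping loop), the prolongation can be kept and only the Galerkin coarse
   operators recomputed,

     ierr = PCGAMGSetReuseProl(pc,PETSC_TRUE);CHKERRQ(ierr);

   or -pc_gamg_reuse_interpolation true; in that case PCSetUp_GAMG() above takes the
   "just do Galerkin grids" path on subsequent setups instead of rebuilding the hierarchy.
*/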

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use the aggregates from the coarsening as the subdomains of a GASM smoother

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - PETSC_TRUE to use the aggregates

   Options Database Key:
.  -pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the maximum number of levels

   Options Database Key:
.  -pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in the aggregation graph

   Not collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  n - the threshold value

   Options Database Key:
.  -pc_gamg_threshold

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set the solution method; calls the sub-create method

   Collective on PC

   Input Parameters:
+  pc - the preconditioner context
-  type - the GAMG type (GAMGAGG or GAMGGEO)

   Options Database Key:
.  -pc_gamg_type

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);

  PetscFunctionBegin;
  ierr = PetscFunctionListFind(PetscObjectComm((PetscObject)pc),GAMGList,type,PETSC_FALSE,(PetscVoidStarFunction)&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  PetscInt       two      = 2;
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead("GAMG options");CHKERRQ(ierr);
  {
    /* -pc_gamg_verbose */
    ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG",
                           "none", pc_gamg->verbose,
                           &pc_gamg->verbose, NULL);CHKERRQ(ierr);
    /* -pc_gamg_repartition */
    ierr = PetscOptionsBool("-pc_gamg_repartition",
                            "Repartition coarse grids (false)",
                            "PCGAMGRepartitioning",
                            pc_gamg->repart,
                            &pc_gamg->repart,
                            &flag);CHKERRQ(ierr);
    /* -pc_gamg_reuse_interpolation */
    ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation",
                            "Reuse prolongation operator (true)",
                            "PCGAMGReuseProl",
                            pc_gamg->reuse_prol,
                            &pc_gamg->reuse_prol,
                            &flag);CHKERRQ(ierr);
    /* -pc_gamg_use_agg_gasm */
    ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm",
                            "Use aggregation aggregates for GASM smoother (false)",
                            "PCGAMGUseASMAggs",
                            pc_gamg->use_aggs_in_gasm,
                            &pc_gamg->use_aggs_in_gasm,
                            &flag);CHKERRQ(ierr);
    /* -pc_gamg_process_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_process_eq_limit",
                           "Limit (goal) on number of equations per process on coarse grids",
                           "PCGAMGSetProcEqLim",
                           pc_gamg->min_eq_proc,
                           &pc_gamg->min_eq_proc,
                           &flag);CHKERRQ(ierr);
    /* -pc_gamg_coarse_eq_limit */
    ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit",
                           "Limit on number of equations for the coarse grid",
                           "PCGAMGSetCoarseEqLim",
                           pc_gamg->coarse_eq_limit,
                           &pc_gamg->coarse_eq_limit,
                           &flag);CHKERRQ(ierr);
    /* -pc_gamg_threshold */
    ierr = PetscOptionsReal("-pc_gamg_threshold",
                            "Relative threshold to use for dropping edges in aggregation graph",
                            "PCGAMGSetThreshold",
                            pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag);CHKERRQ(ierr);
    if (flag && pc_gamg->verbose) {
      ierr = PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);CHKERRQ(ierr);
    }

    ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr);
PetscOptionsInt("-pc_mg_levels", 1439 "Set number of MG levels", 1440 "PCGAMGSetNlevels", 1441 pc_gamg->Nlevels, 1442 &pc_gamg->Nlevels, 1443 &flag);CHKERRQ(ierr); 1444 } 1445 ierr = PetscOptionsTail();CHKERRQ(ierr); 1446 PetscFunctionReturn(0); 1447 } 1448 1449 /* -------------------------------------------------------------------------- */ 1450 /*MC 1451 PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework. 1452 - This is the entry point to GAMG, registered in pcregis.c 1453 1454 Options Database Keys: 1455 Multigrid options(inherited) 1456 + -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType) 1457 . -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp) 1458 . -pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown) 1459 - -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade 1460 1461 Level: intermediate 1462 1463 Concepts: multigrid 1464 1465 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType, 1466 PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(), 1467 PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(), 1468 PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(), 1469 PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR() 1470 M*/ 1471 EXTERN_C_BEGIN 1472 #undef __FUNCT__ 1473 #define __FUNCT__ "PCCreate_GAMG" 1474 PetscErrorCode PCCreate_GAMG(PC pc) 1475 { 1476 PetscErrorCode ierr; 1477 PC_GAMG *pc_gamg; 1478 PC_MG *mg; 1479 #if defined PETSC_GAMG_USE_LOG 1480 static long count = 0; 1481 #endif 1482 1483 PetscFunctionBegin; 1484 /* PCGAMG is an inherited class of PCMG. Initialize pc as PCMG */ 1485 ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */ 1486 ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr); 1487 1488 /* create a supporting struct and attach it to pc */ 1489 ierr = PetscNewLog(pc, PC_GAMG, &pc_gamg);CHKERRQ(ierr); 1490 mg = (PC_MG*)pc->data; 1491 mg->galerkin = 2; /* Use Galerkin, but it is computed externally */ 1492 mg->innerctx = pc_gamg; 1493 1494 pc_gamg->setup_count = 0; 1495 /* these should be in subctx but repartitioning needs simple arrays */ 1496 pc_gamg->data_sz = 0; 1497 pc_gamg->data = 0; 1498 1499 /* register AMG type */ 1500 if (!GAMGList) { 1501 ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&GAMGList,GAMGGEO,"PCCreateGAMG_GEO",(void (*)(void))PCCreateGAMG_GEO);CHKERRQ(ierr); 1502 ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&GAMGList,GAMGAGG,"PCCreateGAMG_AGG",(void (*)(void))PCCreateGAMG_AGG);CHKERRQ(ierr); 1503 } 1504 1505 /* overwrite the pointers of PCMG by the functions of base class PCGAMG */ 1506 pc->ops->setfromoptions = PCSetFromOptions_GAMG; 1507 pc->ops->setup = PCSetUp_GAMG; 1508 pc->ops->reset = PCReset_GAMG; 1509 pc->ops->destroy = PCDestroy_GAMG; 1510 1511 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1512 "PCGAMGSetProcEqLim_C", 1513 "PCGAMGSetProcEqLim_GAMG", 1514 PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr); 1515 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1516 "PCGAMGSetCoarseEqLim_C", 1517 "PCGAMGSetCoarseEqLim_GAMG", 1518 PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr); 1519 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1520 "PCGAMGSetRepartitioning_C", 1521 "PCGAMGSetRepartitioning_GAMG", 1522 PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr); 1523 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 
1524 "PCGAMGSetReuseProl_C", 1525 "PCGAMGSetReuseProl_GAMG", 1526 PCGAMGSetReuseProl_GAMG);CHKERRQ(ierr); 1527 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1528 "PCGAMGSetUseASMAggs_C", 1529 "PCGAMGSetUseASMAggs_GAMG", 1530 PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr); 1531 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1532 "PCGAMGSetThreshold_C", 1533 "PCGAMGSetThreshold_GAMG", 1534 PCGAMGSetThreshold_GAMG);CHKERRQ(ierr); 1535 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc, 1536 "PCGAMGSetType_C", 1537 "PCGAMGSetType_GAMG", 1538 PCGAMGSetType_GAMG);CHKERRQ(ierr); 1539 pc_gamg->repart = PETSC_FALSE; 1540 pc_gamg->reuse_prol = PETSC_TRUE; 1541 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1542 pc_gamg->min_eq_proc = 50; 1543 pc_gamg->coarse_eq_limit = 800; 1544 pc_gamg->threshold = 0.001; 1545 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1546 pc_gamg->verbose = 0; 1547 pc_gamg->emax_id = -1; 1548 pc_gamg->eigtarget[0] = 0.05; 1549 pc_gamg->eigtarget[1] = 1.05; 1550 1551 /* private events */ 1552 #if defined PETSC_GAMG_USE_LOG 1553 if (count++ == 0) { 1554 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1555 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1556 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1557 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1558 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1559 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1560 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1561 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1562 ierr = PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1563 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1564 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1565 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1566 ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1567 ierr = PetscLogEventRegister(" repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1568 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1569 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1570 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1571 1572 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1573 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1574 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1575 /* create timer stages */ 1576 #if defined GAMG_STAGES 1577 { 1578 char str[32]; 1579 PetscInt lidx; 1580 sprintf(str,"MG Level %d (finest)",0); 1581 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1582 for (lidx=1; lidx<9; lidx++) { 1583 sprintf(str,"MG Level %d",lidx); 1584 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 
      }
    }
#endif
  }
#endif
  /* general events */
#if defined PETSC_USE_LOG
  ierr = PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);CHKERRQ(ierr);
  ierr = PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);CHKERRQ(ierr);
#endif

  /* instantiate derived type */
  ierr = PetscOptionsHead("GAMG options");CHKERRQ(ierr);
  {
    char tname[256] = GAMGAGG;
    ierr = PetscOptionsList("-pc_gamg_type","Type of GAMG method","PCGAMGSetType",GAMGList, tname, tname, sizeof(tname), NULL);CHKERRQ(ierr);
    ierr = PCGAMGSetType(pc, tname);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END