/*
 GAMG geometric-algebraic multigrid PC - Mark Adams 2011
*/
#include "petsc-private/matimpl.h"
#include <../src/ksp/pc/impls/gamg/gamg.h>        /*I "petscpc.h" I*/
#include <petsc-private/kspimpl.h>

/* optional fine-grained event logging of the GAMG setup phases (SET1..SET15) */
#if defined PETSC_GAMG_USE_LOG
PetscLogEvent petsc_gamg_setup_events[NUM_SET];
#endif

/* log events for the method-specific (AGG/GEO) graph/coarsen/prolongator stages */
#if defined PETSC_USE_LOG
PetscLogEvent PC_GAMGGgraph_AGG;
PetscLogEvent PC_GAMGGgraph_GEO;
PetscLogEvent PC_GAMGCoarsen_AGG;
PetscLogEvent PC_GAMGCoarsen_GEO;
PetscLogEvent PC_GAMGProlongator_AGG;
PetscLogEvent PC_GAMGProlongator_GEO;
PetscLogEvent PC_GAMGOptprol_AGG;
PetscLogEvent PC_GAMGKKTProl_AGG;
#endif

/* hard upper bound on the number of multigrid levels GAMG will build */
#define GAMG_MAXLEVELS 30

/* #define GAMG_STAGES */
#if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES)
static PetscLogStage gamg_stages[GAMG_MAXLEVELS];
#endif

/* registry of GAMG sub-types and package-init flag */
static PetscFunctionList GAMGList = 0;
static PetscBool PCGAMGPackageInitialized;

/* ----------------------------------------------------------------------------- */
#undef __FUNCT__
#define __FUNCT__ "PCReset_GAMG"
/*
   PCReset_GAMG - Free the coarse-grid "data" arrays (pc_gamg->data and
   pc_gamg->orig_data) cached on the GAMG context.

   Input Parameter:
.  pc - the preconditioner context

   Notes:
   pc_gamg->data is normally freed at the end of PCSetUp_GAMG, so finding it
   non-NULL here is unexpected (a warning is printed) but still cleaned up.
*/
PetscErrorCode PCReset_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (pc_gamg->data) { /* this should not happen, cleaned up in SetUp */
    PetscPrintf(PetscObjectComm((PetscObject)pc),"***[%d]%s this should not happen, cleaned up in SetUp\n",0,__FUNCT__);
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
  }
  pc_gamg->data = NULL; pc_gamg->data_sz = 0;

  if (pc_gamg->orig_data) {
    ierr = PetscFree(pc_gamg->orig_data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* private 2x2 Mat Nest for Stokes */
typedef struct {
  Mat A11,A21,A12,Amat;  /* primal block, off-diagonal blocks, and the full matrix */
  IS  prim_is,constr_is; /* row index sets of the primal / constraint equations */
} GAMGKKTMat;

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatCreate"
/*
   GAMGKKTMatCreate - Populate a GAMGKKTMat view of matrix A.

   Input Parameters:
+  A - the (possibly KKT/saddle-point) system matrix
-  iskkt - PETSC_TRUE to split A into primal/constraint blocks

   Output Parameter:
.  out - filled-in GAMGKKTMat; when iskkt, A11/A12/A21 and the two ISs are
         newly created (caller frees via GAMGKKTMatDestroy); otherwise A11
         simply aliases A and all other members are NULL.
*/
static PetscErrorCode GAMGKKTMatCreate(Mat A, PetscBool iskkt, GAMGKKTMat *out)
{
  PetscFunctionBegin;
  out->Amat = A;
  if (iskkt) {
    PetscErrorCode ierr;
    IS             is_constraint, is_prime;
    PetscInt       nmin,nmax;

    ierr = MatGetOwnershipRange(A, &nmin, &nmax);CHKERRQ(ierr);
    /* constraint rows are identified by their zero diagonal entries */
    ierr = MatFindZeroDiagonals(A, &is_constraint);CHKERRQ(ierr);
    ierr = ISComplement(is_constraint, nmin, nmax, &is_prime);CHKERRQ(ierr);

    out->prim_is   = is_prime;
    out->constr_is = is_constraint;

    /* extract the three blocks used by the KKT prolongator; A22 is not needed */
    ierr = MatGetSubMatrix(A, is_prime, is_prime, MAT_INITIAL_MATRIX, &out->A11);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A, is_prime, is_constraint, MAT_INITIAL_MATRIX, &out->A12);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(A, is_constraint, is_prime, MAT_INITIAL_MATRIX, &out->A21);CHKERRQ(ierr);
  } else {
    out->A11       = A;  /* non-KKT case: alias, nothing allocated */
    out->A21       = NULL;
    out->A12       = NULL;
    out->prim_is   = NULL;
    out->constr_is = NULL;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "GAMGKKTMatDestroy"
/*
   GAMGKKTMatDestroy - Release everything GAMGKKTMatCreate allocated.
   A11 is only destroyed when it is a real sub-matrix (not an alias of Amat);
   MatDestroy/ISDestroy are no-ops on the NULL members of the non-KKT case.
*/
static PetscErrorCode GAMGKKTMatDestroy(GAMGKKTMat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (mat->A11 && mat->A11 != mat->Amat) {
    ierr = MatDestroy(&mat->A11);CHKERRQ(ierr);
  }
  ierr = MatDestroy(&mat->A21);CHKERRQ(ierr);
  ierr = MatDestroy(&mat->A12);CHKERRQ(ierr);

  ierr = ISDestroy(&mat->prim_is);CHKERRQ(ierr);
  ierr = ISDestroy(&mat->constr_is);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   createLevel: create coarse op with RAP.  repartition and/or reduce number
     of active processors.

   Input Parameter:
   . pc - parameters + side effect: coarse data in 'pc_gamg->data' and
          'pc_gamg->data_sz' are changed via repartitioning/reduction.
   . Amat_fine - matrix on this fine (k) level
   . cr_bs - coarse block size
   . isLast - PETSC_TRUE when building the coarsest level (forces reduction to one process)
   . stokes - PETSC_TRUE for Stokes/KKT systems (primal index set handled separately)
   In/Output Parameter:
   . a_P_inout - prolongation operator to the next level (k-->k-1)
   . a_nactive_proc - number of active procs
   Output Parameter:
   .
 a_Amat_crs - coarse matrix that is created (k-1)
*/

#undef __FUNCT__
#define __FUNCT__ "createLevel"
static PetscErrorCode createLevel(const PC pc,const Mat Amat_fine,const PetscInt cr_bs,const PetscBool isLast,const PetscBool stokes,Mat *a_P_inout,Mat *a_Amat_crs,PetscMPIInt *a_nactive_proc)
{
  PetscErrorCode  ierr;
  PC_MG           *mg      = (PC_MG*)pc->data;
  PC_GAMG         *pc_gamg = (PC_GAMG*)mg->innerctx;
  const PetscBool repart   = pc_gamg->repart;
  const PetscInt  min_eq_proc = pc_gamg->min_eq_proc, coarse_max = pc_gamg->coarse_eq_limit;
  Mat             Cmat,Pold=*a_P_inout;
  MPI_Comm        comm;
  PetscMPIInt     rank,size,new_size,nactive=*a_nactive_proc;
  PetscInt        ncrs_eq,ncrs_prim,f_bs;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)Amat_fine,&comm);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MatGetBlockSize(Amat_fine, &f_bs);CHKERRQ(ierr);
  /* RAP: Cmat = Pold' * Amat_fine * Pold */
  ierr = MatPtAP(Amat_fine, Pold, MAT_INITIAL_MATRIX, 2.0, &Cmat);CHKERRQ(ierr);

  /* set 'ncrs_prim' (nodes), 'ncrs_eq' (equations) */
  ncrs_prim = pc_gamg->data_sz/pc_gamg->data_cell_cols/pc_gamg->data_cell_rows;
  if (pc_gamg->data_sz % (pc_gamg->data_cell_cols*pc_gamg->data_cell_rows)) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_PLIB,"pc_gamg->data_sz %D not divisible by (pc_gamg->data_cell_cols %D *pc_gamg->data_cell_rows %D)",pc_gamg->data_sz,pc_gamg->data_cell_cols,pc_gamg->data_cell_rows);
  ierr = MatGetLocalSize(Cmat, &ncrs_eq, NULL);CHKERRQ(ierr);

  /* get number of PEs to make active 'new_size', reduce, can be any integer 1-P */
  {
    PetscInt ncrs_eq_glob;
    ierr     = MatGetSize(Cmat, &ncrs_eq_glob, NULL);CHKERRQ(ierr);
    new_size = (PetscMPIInt)((float)ncrs_eq_glob/(float)min_eq_proc + 0.5); /* hardwire min. number of eq/proc */
    if (new_size == 0 || ncrs_eq_glob < coarse_max) new_size = 1;
    else if (new_size >= nactive) new_size = nactive; /* no change, rare */
    if (isLast) new_size = 1;
  }

  if (!repart && new_size==nactive) *a_Amat_crs = Cmat; /* output - no repartitioning or reduction - could bail here */
  else {
    const PetscInt *idx,ndata_rows=pc_gamg->data_cell_rows,ndata_cols=pc_gamg->data_cell_cols,node_data_sz=ndata_rows*ndata_cols;
    PetscInt       *counts,*newproc_idx,ii,jj,kk,strideNew,*tidx,ncrs_prim_new,ncrs_eq_new,nloc_old;
    IS             is_eq_newproc,is_eq_newproc_prim,is_eq_num,is_eq_num_prim,isscat,new_eq_indices;
    VecScatter     vecscat;
    PetscScalar    *array;
    Vec            src_crd, dest_crd;

    nloc_old = ncrs_eq/cr_bs;
    if (ncrs_eq % cr_bs) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ncrs_eq %D not divisible by cr_bs %D",ncrs_eq,cr_bs);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif
    /* make 'is_eq_newproc': maps each local equation to its destination process */
    ierr = PetscMalloc(size*sizeof(PetscInt), &counts);CHKERRQ(ierr);
    if (repart && !stokes) {
      /* Repartition Cmat_{k} and move columns of P^{k}_{k-1} and coordinates of primal part accordingly */
      Mat adj;

      if (pc_gamg->verbose>0) {
        if (pc_gamg->verbose==1) PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,ncrs_eq);
        else {
          PetscInt n;
          ierr = MPI_Allreduce(&ncrs_eq, &n, 1, MPIU_INT, MPI_SUM, comm);CHKERRQ(ierr);
          PetscPrintf(comm,"\t[%d]%s repartition: size (active): %d --> %d, neq = %d\n",rank,__FUNCT__,*a_nactive_proc,new_size,n);
        }
      }

      /* get 'adj': adjacency graph handed to the partitioner */
      if (cr_bs == 1) {
        ierr = MatConvert(Cmat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);
      } else {
        /* make a scalar (one row per block/node) matrix to partition (no Stokes here) */
        Mat               tMat;
        PetscInt          Istart_crs,Iend_crs,ncols,jj,Ii;
        const PetscScalar *vals;
        const PetscInt    *idx;
        PetscInt          *d_nnz, *o_nnz, M, N;
        static PetscInt   llev = 0;

        ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &d_nnz);CHKERRQ(ierr);
        ierr = PetscMalloc(ncrs_prim*sizeof(PetscInt), &o_nnz);CHKERRQ(ierr);
        ierr = MatGetOwnershipRange(Cmat, &Istart_crs, &Iend_crs);CHKERRQ(ierr);
        ierr = MatGetSize(Cmat, &M, &N);CHKERRQ(ierr);
        /* estimate nnz per block row from the first equation of each block, clamped to valid range */
        for (Ii = Istart_crs, jj = 0; Ii < Iend_crs; Ii += cr_bs, jj++) {
          ierr = MatGetRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          d_nnz[jj] = ncols/cr_bs;
          o_nnz[jj] = ncols/cr_bs;
          ierr = MatRestoreRow(Cmat,Ii,&ncols,0,0);CHKERRQ(ierr);
          if (d_nnz[jj] > ncrs_prim) d_nnz[jj] = ncrs_prim;
          if (o_nnz[jj] > (M/cr_bs-ncrs_prim)) o_nnz[jj] = M/cr_bs-ncrs_prim;
        }

        ierr = MatCreate(comm, &tMat);CHKERRQ(ierr);
        ierr = MatSetSizes(tMat, ncrs_prim, ncrs_prim,PETSC_DETERMINE, PETSC_DETERMINE);CHKERRQ(ierr);
        ierr = MatSetType(tMat,MATAIJ);CHKERRQ(ierr);
        /* both preallocations are set; only the one matching the actual (seq/mpi) type takes effect */
        ierr = MatSeqAIJSetPreallocation(tMat,0,d_nnz);CHKERRQ(ierr);
        ierr = MatMPIAIJSetPreallocation(tMat,0,d_nnz,0,o_nnz);CHKERRQ(ierr);
        ierr = PetscFree(d_nnz);CHKERRQ(ierr);
        ierr = PetscFree(o_nnz);CHKERRQ(ierr);

        /* collapse each cr_bs x cr_bs block of Cmat to a single (summed) entry of tMat */
        for (ii = Istart_crs; ii < Iend_crs; ii++) {
          PetscInt dest_row = ii/cr_bs;
          ierr = MatGetRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
          for (jj = 0; jj < ncols; jj++) {
            PetscInt    dest_col = idx[jj]/cr_bs;
            PetscScalar v        = 1.0;
            ierr = MatSetValues(tMat,1,&dest_row,1,&dest_col,&v,ADD_VALUES);CHKERRQ(ierr);
          }
          ierr = MatRestoreRow(Cmat,ii,&ncols,&idx,&vals);CHKERRQ(ierr);
        }
        ierr = MatAssemblyBegin(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
        ierr = MatAssemblyEnd(tMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

        if (llev++ == -1) { /* disabled debug dump of the partitioning matrix */
          PetscViewer viewer; char fname[32];
          ierr = PetscSNPrintf(fname,sizeof(fname),"part_mat_%D.mat",llev);CHKERRQ(ierr);
          PetscViewerBinaryOpen(comm,fname,FILE_MODE_WRITE,&viewer);
          ierr = MatView(tMat, viewer);CHKERRQ(ierr);
          ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
        }

        ierr = MatConvert(tMat, MATMPIADJ, MAT_INITIAL_MATRIX, &adj);CHKERRQ(ierr);

        ierr = MatDestroy(&tMat);CHKERRQ(ierr);
      } /* create 'adj' */

      { /* partition: get newproc_idx */
        char            prefix[256];
        const char      *pcpre;
        const PetscInt  *is_idx;
        MatPartitioning mpart;
        IS              proc_is;
        PetscInt        targetPE;

        ierr = MatPartitioningCreate(comm, &mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart, adj);CHKERRQ(ierr);
        ierr = PCGetOptionsPrefix(pc, &pcpre);CHKERRQ(ierr);
        ierr = PetscSNPrintf(prefix,sizeof(prefix),"%spc_gamg_",pcpre ? pcpre : "");CHKERRQ(ierr);
        ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
        ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart, new_size);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart, &proc_is);CHKERRQ(ierr);
        ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);

        /* collect IS info: expand node partition to equation space (cr_bs eqs per node) */
        ierr = PetscMalloc(ncrs_eq*sizeof(PetscInt), &newproc_idx);CHKERRQ(ierr);
        ierr = ISGetIndices(proc_is, &is_idx);CHKERRQ(ierr);
        targetPE = 1; /* bring to "front" of machine */
        /*targetPE = size/new_size;*/ /* spread partitioning across machine */
        for (kk = jj = 0 ; kk < nloc_old ; kk++) {
          for (ii = 0 ; ii < cr_bs ; ii++, jj++) {
            newproc_idx[jj] = is_idx[kk] * targetPE; /* distribution */
          }
        }
        ierr = ISRestoreIndices(proc_is, &is_idx);CHKERRQ(ierr);
        ierr = ISDestroy(&proc_is);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&adj);CHKERRQ(ierr);

      ierr = ISCreateGeneral(comm, ncrs_eq, newproc_idx, PETSC_COPY_VALUES, &is_eq_newproc);CHKERRQ(ierr);
      if (newproc_idx != 0) {
        ierr = PetscFree(newproc_idx);CHKERRQ(ierr);
      }
    } else { /* simple aggregation of parts -- 'is_eq_newproc' */
      PetscInt rfactor,targetPE;
      /* find factor: largest divisor of 'size' bringing the active count closest to new_size */
      if (new_size == 1) rfactor = size; /* easy */
      else {
        PetscReal best_fact = 0.;
        jj = -1;
        for (kk = 1 ; kk <= size ; kk++) {
          if (size%kk==0) { /* a candidate */
            PetscReal nactpe = (PetscReal)size/(PetscReal)kk, fact = nactpe/(PetscReal)new_size;
            if (fact > 1.0) fact = 1./fact; /* keep fact < 1 */
            if (fact > best_fact) {
              best_fact = fact; jj = kk;
            }
          }
        }
        if (jj != -1) rfactor = jj;
        else rfactor = 1; /* does this happen .. a prime */
      }
      new_size = size/rfactor;

      if (new_size==nactive) {
        *a_Amat_crs = Cmat; /* output - no repartitioning or reduction, bail out because nested here */
        ierr = PetscFree(counts);CHKERRQ(ierr);
        if (pc_gamg->verbose>0) {
          PetscPrintf(comm,"\t[%d]%s aggregate processors noop: new_size=%d, neq(loc)=%d\n",rank,__FUNCT__,new_size,ncrs_eq);
        }
        PetscFunctionReturn(0);
      }

      if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s number of equations (loc) %d with simple aggregation\n",rank,__FUNCT__,ncrs_eq);
      targetPE = rank/rfactor; /* every rfactor consecutive ranks merge onto one */
      ierr     = ISCreateStride(comm, ncrs_eq, targetPE, 0, &is_eq_newproc);CHKERRQ(ierr);

      if (stokes) {
        ierr = ISCreateStride(comm, ncrs_prim*cr_bs, targetPE, 0, &is_eq_newproc_prim);CHKERRQ(ierr);
      }
    } /* end simple 'is_eq_newproc' */

    /*
      Create an index set from the is_eq_newproc index set to indicate the mapping TO
    */
    ierr = ISPartitioningToNumbering(is_eq_newproc, &is_eq_num);CHKERRQ(ierr);
    if (stokes) {
      ierr = ISPartitioningToNumbering(is_eq_newproc_prim, &is_eq_num_prim);CHKERRQ(ierr);
    } else is_eq_num_prim = is_eq_num; /* alias; guarded against double-destroy below */
    /*
      Determine how many equations/vertices are assigned to each processor
    */
    ierr        = ISPartitioningCount(is_eq_newproc, size, counts);CHKERRQ(ierr);
    ncrs_eq_new = counts[rank];
    ierr        = ISDestroy(&is_eq_newproc);CHKERRQ(ierr);
    if (stokes) {
      ierr = ISPartitioningCount(is_eq_newproc_prim, size, counts);CHKERRQ(ierr);
      ierr = ISDestroy(&is_eq_newproc_prim);CHKERRQ(ierr);
      ncrs_prim_new = counts[rank]/cr_bs; /* nodes */
    } else ncrs_prim_new = ncrs_eq_new/cr_bs; /* eqs */

    ierr = PetscFree(counts);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET12],0,0,0,0);CHKERRQ(ierr);
#endif

    /* move data (for primal equations only) */
    /* Create a vector to contain the newly ordered element information */
    ierr = VecCreate(comm, &dest_crd);CHKERRQ(ierr);
    ierr = VecSetSizes(dest_crd, node_data_sz*ncrs_prim_new, PETSC_DECIDE);CHKERRQ(ierr);
    ierr = VecSetFromOptions(dest_crd);CHKERRQ(ierr); /* this is needed! */
    /*
      There are 'ndata_rows*ndata_cols' data items per node, (one can think of the vectors of having
      a block size of ...). Note, ISs are expanded into equation space by 'cr_bs'.
    */
    ierr = PetscMalloc((ncrs_prim*node_data_sz)*sizeof(PetscInt), &tidx);CHKERRQ(ierr);
    ierr = ISGetIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    for (ii=0,jj=0; ii<ncrs_prim; ii++) {
      PetscInt id = idx[ii*cr_bs]/cr_bs; /* get node back */
      for (kk=0; kk<node_data_sz; kk++, jj++) tidx[jj] = id*node_data_sz + kk;
    }
    ierr = ISRestoreIndices(is_eq_num_prim, &idx);CHKERRQ(ierr);
    ierr = ISCreateGeneral(comm, node_data_sz*ncrs_prim, tidx, PETSC_COPY_VALUES, &isscat);CHKERRQ(ierr);
    ierr = PetscFree(tidx);CHKERRQ(ierr);
    /*
      Create a vector to contain the original vertex information for each element
    */
    ierr = VecCreateSeq(PETSC_COMM_SELF, node_data_sz*ncrs_prim, &src_crd);CHKERRQ(ierr);
    /* repack pc_gamg->data (column-major per level) into node-major order for the scatter */
    for (jj=0; jj<ndata_cols; jj++) {
      const PetscInt stride0=ncrs_prim*pc_gamg->data_cell_rows;
      for (ii=0; ii<ncrs_prim; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt    ix = ii*ndata_rows + kk + jj*stride0, jx = ii*node_data_sz + kk*ndata_cols + jj;
          PetscScalar tt = (PetscScalar)pc_gamg->data[ix];
          ierr = VecSetValues(src_crd, 1, &jx, &tt, INSERT_VALUES);CHKERRQ(ierr);
        }
      }
    }
    ierr = VecAssemblyBegin(src_crd);CHKERRQ(ierr);
    ierr = VecAssemblyEnd(src_crd);CHKERRQ(ierr);
    /*
      Scatter the element vertex information (still in the original vertex ordering)
      to the correct processor
    */
    ierr = VecScatterCreate(src_crd, NULL, dest_crd, isscat, &vecscat);CHKERRQ(ierr);
    ierr = ISDestroy(&isscat);CHKERRQ(ierr);
    ierr = VecScatterBegin(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(vecscat,src_crd,dest_crd,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&vecscat);CHKERRQ(ierr);
    ierr = VecDestroy(&src_crd);CHKERRQ(ierr);
    /*
      Put the element vertex data into a new allocation of the gdata->ele
    */
    ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr);
    ierr = PetscMalloc(node_data_sz*ncrs_prim_new*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr);

    pc_gamg->data_sz = node_data_sz*ncrs_prim_new;
    strideNew        = ncrs_prim_new*ndata_rows;

    /* unpack back from node-major to the layout pc_gamg->data uses */
    ierr = VecGetArray(dest_crd, &array);CHKERRQ(ierr);
    for (jj=0; jj<ndata_cols; jj++) {
      for (ii=0; ii<ncrs_prim_new; ii++) {
        for (kk=0; kk<ndata_rows; kk++) {
          PetscInt ix = ii*ndata_rows + kk + jj*strideNew, jx = ii*node_data_sz + kk*ndata_cols + jj;
          pc_gamg->data[ix] = PetscRealPart(array[jx]);
        }
      }
    }
    ierr = VecRestoreArray(dest_crd, &array);CHKERRQ(ierr);
    ierr = VecDestroy(&dest_crd);CHKERRQ(ierr);

    /* move A and P (columns) with new layout */
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
#endif

    /*
      Invert for MatGetSubMatrix
    */
    ierr = ISInvertPermutation(is_eq_num, ncrs_eq_new, &new_eq_indices);CHKERRQ(ierr);
    ierr = ISSort(new_eq_indices);CHKERRQ(ierr); /* is this needed? */
    ierr = ISSetBlockSize(new_eq_indices, cr_bs);CHKERRQ(ierr);
    if (is_eq_num != is_eq_num_prim) {
      ierr = ISDestroy(&is_eq_num_prim);CHKERRQ(ierr); /* could be same as 'is_eq_num' */
    }
    ierr = ISDestroy(&is_eq_num);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET13],0,0,0,0);CHKERRQ(ierr);
    ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* 'a_Amat_crs' output */
    {
      Mat mat;
      ierr        = MatGetSubMatrix(Cmat, new_eq_indices, new_eq_indices, MAT_INITIAL_MATRIX, &mat);CHKERRQ(ierr);
      *a_Amat_crs = mat;

      if (!PETSC_TRUE) { /* disabled debug output of block sizes */
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Cmat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Old Mat rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(mat, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s New Mat rbs=%d cbs=%d cr_bs=%d\n",rank,__FUNCT__,rbs,cbs,cr_bs);CHKERRQ(ierr);
      }
    }
    ierr = MatDestroy(&Cmat);CHKERRQ(ierr);

#if defined PETSC_GAMG_USE_LOG
    ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET14],0,0,0,0);CHKERRQ(ierr);
#endif
    /* prolongator: permute P's columns to the repartitioned coarse numbering */
    {
      IS       findices;
      PetscInt Istart,Iend;
      Mat      Pnew;
      ierr = MatGetOwnershipRange(Pold, &Istart, &Iend);CHKERRQ(ierr);
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = ISCreateStride(comm,Iend-Istart,Istart,1,&findices);CHKERRQ(ierr);
      ierr = ISSetBlockSize(findices,f_bs);CHKERRQ(ierr);
      ierr = MatGetSubMatrix(Pold, findices, new_eq_indices, MAT_INITIAL_MATRIX, &Pnew);CHKERRQ(ierr);
      ierr = ISDestroy(&findices);CHKERRQ(ierr);

      if (!PETSC_TRUE) { /* disabled debug output of block sizes */
        PetscInt cbs, rbs;
        ierr = MatGetBlockSizes(Pold, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pold rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
        ierr = MatGetBlockSizes(Pnew, &rbs, &cbs);CHKERRQ(ierr);
        ierr = PetscPrintf(MPI_COMM_SELF,"[%d]%s Pnew rbs=%d cbs=%d\n",rank,__FUNCT__,rbs,cbs);CHKERRQ(ierr);
      }
#if defined PETSC_GAMG_USE_LOG
      ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET15],0,0,0,0);CHKERRQ(ierr);
#endif
      ierr = MatDestroy(a_P_inout);CHKERRQ(ierr);

      /* output - repartitioned */
      *a_P_inout = Pnew;
    }
    ierr = ISDestroy(&new_eq_indices);CHKERRQ(ierr);

    *a_nactive_proc = new_size; /* output */
  }

  /* output matrix data (disabled MATLAB dump of Amat_fine / Cmat) */
  if (!PETSC_TRUE) {
    PetscViewer viewer; char fname[32]; static int llev=0; Cmat = *a_Amat_crs;
    if (llev==0) {
      sprintf(fname,"Cmat_%d.m",llev++);
      PetscViewerASCIIOpen(comm,fname,&viewer);
      ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
      ierr = MatView(Amat_fine, viewer);CHKERRQ(ierr);
      ierr = PetscViewerDestroy(&viewer);
    }
    sprintf(fname,"Cmat_%d.m",llev++);
    PetscViewerASCIIOpen(comm,fname,&viewer);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_MATLAB);CHKERRQ(ierr);
    ierr = MatView(Cmat, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*
   PCSetUp_GAMG - Prepares for the use of the GAMG preconditioner
    by setting data structures and options.

   Input Parameter:
.  pc - the preconditioner context

   Application Interface Routine: PCSetUp()

   Notes:
   The interface routine PCSetUp() is not usually called directly by
   the user, but instead is called by PCApply() if necessary.
524 */ 525 #undef __FUNCT__ 526 #define __FUNCT__ "PCSetUp_GAMG" 527 PetscErrorCode PCSetUp_GAMG(PC pc) 528 { 529 PetscErrorCode ierr; 530 PC_MG *mg = (PC_MG*)pc->data; 531 PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx; 532 Mat Pmat = pc->pmat; 533 PetscInt fine_level,level,level1,bs,M,qq,lidx,nASMBlocksArr[GAMG_MAXLEVELS]; 534 MPI_Comm comm; 535 PetscMPIInt rank,size,nactivepe; 536 Mat Aarr[GAMG_MAXLEVELS],Parr[GAMG_MAXLEVELS]; 537 PetscReal emaxs[GAMG_MAXLEVELS]; 538 IS *ASMLocalIDsArr[GAMG_MAXLEVELS]; 539 GAMGKKTMat kktMatsArr[GAMG_MAXLEVELS]; 540 PetscLogDouble nnz0=0.,nnztot=0.; 541 MatInfo info; 542 PetscBool stokes = PETSC_FALSE, redo_mesh_setup = (PetscBool)(!pc_gamg->reuse_prol); 543 544 PetscFunctionBegin; 545 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 546 ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 547 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 548 549 if (pc_gamg->verbose>2) PetscPrintf(comm,"[%d]%s pc_gamg->setup_count=%d pc->setupcalled=%d\n",rank,__FUNCT__,pc_gamg->setup_count,pc->setupcalled); 550 551 if (pc_gamg->setup_count++ > 0) { 552 if (redo_mesh_setup) { 553 /* reset everything */ 554 ierr = PCReset_MG(pc);CHKERRQ(ierr); 555 pc->setupcalled = 0; 556 } else { 557 PC_MG_Levels **mglevels = mg->levels; 558 /* just do Galerkin grids */ 559 Mat B,dA,dB; 560 561 if (!pc->setupcalled) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"PCSetUp() has not been called yet"); 562 if (pc_gamg->Nlevels > 1) { 563 /* currently only handle case where mat and pmat are the same on coarser levels */ 564 ierr = KSPGetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,&dA,&dB,NULL);CHKERRQ(ierr); 565 /* (re)set to get dirty flag */ 566 ierr = KSPSetOperators(mglevels[pc_gamg->Nlevels-1]->smoothd,dA,dB,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 567 568 for (level=pc_gamg->Nlevels-2; level>=0; level--) { 569 /* the first time through the matrix structure has changed from repartitioning */ 570 if (pc_gamg->setup_count==2 && (pc_gamg->repart || level==0)) { 571 
ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_INITIAL_MATRIX,1.0,&B);CHKERRQ(ierr); 572 ierr = MatDestroy(&mglevels[level]->A);CHKERRQ(ierr); 573 574 mglevels[level]->A = B; 575 } else { 576 ierr = KSPGetOperators(mglevels[level]->smoothd,NULL,&B,NULL);CHKERRQ(ierr); 577 ierr = MatPtAP(dB,mglevels[level+1]->interpolate,MAT_REUSE_MATRIX,1.0,&B);CHKERRQ(ierr); 578 } 579 ierr = KSPSetOperators(mglevels[level]->smoothd,B,B,SAME_NONZERO_PATTERN);CHKERRQ(ierr); 580 dB = B; 581 } 582 } 583 584 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 585 586 /* PCSetUp_MG seems to insists on setting this to GMRES */ 587 ierr = KSPSetType(mglevels[0]->smoothd, KSPPREONLY);CHKERRQ(ierr); 588 PetscFunctionReturn(0); 589 } 590 } 591 592 ierr = PetscOptionsGetBool(((PetscObject)pc)->prefix,"-pc_fieldsplit_detect_saddle_point",&stokes,NULL);CHKERRQ(ierr); 593 594 ierr = GAMGKKTMatCreate(Pmat, stokes, &kktMatsArr[0]);CHKERRQ(ierr); 595 596 if (!pc_gamg->data) { 597 if (pc_gamg->orig_data) { 598 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 599 ierr = MatGetLocalSize(Pmat, &qq, NULL);CHKERRQ(ierr); 600 601 pc_gamg->data_sz = (qq/bs)*pc_gamg->orig_data_cell_rows*pc_gamg->orig_data_cell_cols; 602 pc_gamg->data_cell_rows = pc_gamg->orig_data_cell_rows; 603 pc_gamg->data_cell_cols = pc_gamg->orig_data_cell_cols; 604 605 ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->data);CHKERRQ(ierr); 606 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->data[qq] = pc_gamg->orig_data[qq]; 607 } else { 608 if (!pc_gamg->ops->createdefaultdata) SETERRQ(comm,PETSC_ERR_PLIB,"'createdefaultdata' not set(?) 
need to support NULL data"); 609 if (stokes) SETERRQ(comm,PETSC_ERR_PLIB,"Need data (eg, PCSetCoordinates) for Stokes problems"); 610 ierr = pc_gamg->ops->createdefaultdata(pc, kktMatsArr[0].A11);CHKERRQ(ierr); 611 } 612 } 613 614 /* cache original data for reuse */ 615 if (!pc_gamg->orig_data && redo_mesh_setup) { 616 ierr = PetscMalloc(pc_gamg->data_sz*sizeof(PetscReal), &pc_gamg->orig_data);CHKERRQ(ierr); 617 for (qq=0; qq<pc_gamg->data_sz; qq++) pc_gamg->orig_data[qq] = pc_gamg->data[qq]; 618 pc_gamg->orig_data_cell_rows = pc_gamg->data_cell_rows; 619 pc_gamg->orig_data_cell_cols = pc_gamg->data_cell_cols; 620 } 621 622 /* get basic dims */ 623 if (stokes) bs = pc_gamg->data_cell_rows; /* this is agg-mg specific */ 624 else { 625 ierr = MatGetBlockSize(Pmat, &bs);CHKERRQ(ierr); 626 } 627 628 ierr = MatGetSize(Pmat, &M, &qq);CHKERRQ(ierr); 629 if (pc_gamg->verbose) { 630 PetscInt NN = M; 631 if (pc_gamg->verbose==1) { 632 ierr = MatGetInfo(Pmat,MAT_LOCAL,&info);CHKERRQ(ierr); 633 ierr = MatGetLocalSize(Pmat, &NN, &qq);CHKERRQ(ierr); 634 } else { 635 ierr = MatGetInfo(Pmat,MAT_GLOBAL_SUM,&info);CHKERRQ(ierr); 636 } 637 nnz0 = info.nz_used; 638 nnztot = info.nz_used; 639 ierr = PetscPrintf(comm,"\t[%d]%s level %d N=%d, n data rows=%d, n data cols=%d, nnz/row (ave)=%d, np=%d\n", 640 rank,__FUNCT__,0,M,pc_gamg->data_cell_rows,pc_gamg->data_cell_cols, 641 (int)(nnz0/(PetscReal)NN),size);CHKERRQ(ierr); 642 } 643 644 /* Get A_i and R_i */ 645 for (level=0, Aarr[0]=Pmat, nactivepe = size; /* hard wired stopping logic */ 646 level < (pc_gamg->Nlevels-1) && (level==0 || M>pc_gamg->coarse_eq_limit); /* && (size==1 || nactivepe>1); */ 647 level++) { 648 level1 = level + 1; 649 #if defined PETSC_GAMG_USE_LOG 650 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 651 #if (defined GAMG_STAGES) 652 ierr = PetscLogStagePush(gamg_stages[level]);CHKERRQ(ierr); 653 #endif 654 #endif 655 /* deal with Stokes, get sub matrices */ 656 if (level > 0) { 657 
ierr = GAMGKKTMatCreate(Aarr[level], stokes, &kktMatsArr[level]);CHKERRQ(ierr); 658 } 659 { /* construct prolongator */ 660 Mat Gmat; 661 PetscCoarsenData *agg_lists; 662 Mat Prol11,Prol22; 663 664 ierr = pc_gamg->ops->graph(pc,kktMatsArr[level].A11, &Gmat);CHKERRQ(ierr); 665 ierr = pc_gamg->ops->coarsen(pc, &Gmat, &agg_lists);CHKERRQ(ierr); 666 ierr = pc_gamg->ops->prolongator(pc, kktMatsArr[level].A11, Gmat, agg_lists, &Prol11);CHKERRQ(ierr); 667 668 /* could have failed to create new level */ 669 if (Prol11) { 670 /* get new block size of coarse matrices */ 671 ierr = MatGetBlockSizes(Prol11, NULL, &bs);CHKERRQ(ierr); 672 673 if (stokes) { 674 if (!pc_gamg->ops->formkktprol) SETERRQ(comm,PETSC_ERR_USER,"Stokes not supportd by AMG method."); 675 /* R A12 == (T = A21 P)'; G = T' T; coarsen G; form plain agg with G */ 676 ierr = pc_gamg->ops->formkktprol(pc, Prol11, kktMatsArr[level].A21, &Prol22);CHKERRQ(ierr); 677 } 678 679 if (pc_gamg->ops->optprol) { 680 /* smooth */ 681 ierr = pc_gamg->ops->optprol(pc, kktMatsArr[level].A11, &Prol11);CHKERRQ(ierr); 682 } 683 684 if (stokes) { 685 IS is_row[2]; 686 Mat a[4]; 687 688 is_row[0] = kktMatsArr[level].prim_is; is_row[1] = kktMatsArr[level].constr_is; 689 a[0] = Prol11; a[1] = NULL; a[2] = NULL; a[3] = Prol22; 690 ierr = MatCreateNest(comm,2,is_row, 2, is_row, a, &Parr[level1]);CHKERRQ(ierr); 691 } else Parr[level1] = Prol11; 692 } else Parr[level1] = NULL; 693 694 if (pc_gamg->use_aggs_in_gasm) { 695 ierr = PetscCDGetASMBlocks(agg_lists, bs, &nASMBlocksArr[level], &ASMLocalIDsArr[level]);CHKERRQ(ierr); 696 } 697 698 ierr = MatDestroy(&Gmat);CHKERRQ(ierr); 699 ierr = PetscCDDestroy(agg_lists);CHKERRQ(ierr); 700 } /* construct prolongator scope */ 701 #if defined PETSC_GAMG_USE_LOG 702 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET1],0,0,0,0);CHKERRQ(ierr); 703 #endif 704 /* cache eigen estimate */ 705 if (pc_gamg->emax_id != -1) { 706 PetscBool flag; 707 ierr = 
PetscObjectComposedDataGetReal((PetscObject)kktMatsArr[level].A11, pc_gamg->emax_id, emaxs[level], flag);CHKERRQ(ierr); 708 if (!flag) emaxs[level] = -1.; 709 } else emaxs[level] = -1.; 710 if (level==0) Aarr[0] = Pmat; /* use Pmat for finest level setup */ 711 if (!Parr[level1]) { 712 if (pc_gamg->verbose) { 713 ierr = PetscPrintf(comm,"\t[%d]%s stop gridding, level %d\n",rank,__FUNCT__,level);CHKERRQ(ierr); 714 } 715 break; 716 } 717 #if defined PETSC_GAMG_USE_LOG 718 ierr = PetscLogEventBegin(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 719 #endif 720 721 ierr = createLevel(pc, Aarr[level], bs, (PetscBool)(level==pc_gamg->Nlevels-2), 722 stokes, &Parr[level1], &Aarr[level1], &nactivepe);CHKERRQ(ierr); 723 724 #if defined PETSC_GAMG_USE_LOG 725 ierr = PetscLogEventEnd(petsc_gamg_setup_events[SET2],0,0,0,0);CHKERRQ(ierr); 726 #endif 727 ierr = MatGetSize(Aarr[level1], &M, &qq);CHKERRQ(ierr); 728 729 if (pc_gamg->verbose > 0) { 730 PetscInt NN = M; 731 if (pc_gamg->verbose==1) { 732 ierr = MatGetInfo(Aarr[level1],MAT_LOCAL,&info);CHKERRQ(ierr); 733 ierr = MatGetLocalSize(Aarr[level1], &NN, &qq);CHKERRQ(ierr); 734 } else { 735 ierr = MatGetInfo(Aarr[level1], MAT_GLOBAL_SUM, &info);CHKERRQ(ierr); 736 } 737 738 nnztot += info.nz_used; 739 ierr = PetscPrintf(comm,"\t\t[%d]%s %d) N=%d, n data cols=%d, nnz/row (ave)=%d, %d active pes\n", 740 rank,__FUNCT__,(int)level1,M,pc_gamg->data_cell_cols, 741 (int)(info.nz_used/(PetscReal)NN), nactivepe);CHKERRQ(ierr); 742 } 743 744 /* stop if one node -- could pull back for singular problems */ 745 if (M/pc_gamg->data_cell_cols < 2) { 746 level++; 747 break; 748 } 749 #if (defined PETSC_GAMG_USE_LOG && defined GAMG_STAGES) 750 ierr = PetscLogStagePop();CHKERRQ(ierr); 751 #endif 752 } /* levels */ 753 754 if (pc_gamg->data) { 755 ierr = PetscFree(pc_gamg->data);CHKERRQ(ierr); 756 pc_gamg->data = NULL; 757 } 758 759 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s %d levels, grid complexity = 
%g\n",0,__FUNCT__,level+1,nnztot/nnz0); 760 pc_gamg->Nlevels = level + 1; 761 fine_level = level; 762 ierr = PCMGSetLevels(pc,pc_gamg->Nlevels,NULL);CHKERRQ(ierr); 763 764 /* simple setup */ 765 if (!PETSC_TRUE) { 766 PC_MG_Levels **mglevels = mg->levels; 767 for (lidx=0,level=pc_gamg->Nlevels-1; 768 lidx<fine_level; 769 lidx++, level--) { 770 ierr = PCMGSetInterpolation(pc, lidx+1, Parr[level]);CHKERRQ(ierr); 771 ierr = KSPSetOperators(mglevels[lidx]->smoothd, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 772 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 773 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 774 } 775 ierr = KSPSetOperators(mglevels[fine_level]->smoothd, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 776 777 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 778 } else if (pc_gamg->Nlevels > 1) { /* don't setup MG if one level */ 779 /* set default smoothers & set operators */ 780 for (lidx = 1, level = pc_gamg->Nlevels-2; 781 lidx <= fine_level; 782 lidx++, level--) { 783 KSP smoother; 784 PC subpc; 785 786 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 787 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 788 789 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 790 /* set ops */ 791 ierr = KSPSetOperators(smoother, Aarr[level], Aarr[level], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 792 ierr = PCMGSetInterpolation(pc, lidx, Parr[level+1]);CHKERRQ(ierr); 793 794 /* create field split PC, get subsmoother */ 795 if (stokes) { 796 KSP *ksps; 797 PetscInt nn; 798 ierr = PCFieldSplitSetIS(subpc,"0",kktMatsArr[level].prim_is);CHKERRQ(ierr); 799 ierr = PCFieldSplitSetIS(subpc,"1",kktMatsArr[level].constr_is);CHKERRQ(ierr); 800 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps);CHKERRQ(ierr); 801 smoother = ksps[0]; 802 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 803 ierr = PetscFree(ksps);CHKERRQ(ierr); 804 } 805 ierr = GAMGKKTMatDestroy(&kktMatsArr[level]);CHKERRQ(ierr); 806 807 /* set defaults */ 808 ierr = 
KSPSetType(smoother, KSPCHEBYSHEV);CHKERRQ(ierr); 809 810 /* override defaults and command line args (!) */ 811 if (pc_gamg->use_aggs_in_gasm) { 812 PetscInt sz; 813 IS *is; 814 815 sz = nASMBlocksArr[level]; 816 is = ASMLocalIDsArr[level]; 817 ierr = PCSetType(subpc, PCGASM);CHKERRQ(ierr); 818 if (sz==0) { 819 IS is; 820 PetscInt my0,kk; 821 ierr = MatGetOwnershipRange(Aarr[level], &my0, &kk);CHKERRQ(ierr); 822 ierr = ISCreateGeneral(PETSC_COMM_SELF, 1, &my0, PETSC_COPY_VALUES, &is);CHKERRQ(ierr); 823 ierr = PCGASMSetSubdomains(subpc, 1, &is, NULL);CHKERRQ(ierr); 824 ierr = ISDestroy(&is);CHKERRQ(ierr); 825 } else { 826 PetscInt kk; 827 ierr = PCGASMSetSubdomains(subpc, sz, is, NULL);CHKERRQ(ierr); 828 for (kk=0; kk<sz; kk++) { 829 ierr = ISDestroy(&is[kk]);CHKERRQ(ierr); 830 } 831 ierr = PetscFree(is);CHKERRQ(ierr); 832 } 833 ierr = PCGASMSetOverlap(subpc, 0);CHKERRQ(ierr); 834 835 ASMLocalIDsArr[level] = NULL; 836 nASMBlocksArr[level] = 0; 837 ierr = PCGASMSetType(subpc, PC_GASM_BASIC);CHKERRQ(ierr); 838 } else { 839 ierr = PCSetType(subpc, PCJACOBI);CHKERRQ(ierr); 840 } 841 } 842 { 843 /* coarse grid */ 844 KSP smoother,*k2; PC subpc,pc2; PetscInt ii,first; 845 Mat Lmat = Aarr[(level=pc_gamg->Nlevels-1)]; lidx = 0; 846 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 847 ierr = KSPSetOperators(smoother, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr); 848 ierr = KSPSetNormType(smoother, KSP_NORM_NONE);CHKERRQ(ierr); 849 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 850 ierr = PCSetType(subpc, PCBJACOBI);CHKERRQ(ierr); 851 ierr = PCSetUp(subpc);CHKERRQ(ierr); 852 ierr = PCBJacobiGetSubKSP(subpc,&ii,&first,&k2);CHKERRQ(ierr); 853 if (ii != 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_PLIB,"ii %D is not one",ii); 854 ierr = KSPGetPC(k2[0],&pc2);CHKERRQ(ierr); 855 ierr = PCSetType(pc2, PCLU);CHKERRQ(ierr); 856 ierr = PCFactorSetShiftType(pc2,MAT_SHIFT_INBLOCKS);CHKERRQ(ierr); 857 ierr = 
KSPSetTolerances(k2[0],PETSC_DEFAULT,PETSC_DEFAULT,PETSC_DEFAULT,1);CHKERRQ(ierr); 858 } 859 860 /* should be called in PCSetFromOptions_GAMG(), but cannot be called prior to PCMGSetLevels() */ 861 ierr = PetscObjectOptionsBegin((PetscObject)pc);CHKERRQ(ierr); 862 ierr = PCSetFromOptions_MG(pc);CHKERRQ(ierr); 863 ierr = PetscOptionsEnd();CHKERRQ(ierr); 864 if (mg->galerkin != 2) SETERRQ(comm,PETSC_ERR_USER,"GAMG does Galerkin manually so the -pc_mg_galerkin option must not be used."); 865 866 /* create cheby smoothers */ 867 for (lidx = 1, level = pc_gamg->Nlevels-2; 868 lidx <= fine_level; 869 lidx++, level--) { 870 KSP smoother; 871 PetscBool flag; 872 PC subpc; 873 874 ierr = PCMGGetSmoother(pc, lidx, &smoother);CHKERRQ(ierr); 875 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 876 877 /* create field split PC, get subsmoother */ 878 if (stokes) { 879 KSP *ksps; 880 PetscInt nn; 881 ierr = PCFieldSplitGetSubKSP(subpc,&nn,&ksps);CHKERRQ(ierr); 882 smoother = ksps[0]; 883 ierr = KSPGetPC(smoother, &subpc);CHKERRQ(ierr); 884 ierr = PetscFree(ksps);CHKERRQ(ierr); 885 } 886 887 /* do my own cheby */ 888 ierr = PetscObjectTypeCompare((PetscObject)smoother, KSPCHEBYSHEV, &flag);CHKERRQ(ierr); 889 if (flag) { 890 PetscReal emax, emin; 891 ierr = PetscObjectTypeCompare((PetscObject)subpc, PCJACOBI, &flag);CHKERRQ(ierr); 892 if (flag && emaxs[level] > 0.0) emax=emaxs[level]; /* eigen estimate only for diagnal PC */ 893 else { /* eigen estimate 'emax' */ 894 KSP eksp; 895 Mat Lmat = Aarr[level]; 896 Vec bb, xx; 897 898 ierr = MatGetVecs(Lmat, &bb, 0);CHKERRQ(ierr); 899 ierr = MatGetVecs(Lmat, &xx, 0);CHKERRQ(ierr); 900 { 901 PetscRandom rctx; 902 ierr = PetscRandomCreate(comm,&rctx);CHKERRQ(ierr); 903 ierr = PetscRandomSetFromOptions(rctx);CHKERRQ(ierr); 904 ierr = VecSetRandom(bb,rctx);CHKERRQ(ierr); 905 ierr = PetscRandomDestroy(&rctx);CHKERRQ(ierr); 906 } 907 908 /* zeroing out BC rows -- needed for crazy matrices */ 909 { 910 PetscInt Istart,Iend,ncols,jj,Ii; 911 
PetscScalar zero = 0.0; 912 ierr = MatGetOwnershipRange(Lmat, &Istart, &Iend);CHKERRQ(ierr); 913 for (Ii = Istart, jj = 0; Ii < Iend; Ii++, jj++) { 914 ierr = MatGetRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 915 if (ncols <= 1) { 916 ierr = VecSetValues(bb, 1, &Ii, &zero, INSERT_VALUES);CHKERRQ(ierr); 917 } 918 ierr = MatRestoreRow(Lmat,Ii,&ncols,0,0);CHKERRQ(ierr); 919 } 920 ierr = VecAssemblyBegin(bb);CHKERRQ(ierr); 921 ierr = VecAssemblyEnd(bb);CHKERRQ(ierr); 922 } 923 924 ierr = KSPCreate(comm, &eksp);CHKERRQ(ierr); 925 ierr = KSPSetTolerances(eksp, PETSC_DEFAULT, PETSC_DEFAULT, PETSC_DEFAULT, 10);CHKERRQ(ierr); 926 ierr = KSPSetNormType(eksp, KSP_NORM_NONE);CHKERRQ(ierr); 927 ierr = KSPSetOptionsPrefix(eksp,((PetscObject)pc)->prefix);CHKERRQ(ierr); 928 ierr = KSPAppendOptionsPrefix(eksp, "gamg_est_");CHKERRQ(ierr); 929 ierr = KSPSetFromOptions(eksp);CHKERRQ(ierr); 930 931 ierr = KSPSetInitialGuessNonzero(eksp, PETSC_FALSE);CHKERRQ(ierr); 932 ierr = KSPSetOperators(eksp, Lmat, Lmat, SAME_NONZERO_PATTERN);CHKERRQ(ierr); 933 ierr = KSPSetComputeSingularValues(eksp,PETSC_TRUE);CHKERRQ(ierr); 934 935 /* set PC type to be same as smoother */ 936 ierr = KSPSetPC(eksp, subpc);CHKERRQ(ierr); 937 938 /* solve - keep stuff out of logging */ 939 ierr = PetscLogEventDeactivate(KSP_Solve);CHKERRQ(ierr); 940 ierr = PetscLogEventDeactivate(PC_Apply);CHKERRQ(ierr); 941 ierr = KSPSolve(eksp, bb, xx);CHKERRQ(ierr); 942 ierr = PetscLogEventActivate(KSP_Solve);CHKERRQ(ierr); 943 ierr = PetscLogEventActivate(PC_Apply);CHKERRQ(ierr); 944 945 ierr = KSPComputeExtremeSingularValues(eksp, &emax, &emin);CHKERRQ(ierr); 946 947 ierr = VecDestroy(&xx);CHKERRQ(ierr); 948 ierr = VecDestroy(&bb);CHKERRQ(ierr); 949 ierr = KSPDestroy(&eksp);CHKERRQ(ierr); 950 951 if (pc_gamg->verbose > 0) { 952 PetscInt N1, tt; 953 ierr = MatGetSize(Aarr[level], &N1, &tt);CHKERRQ(ierr); 954 PetscPrintf(comm,"\t\t\t%s PC setup max eigen=%e min=%e on level %d (N=%d)\n",__FUNCT__,emax,emin,lidx,N1); 955 } 956 } 957 { 
958 PetscInt N1, N0; 959 ierr = MatGetSize(Aarr[level], &N1, NULL);CHKERRQ(ierr); 960 ierr = MatGetSize(Aarr[level+1], &N0, NULL);CHKERRQ(ierr); 961 /* heuristic - is this crap? */ 962 /* emin = 1.*emax/((PetscReal)N1/(PetscReal)N0); */ 963 emin = emax * pc_gamg->eigtarget[0]; 964 emax *= pc_gamg->eigtarget[1]; 965 } 966 ierr = KSPChebyshevSetEigenvalues(smoother, emax, emin);CHKERRQ(ierr); 967 } /* setup checby flag */ 968 } /* non-coarse levels */ 969 970 /* clean up */ 971 for (level=1; level<pc_gamg->Nlevels; level++) { 972 ierr = MatDestroy(&Parr[level]);CHKERRQ(ierr); 973 ierr = MatDestroy(&Aarr[level]);CHKERRQ(ierr); 974 } 975 976 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 977 978 if (PETSC_TRUE) { 979 KSP smoother; /* PCSetUp_MG seems to insists on setting this to GMRES on coarse grid */ 980 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 981 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 982 } 983 } else { 984 KSP smoother; 985 if (pc_gamg->verbose) PetscPrintf(comm,"\t[%d]%s one level solver used (system is seen as DD). Using default solver.\n",rank,__FUNCT__); 986 ierr = PCMGGetSmoother(pc, 0, &smoother);CHKERRQ(ierr); 987 ierr = KSPSetOperators(smoother, Aarr[0], Aarr[0], SAME_NONZERO_PATTERN);CHKERRQ(ierr); 988 ierr = KSPSetType(smoother, KSPPREONLY);CHKERRQ(ierr); 989 ierr = PCSetUp_MG(pc);CHKERRQ(ierr); 990 } 991 PetscFunctionReturn(0); 992 } 993 994 /* ------------------------------------------------------------------------- */ 995 /* 996 PCDestroy_GAMG - Destroys the private context for the GAMG preconditioner 997 that was created with PCCreate_GAMG(). 998 999 Input Parameter: 1000 . 
pc - the preconditioner context

   Application Interface Routine: PCDestroy()
*/
#undef __FUNCT__
#define __FUNCT__ "PCDestroy_GAMG"
PetscErrorCode PCDestroy_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg     = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg= (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  /* release coarse-grid/setup data first (frees pc_gamg->data and orig_data) */
  ierr = PCReset_GAMG(pc);CHKERRQ(ierr);
  /* give the subtype (AGG/GEO) a chance to free its private context */
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->ops);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscFree(pc_gamg);CHKERRQ(ierr);
  /* destroy the PCMG base class last; it owns pc->data */
  ierr = PCDestroy_MG(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim"
/*@
   PCGAMGSetProcEqLim - Set number of equations to aim for on coarse grids via
   processor reduction.

   Not Collective on PC

   Input Parameters:
.  pc - the preconditioner context
.  n - target number of equations per process on coarse grids (values <= 0 are ignored)

   Options Database Key:
.  -pc_gamg_process_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetProcEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetProcEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetProcEqLim_GAMG"
static PetscErrorCode PCGAMGSetProcEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->min_eq_proc = n;  /* non-positive values keep the current setting */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim"
/*@
   PCGAMGSetCoarseEqLim - Set max number of equations on coarse grids.

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context
.  n - maximum number of equations allowed on the coarsest grid (values <= 0 are ignored)

   Options Database Key:
.  -pc_gamg_coarse_eq_limit

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetCoarseEqLim(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetCoarseEqLim_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetCoarseEqLim_GAMG"
static PetscErrorCode PCGAMGSetCoarseEqLim_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  if (n>0) pc_gamg->coarse_eq_limit = n;  /* non-positive values keep the current setting */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning"
/*@
   PCGAMGSetRepartitioning - Repartition the coarse grids

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context
.  n - PETSC_TRUE to repartition coarse grids onto fewer processes

   Options Database Key:
.
-pc_gamg_repartition

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetRepartitioning(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetRepartitioning_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetRepartitioning_GAMG"
static PetscErrorCode PCGAMGSetRepartitioning_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->repart = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseProl"
/*@
   PCGAMGSetReuseProl - Reuse prolongation when rebuilding the preconditioner

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context
.  n - PETSC_TRUE to reuse the prolongation operator between setups

   Options Database Key:
.  -pc_gamg_reuse_interpolation

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetReuseProl(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetReuseProl_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetReuseProl_GAMG"
static PetscErrorCode PCGAMGSetReuseProl_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->reuse_prol = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs"
/*@
   PCGAMGSetUseASMAggs - Use aggregation aggregates to define the GASM smoother subdomains

   Collective on PC

   Input Parameters:
.
pc - the preconditioner context
.  n - PETSC_TRUE to build GASM subdomains from the aggregates

   Options Database Key:
.  -pc_gamg_use_agg_gasm

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetUseASMAggs(PC pc, PetscBool n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetUseASMAggs_C",(PC,PetscBool),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetUseASMAggs_GAMG"
static PetscErrorCode PCGAMGSetUseASMAggs_GAMG(PC pc, PetscBool n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->use_aggs_in_gasm = n;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels"
/*@
   PCGAMGSetNlevels - Set the maximum number of multigrid levels

   Not collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Options Database Key:
.
-pc_mg_levels

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetNlevels(PC pc, PetscInt n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetNlevels_C",(PC,PetscInt),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetNlevels_GAMG"
static PetscErrorCode PCGAMGSetNlevels_GAMG(PC pc, PetscInt n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->Nlevels = n;  /* note: no validation here; coarsening may stop earlier anyway */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold"
/*@
   PCGAMGSetThreshold - Relative threshold to use for dropping edges in aggregation graph

   Not collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Options Database Key:
.
-pc_gamg_threshold

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetThreshold(PC pc, PetscReal n)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetThreshold_C",(PC,PetscReal),(pc,n));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetThreshold_GAMG"
static PetscErrorCode PCGAMGSetThreshold_GAMG(PC pc, PetscReal n)
{
  PC_MG   *mg      = (PC_MG*)pc->data;
  PC_GAMG *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  pc_gamg->threshold = n;  /* relative edge-drop tolerance used when building the graph */
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType"
/*@
   PCGAMGSetType - Set solution method - calls sub create method

   Collective on PC

   Input Parameters:
.  pc - the preconditioner context

   Options Database Key:
.
-pc_gamg_type

   Level: intermediate

   Concepts: Unstructured multigrid preconditioner

.seealso: ()
@*/
PetscErrorCode PCGAMGSetType(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  /* dispatch to the implementation composed on the PC, if any */
  ierr = PetscTryMethod(pc,"PCGAMGSetType_C",(PC,PCGAMGType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGSetType_GAMG"
static PetscErrorCode PCGAMGSetType_GAMG(PC pc, PCGAMGType type)
{
  PetscErrorCode ierr,(*r)(PC);
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;

  PetscFunctionBegin;
  /* look up the creation routine registered for this type (e.g. GEO/AGG) */
  ierr = PetscFunctionListFind(GAMGList,type,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE,"Unknown GAMG type %s given",type);
  /* tear down any previously installed subtype before switching */
  if (pc_gamg->ops->destroy) {
    ierr = (*pc_gamg->ops->destroy)(pc);CHKERRQ(ierr);
    ierr = PetscMemzero(pc_gamg->ops,sizeof(struct _PCGAMGOps));CHKERRQ(ierr);
  }
  ierr = PetscFree(pc_gamg->gamg_type_name);CHKERRQ(ierr);
  ierr = PetscStrallocpy(type,&pc_gamg->gamg_type_name);CHKERRQ(ierr);
  /* call PCCreateGAMG_XYZ to install the subtype's function pointers */
  ierr = (*r)(pc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_GAMG"
PetscErrorCode PCSetFromOptions_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_MG          *mg      = (PC_MG*)pc->data;
  PC_GAMG        *pc_gamg = (PC_GAMG*)mg->innerctx;
  PetscBool      flag;
  PetscInt       two = 2;  /* length of the eigtarget array option */
  MPI_Comm       comm;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
  ierr = PetscOptionsHead("GAMG options");CHKERRQ(ierr);
  {
    /* -pc_gamg_type */
    {
      char tname[256] = PCGAMGAGG;
      /* default shown in -help: the already-set type name if there is one */
      const char *deftype = pc_gamg->gamg_type_name ?
pc_gamg->gamg_type_name : tname; 1390 ierr = PetscOptionsList("-pc_gamg_type","Type of AMG method","PCGAMGSetType",GAMGList, tname, tname, sizeof(tname), &flag);CHKERRQ(ierr); 1391 /* call PCCreateGAMG_XYZ */ 1392 if (flag || !pc_gamg->gamg_type_name) { 1393 ierr = PCGAMGSetType(pc, flag ? tname : deftype);CHKERRQ(ierr); 1394 } 1395 } 1396 /* -pc_gamg_verbose */ 1397 ierr = PetscOptionsInt("-pc_gamg_verbose","Verbose (debugging) output for PCGAMG", 1398 "none", pc_gamg->verbose, 1399 &pc_gamg->verbose, NULL);CHKERRQ(ierr); 1400 /* -pc_gamg_repartition */ 1401 ierr = PetscOptionsBool("-pc_gamg_repartition", 1402 "Repartion coarse grids (false)", 1403 "PCGAMGRepartitioning", 1404 pc_gamg->repart, 1405 &pc_gamg->repart, 1406 &flag);CHKERRQ(ierr); 1407 /* -pc_gamg_reuse_interpolation */ 1408 ierr = PetscOptionsBool("-pc_gamg_reuse_interpolation", 1409 "Reuse prolongation operator (true)", 1410 "PCGAMGReuseProl", 1411 pc_gamg->reuse_prol, 1412 &pc_gamg->reuse_prol, 1413 &flag);CHKERRQ(ierr); 1414 /* -pc_gamg_use_agg_gasm */ 1415 ierr = PetscOptionsBool("-pc_gamg_use_agg_gasm", 1416 "Use aggregation agragates for GASM smoother (false)", 1417 "PCGAMGUseASMAggs", 1418 pc_gamg->use_aggs_in_gasm, 1419 &pc_gamg->use_aggs_in_gasm, 1420 &flag);CHKERRQ(ierr); 1421 /* -pc_gamg_process_eq_limit */ 1422 ierr = PetscOptionsInt("-pc_gamg_process_eq_limit", 1423 "Limit (goal) on number of equations per process on coarse grids", 1424 "PCGAMGSetProcEqLim", 1425 pc_gamg->min_eq_proc, 1426 &pc_gamg->min_eq_proc, 1427 &flag);CHKERRQ(ierr); 1428 /* -pc_gamg_coarse_eq_limit */ 1429 ierr = PetscOptionsInt("-pc_gamg_coarse_eq_limit", 1430 "Limit on number of equations for the coarse grid", 1431 "PCGAMGSetCoarseEqLim", 1432 pc_gamg->coarse_eq_limit, 1433 &pc_gamg->coarse_eq_limit, 1434 &flag);CHKERRQ(ierr); 1435 /* -pc_gamg_threshold */ 1436 ierr = PetscOptionsReal("-pc_gamg_threshold", 1437 "Relative threshold to use for dropping edges in aggregation graph", 1438 "PCGAMGSetThreshold", 1439 
pc_gamg->threshold,
                            &pc_gamg->threshold,
                            &flag);CHKERRQ(ierr);
    if (flag && pc_gamg->verbose) {
      ierr = PetscPrintf(comm,"\t[%d]%s threshold set %e\n",0,__FUNCT__,pc_gamg->threshold);CHKERRQ(ierr);
    }
    /* -pc_gamg_eigtarget */
    ierr = PetscOptionsRealArray("-pc_gamg_eigtarget","Target eigenvalue range as fraction of estimated maximum eigenvalue","PCGAMGSetEigTarget",pc_gamg->eigtarget,&two,NULL);CHKERRQ(ierr);
    /* -pc_mg_levels: handled here (not by PCMG) so GAMG can cap its coarsening */
    ierr = PetscOptionsInt("-pc_mg_levels",
                           "Set number of MG levels",
                           "PCGAMGSetNlevels",
                           pc_gamg->Nlevels,
                           &pc_gamg->Nlevels,
                           &flag);CHKERRQ(ierr);

    /* set options for subtype */
    if (pc_gamg->ops->setfromoptions) {ierr = (*pc_gamg->ops->setfromoptions)(pc);CHKERRQ(ierr);}
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------- */
/*MC
     PCGAMG - Geometric algebraic multigrid (AMG) preconditioning framework.
       - This is the entry point to GAMG, registered in pcregis.c

   Options Database Keys:
   Multigrid options(inherited)
+  -pc_mg_cycles <1>: 1 for V cycle, 2 for W-cycle (PCMGSetCycleType)
.  -pc_mg_smoothup <1>: Number of post-smoothing steps (PCMGSetNumberSmoothUp)
.
-pc_mg_smoothdown <1>: Number of pre-smoothing steps (PCMGSetNumberSmoothDown)
-  -pc_mg_type <multiplicative>: (one of) additive multiplicative full kascade

  Level: intermediate

  Concepts: multigrid

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC, PCMGType,
           PCMGSetLevels(), PCMGGetLevels(), PCMGSetType(), PCMGSetCycleType(), PCMGSetNumberSmoothDown(),
           PCMGSetNumberSmoothUp(), PCMGGetCoarseSolve(), PCMGSetResidual(), PCMGSetInterpolation(),
           PCMGSetRestriction(), PCMGGetSmoother(), PCMGGetSmootherUp(), PCMGGetSmootherDown(),
           PCMGSetCyclesOnLevel(), PCMGSetRhs(), PCMGSetX(), PCMGSetR()
M*/

#undef __FUNCT__
#define __FUNCT__ "PCCreate_GAMG"
PETSC_EXTERN PetscErrorCode PCCreate_GAMG(PC pc)
{
  PetscErrorCode ierr;
  PC_GAMG        *pc_gamg;
  PC_MG          *mg;
#if defined PETSC_GAMG_USE_LOG
  static long count = 0;  /* register the private log events only on the first PC created */
#endif

  PetscFunctionBegin;
  /* PCGAMG is an inherited class of PCMG.
     Initialize pc as PCMG */
  ierr = PCSetType(pc, PCMG);CHKERRQ(ierr); /* calls PCCreate_MG() and MGCreate_Private() */
  ierr = PetscObjectChangeTypeName((PetscObject)pc, PCGAMG);CHKERRQ(ierr);

  /* create a supporting struct and attach it to pc */
  ierr = PetscNewLog(pc, PC_GAMG, &pc_gamg);CHKERRQ(ierr);
  mg           = (PC_MG*)pc->data;
  mg->galerkin = 2;   /* Use Galerkin, but it is computed externally */
  mg->innerctx = pc_gamg;

  ierr = PetscNewLog(pc,struct _PCGAMGOps,&pc_gamg->ops);CHKERRQ(ierr);

  pc_gamg->setup_count = 0;
  /* these should be in subctx but repartitioning needs simple arrays */
  pc_gamg->data_sz = 0;
  pc_gamg->data    = 0;

  /* register AMG type */
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  ierr = PCGAMGInitializePackage();CHKERRQ(ierr);
#endif

  /* overwrite the pointers of PCMG by the functions of base class PCGAMG */
  pc->ops->setfromoptions = PCSetFromOptions_GAMG;
  pc->ops->setup          = PCSetUp_GAMG;
  pc->ops->reset          = PCReset_GAMG;
  pc->ops->destroy        = PCDestroy_GAMG;

  /* compose the PCGAMGSetXXX() implementations so the public wrappers can find them */
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetProcEqLim_C",PCGAMGSetProcEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetCoarseEqLim_C",PCGAMGSetCoarseEqLim_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetRepartitioning_C",PCGAMGSetRepartitioning_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetReuseProl_C",PCGAMGSetReuseProl_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetUseASMAggs_C",PCGAMGSetUseASMAggs_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetThreshold_C",PCGAMGSetThreshold_GAMG);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetType_C",PCGAMGSetType_GAMG);CHKERRQ(ierr);
  ierr =
PetscObjectComposeFunction((PetscObject)pc,"PCGAMGSetNlevels_C",PCGAMGSetNlevels_GAMG);CHKERRQ(ierr); 1532 pc_gamg->repart = PETSC_FALSE; 1533 pc_gamg->reuse_prol = PETSC_FALSE; 1534 pc_gamg->use_aggs_in_gasm = PETSC_FALSE; 1535 pc_gamg->min_eq_proc = 50; 1536 pc_gamg->coarse_eq_limit = 800; 1537 pc_gamg->threshold = 0.; 1538 pc_gamg->Nlevels = GAMG_MAXLEVELS; 1539 pc_gamg->verbose = 0; 1540 pc_gamg->emax_id = -1; 1541 pc_gamg->eigtarget[0] = 0.05; 1542 pc_gamg->eigtarget[1] = 1.05; 1543 1544 /* private events */ 1545 #if defined PETSC_GAMG_USE_LOG 1546 if (count++ == 0) { 1547 ierr = PetscLogEventRegister("GAMG: createProl", PC_CLASSID, &petsc_gamg_setup_events[SET1]);CHKERRQ(ierr); 1548 ierr = PetscLogEventRegister(" Graph", PC_CLASSID, &petsc_gamg_setup_events[GRAPH]);CHKERRQ(ierr); 1549 /* PetscLogEventRegister(" G.Mat", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_MAT]); */ 1550 /* PetscLogEventRegister(" G.Filter", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_FILTER]); */ 1551 /* PetscLogEventRegister(" G.Square", PC_CLASSID, &petsc_gamg_setup_events[GRAPH_SQR]); */ 1552 ierr = PetscLogEventRegister(" MIS/Agg", PC_CLASSID, &petsc_gamg_setup_events[SET4]);CHKERRQ(ierr); 1553 ierr = PetscLogEventRegister(" geo: growSupp", PC_CLASSID, &petsc_gamg_setup_events[SET5]);CHKERRQ(ierr); 1554 ierr = PetscLogEventRegister(" geo: triangle", PC_CLASSID, &petsc_gamg_setup_events[SET6]);CHKERRQ(ierr); 1555 ierr = PetscLogEventRegister(" search&set", PC_CLASSID, &petsc_gamg_setup_events[FIND_V]);CHKERRQ(ierr); 1556 ierr = PetscLogEventRegister(" SA: col data", PC_CLASSID, &petsc_gamg_setup_events[SET7]);CHKERRQ(ierr); 1557 ierr = PetscLogEventRegister(" SA: frmProl0", PC_CLASSID, &petsc_gamg_setup_events[SET8]);CHKERRQ(ierr); 1558 ierr = PetscLogEventRegister(" SA: smooth", PC_CLASSID, &petsc_gamg_setup_events[SET9]);CHKERRQ(ierr); 1559 ierr = PetscLogEventRegister("GAMG: partLevel", PC_CLASSID, &petsc_gamg_setup_events[SET2]);CHKERRQ(ierr); 1560 ierr = PetscLogEventRegister(" 
repartition", PC_CLASSID, &petsc_gamg_setup_events[SET12]);CHKERRQ(ierr); 1561 ierr = PetscLogEventRegister(" Invert-Sort", PC_CLASSID, &petsc_gamg_setup_events[SET13]);CHKERRQ(ierr); 1562 ierr = PetscLogEventRegister(" Move A", PC_CLASSID, &petsc_gamg_setup_events[SET14]);CHKERRQ(ierr); 1563 ierr = PetscLogEventRegister(" Move P", PC_CLASSID, &petsc_gamg_setup_events[SET15]);CHKERRQ(ierr); 1564 1565 /* PetscLogEventRegister(" PL move data", PC_CLASSID, &petsc_gamg_setup_events[SET13]); */ 1566 /* PetscLogEventRegister("GAMG: fix", PC_CLASSID, &petsc_gamg_setup_events[SET10]); */ 1567 /* PetscLogEventRegister("GAMG: set levels", PC_CLASSID, &petsc_gamg_setup_events[SET11]); */ 1568 /* create timer stages */ 1569 #if defined GAMG_STAGES 1570 { 1571 char str[32]; 1572 PetscInt lidx; 1573 sprintf(str,"MG Level %d (finest)",0); 1574 ierr = PetscLogStageRegister(str, &gamg_stages[0]);CHKERRQ(ierr); 1575 for (lidx=1; lidx<9; lidx++) { 1576 sprintf(str,"MG Level %d",lidx); 1577 ierr = PetscLogStageRegister(str, &gamg_stages[lidx]);CHKERRQ(ierr); 1578 } 1579 } 1580 #endif 1581 } 1582 #endif 1583 /* general events */ 1584 #if defined PETSC_USE_LOG 1585 ierr = PetscLogEventRegister("PCGAMGgraph_AGG", 0, &PC_GAMGGgraph_AGG);CHKERRQ(ierr); 1586 ierr = PetscLogEventRegister("PCGAMGgraph_GEO", PC_CLASSID, &PC_GAMGGgraph_GEO);CHKERRQ(ierr); 1587 ierr = PetscLogEventRegister("PCGAMGcoarse_AGG", PC_CLASSID, &PC_GAMGCoarsen_AGG);CHKERRQ(ierr); 1588 ierr = PetscLogEventRegister("PCGAMGcoarse_GEO", PC_CLASSID, &PC_GAMGCoarsen_GEO);CHKERRQ(ierr); 1589 ierr = PetscLogEventRegister("PCGAMGProl_AGG", PC_CLASSID, &PC_GAMGProlongator_AGG);CHKERRQ(ierr); 1590 ierr = PetscLogEventRegister("PCGAMGProl_GEO", PC_CLASSID, &PC_GAMGProlongator_GEO);CHKERRQ(ierr); 1591 ierr = PetscLogEventRegister("PCGAMGPOpt_AGG", PC_CLASSID, &PC_GAMGOptprol_AGG);CHKERRQ(ierr); 1592 ierr = PetscLogEventRegister("GAMGKKTProl_AGG", PC_CLASSID, &PC_GAMGKKTProl_AGG);CHKERRQ(ierr); 1593 #endif 1594 1595 
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGInitializePackage"
/*@C
 PCGAMGInitializePackage - This function initializes everything in the PCGAMG package. It is called
    from PetscDLLibraryRegister() when using dynamic libraries, and on the first call to PCCreate_GAMG()
  when using static libraries.

  Level: developer

.keywords: PC, PCGAMG, initialize, package
.seealso: PetscInitialize()
@*/
PetscErrorCode PCGAMGInitializePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (PCGAMGPackageInitialized) PetscFunctionReturn(0);  /* idempotent */
  PCGAMGPackageInitialized = PETSC_TRUE;
  /* register the built-in coarsening methods (geometric and aggregation) */
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGGEO,PCCreateGAMG_GEO);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(&GAMGList,PCGAMGAGG,PCCreateGAMG_AGG);CHKERRQ(ierr);
  /* make sure the list is freed at PetscFinalize() */
  ierr = PetscRegisterFinalize(PCGAMGFinalizePackage);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCGAMGFinalizePackage"
/*@C
 PCGAMGFinalizePackage - This function destroys everything in the PCGAMG package. It is
    called from PetscFinalize().

  Level: developer

.keywords: Petsc, destroy, package
.seealso: PetscFinalize()
@*/
PetscErrorCode PCGAMGFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PCGAMGPackageInitialized = PETSC_FALSE;  /* allow re-initialization after finalize */
  ierr = PetscFunctionListDestroy(&GAMGList);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}