1 2 /* 3 This file defines an additive Schwarz preconditioner for any Mat implementation. 4 5 Note that each processor may have any number of subdomains. But in order to 6 deal easily with the VecScatter(), we treat each processor as if it has the 7 same number of subdomains. 8 9 n - total number of true subdomains on all processors 10 n_local_true - actual number of subdomains on this processor 11 n_local = maximum over all processors of n_local_true 12 */ 13 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 14 #include <petscdm.h> 15 16 typedef struct { 17 PetscInt n, n_local, n_local_true; 18 PetscInt overlap; /* overlap requested by user */ 19 KSP *ksp; /* linear solvers for each block */ 20 VecScatter restriction; /* mapping from global to overlapping (process) subdomain*/ 21 VecScatter *lrestriction; /* mapping from subregion to overlapping (process) subdomain */ 22 VecScatter *lprolongation; /* mapping from non-overlapping subregion to overlapping (process) subdomain; used for restrict additive version of algorithms */ 23 Vec lx, ly; /* work vectors */ 24 Vec *x,*y; /* work vectors */ 25 IS lis; /* index set that defines each overlapping multiplicative (process) subdomain */ 26 IS *is; /* index set that defines each overlapping subdomain */ 27 IS *is_local; /* index set that defines each non-overlapping subdomain, may be NULL */ 28 Mat *mat,*pmat; /* mat is not currently used */ 29 PCASMType type; /* use reduced interpolation, restriction or both */ 30 PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */ 31 PetscBool same_local_solves; /* flag indicating whether all local solvers are same */ 32 PetscBool sort_indices; /* flag to sort subdomain indices */ 33 PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */ 34 PCCompositeType loctype; /* the type of composition for local solves */ 35 MatType sub_mat_type; /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */ 36 /* For 
multiplicative solve */ 37 Mat *lmats; /* submatrices for overlapping multiplicative (process) subdomain */ 38 } PC_ASM; 39 40 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer) 41 { 42 PC_ASM *osm = (PC_ASM*)pc->data; 43 PetscErrorCode ierr; 44 PetscMPIInt rank; 45 PetscInt i,bsz; 46 PetscBool iascii,isstring; 47 PetscViewer sviewer; 48 49 PetscFunctionBegin; 50 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 51 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 52 if (iascii) { 53 char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set"; 54 if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);} 55 if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);} 56 ierr = PetscViewerASCIIPrintf(viewer," %s, %s\n",blocks,overlaps);CHKERRQ(ierr); 57 ierr = PetscViewerASCIIPrintf(viewer," restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr); 58 if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer," Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);} 59 if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer," Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);} 60 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 61 if (osm->same_local_solves) { 62 if (osm->ksp) { 63 ierr = PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr); 64 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 65 if (!rank) { 66 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 67 ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr); 68 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 69 } 70 ierr = 
PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 71 } 72 } else { 73 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 74 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr); 75 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 76 ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr); 77 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 78 ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 79 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 80 for (i=0; i<osm->n_local_true; i++) { 81 ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr); 82 ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr); 83 ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr); 84 ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 85 } 86 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 87 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 88 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 89 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 90 } 91 } else if (isstring) { 92 ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr); 93 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 94 if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);} 95 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 96 } 97 PetscFunctionReturn(0); 98 } 99 100 static PetscErrorCode PCASMPrintSubdomains(PC pc) 101 { 102 PC_ASM *osm = (PC_ASM*)pc->data; 103 const char *prefix; 104 char fname[PETSC_MAX_PATH_LEN+1]; 105 PetscViewer viewer, sviewer; 106 
char *s; 107 PetscInt i,j,nidx; 108 const PetscInt *idx; 109 PetscMPIInt rank, size; 110 PetscErrorCode ierr; 111 112 PetscFunctionBegin; 113 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr); 114 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr); 115 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 116 ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr); 117 if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }; 118 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr); 119 for (i=0; i<osm->n_local; i++) { 120 if (i < osm->n_local_true) { 121 ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr); 122 ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr); 123 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 124 ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr); 125 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr); 126 ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr); 127 for (j=0; j<nidx; j++) { 128 ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr); 129 } 130 ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr); 131 ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr); 132 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 133 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 134 ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr); 135 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 136 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 137 ierr = PetscFree(s);CHKERRQ(ierr); 138 if (osm->is_local) { 139 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 140 ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr); 141 ierr = 
PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr); 142 ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr); 143 ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr); 144 ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr); 145 for (j=0; j<nidx; j++) { 146 ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr); 147 } 148 ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr); 149 ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr); 150 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 151 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 152 ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr); 153 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 154 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 155 ierr = PetscFree(s);CHKERRQ(ierr); 156 } 157 } else { 158 /* Participate in collective viewer calls. */ 159 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 160 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 161 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 162 /* Assume either all ranks have is_local or none do. 
*/ 163 if (osm->is_local) { 164 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 165 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 166 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 167 } 168 } 169 } 170 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 171 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 172 PetscFunctionReturn(0); 173 } 174 175 static PetscErrorCode PCSetUp_ASM(PC pc) 176 { 177 PC_ASM *osm = (PC_ASM*)pc->data; 178 PetscErrorCode ierr; 179 PetscBool symset,flg; 180 PetscInt i,m,m_local; 181 MatReuse scall = MAT_REUSE_MATRIX; 182 IS isl; 183 KSP ksp; 184 PC subpc; 185 const char *prefix,*pprefix; 186 Vec vec; 187 DM *domain_dm = NULL; 188 189 PetscFunctionBegin; 190 if (!pc->setupcalled) { 191 PetscInt m; 192 193 if (!osm->type_set) { 194 ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr); 195 if (symset && flg) osm->type = PC_ASM_BASIC; 196 } 197 198 /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */ 199 if (osm->n_local_true == PETSC_DECIDE) { 200 /* no subdomains given */ 201 /* try pc->dm first, if allowed */ 202 if (osm->dm_subdomains && pc->dm) { 203 PetscInt num_domains, d; 204 char **domain_names; 205 IS *inner_domain_is, *outer_domain_is; 206 ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr); 207 osm->overlap = -1; /* We do not want to increase the overlap of the IS. 
208 A future improvement of this code might allow one to use 209 DM-defined subdomains and also increase the overlap, 210 but that is not currently supported */ 211 if (num_domains) { 212 ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr); 213 } 214 for (d = 0; d < num_domains; ++d) { 215 if (domain_names) {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);} 216 if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);} 217 if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);} 218 } 219 ierr = PetscFree(domain_names);CHKERRQ(ierr); 220 ierr = PetscFree(inner_domain_is);CHKERRQ(ierr); 221 ierr = PetscFree(outer_domain_is);CHKERRQ(ierr); 222 } 223 if (osm->n_local_true == PETSC_DECIDE) { 224 /* still no subdomains; use one subdomain per processor */ 225 osm->n_local_true = 1; 226 } 227 } 228 { /* determine the global and max number of subdomains */ 229 struct {PetscInt max,sum;} inwork,outwork; 230 PetscMPIInt size; 231 232 inwork.max = osm->n_local_true; 233 inwork.sum = osm->n_local_true; 234 ierr = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 235 osm->n_local = outwork.max; 236 osm->n = outwork.sum; 237 238 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 239 if (outwork.max == 1 && outwork.sum == size) { 240 /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */ 241 ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr); 242 } 243 } 244 if (!osm->is) { /* create the index sets */ 245 ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr); 246 } 247 if (osm->n_local_true > 1 && !osm->is_local) { 248 ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr); 249 for (i=0; i<osm->n_local_true; i++) { 250 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be 
modified */ 251 ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr); 252 ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr); 253 } else { 254 ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr); 255 osm->is_local[i] = osm->is[i]; 256 } 257 } 258 } 259 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 260 flg = PETSC_FALSE; 261 ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr); 262 if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); } 263 264 if (osm->overlap > 0) { 265 /* Extend the "overlapping" regions by a number of steps */ 266 ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr); 267 } 268 if (osm->sort_indices) { 269 for (i=0; i<osm->n_local_true; i++) { 270 ierr = ISSort(osm->is[i]);CHKERRQ(ierr); 271 if (osm->is_local) { 272 ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr); 273 } 274 } 275 } 276 277 if (!osm->ksp) { 278 /* Create the local solvers */ 279 ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr); 280 if (domain_dm) { 281 ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr); 282 } 283 for (i=0; i<osm->n_local_true; i++) { 284 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 285 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 286 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 287 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 288 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 289 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 290 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 291 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 292 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 293 if (domain_dm) { 294 ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr); 295 ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr); 296 ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr); 297 } 298 
osm->ksp[i] = ksp; 299 } 300 if (domain_dm) { 301 ierr = PetscFree(domain_dm);CHKERRQ(ierr); 302 } 303 } 304 305 ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr); 306 ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr); 307 ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr); 308 ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr); 309 ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr); 310 311 scall = MAT_INITIAL_MATRIX; 312 } else { 313 /* 314 Destroy the blocks from the previous iteration 315 */ 316 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 317 ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr); 318 scall = MAT_INITIAL_MATRIX; 319 } 320 } 321 322 /* 323 Extract out the submatrices 324 */ 325 ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr); 326 if (scall == MAT_INITIAL_MATRIX) { 327 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 328 for (i=0; i<osm->n_local_true; i++) { 329 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr); 330 ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr); 331 } 332 } 333 334 /* Convert the types of the submatrices (if needbe) */ 335 if (osm->sub_mat_type) { 336 for (i=0; i<osm->n_local_true; i++) { 337 ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr); 338 } 339 } 340 341 if(!pc->setupcalled){ 342 /* Create the local work vectors (from the local matrices) and scatter contexts */ 343 ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr); 344 345 if (osm->is_local && (osm->type == PC_ASM_INTERPOLATE || osm->type == PC_ASM_NONE )) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot use interpolate or none PCASMType if is_local was provided to PCASMSetLocalSubdomains()"); 346 if (osm->is_local && osm->type == PC_ASM_RESTRICT && osm->loctype == 
PC_COMPOSITE_ADDITIVE) { 347 ierr = PetscMalloc1(osm->n_local_true,&osm->lprolongation);CHKERRQ(ierr); 348 } 349 ierr = PetscMalloc1(osm->n_local_true,&osm->lrestriction);CHKERRQ(ierr); 350 ierr = PetscMalloc1(osm->n_local_true,&osm->x);CHKERRQ(ierr); 351 ierr = PetscMalloc1(osm->n_local_true,&osm->y);CHKERRQ(ierr); 352 353 ierr = ISGetLocalSize(osm->lis,&m);CHKERRQ(ierr); 354 ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr); 355 ierr = VecScatterCreate(vec,osm->lis,osm->lx,isl,&osm->restriction);CHKERRQ(ierr); 356 ierr = ISDestroy(&isl);CHKERRQ(ierr); 357 358 359 for (i=0; i<osm->n_local_true; ++i) { 360 ISLocalToGlobalMapping ltog; 361 IS isll; 362 const PetscInt *idx_is; 363 PetscInt *idx_lis,nout; 364 365 ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr); 366 ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr); 367 ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr); 368 369 /* generate a scatter from ly to y[i] picking all the overlapping is[i] entries */ 370 ierr = ISLocalToGlobalMappingCreateIS(osm->lis,<og);CHKERRQ(ierr); 371 ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr); 372 ierr = ISGetIndices(osm->is[i], &idx_is);CHKERRQ(ierr); 373 ierr = PetscMalloc1(m,&idx_lis);CHKERRQ(ierr); 374 ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m,idx_is,&nout,idx_lis);CHKERRQ(ierr); 375 if (nout != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is not a subset of lis"); 376 ierr = ISRestoreIndices(osm->is[i], &idx_is);CHKERRQ(ierr); 377 ierr = ISCreateGeneral(PETSC_COMM_SELF,m,idx_lis,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr); 378 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 379 ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr); 380 ierr = VecScatterCreate(osm->ly,isll,osm->y[i],isl,&osm->lrestriction[i]);CHKERRQ(ierr); 381 ierr = ISDestroy(&isll);CHKERRQ(ierr); 382 ierr = ISDestroy(&isl);CHKERRQ(ierr); 383 if (osm->lprolongation) { /* generate a scatter from y[i] to ly picking only the the non-overalapping 
is_local[i] entries */ 384 ISLocalToGlobalMapping ltog; 385 IS isll,isll_local; 386 const PetscInt *idx_local; 387 PetscInt *idx1, *idx2, nout; 388 389 ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr); 390 ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr); 391 392 ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],<og);CHKERRQ(ierr); 393 ierr = PetscMalloc1(m_local,&idx1);CHKERRQ(ierr); 394 ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx1);CHKERRQ(ierr); 395 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 396 if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is"); 397 ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx1,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr); 398 399 ierr = ISLocalToGlobalMappingCreateIS(osm->lis,<og);CHKERRQ(ierr); 400 ierr = PetscMalloc1(m_local,&idx2);CHKERRQ(ierr); 401 ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx2);CHKERRQ(ierr); 402 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 403 if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of lis"); 404 ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx2,PETSC_OWN_POINTER,&isll_local);CHKERRQ(ierr); 405 406 ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr); 407 ierr = VecScatterCreate(osm->y[i],isll,osm->ly,isll_local,&osm->lprolongation[i]);CHKERRQ(ierr); 408 409 ierr = ISDestroy(&isll);CHKERRQ(ierr); 410 ierr = ISDestroy(&isll_local);CHKERRQ(ierr); 411 } 412 } 413 ierr = VecDestroy(&vec);CHKERRQ(ierr); 414 } 415 416 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 417 IS *cis; 418 PetscInt c; 419 420 ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr); 421 for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis; 422 ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr); 423 ierr = PetscFree(cis);CHKERRQ(ierr); 424 } 425 426 /* Return 
control to the user so that the submatrices can be modified (e.g., to apply 427 different boundary conditions for the submatrices than for the global problem) */ 428 ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 429 430 /* 431 Loop over subdomains putting them into local ksp 432 */ 433 for (i=0; i<osm->n_local_true; i++) { 434 ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr); 435 if (!pc->setupcalled) { 436 ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr); 437 } 438 } 439 PetscFunctionReturn(0); 440 } 441 442 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc) 443 { 444 PC_ASM *osm = (PC_ASM*)pc->data; 445 PetscErrorCode ierr; 446 PetscInt i; 447 KSPConvergedReason reason; 448 449 PetscFunctionBegin; 450 for (i=0; i<osm->n_local_true; i++) { 451 ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr); 452 ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr); 453 if (reason == KSP_DIVERGED_PCSETUP_FAILED) { 454 pc->failedreason = PC_SUBPC_ERROR; 455 } 456 } 457 PetscFunctionReturn(0); 458 } 459 460 static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y) 461 { 462 PC_ASM *osm = (PC_ASM*)pc->data; 463 PetscErrorCode ierr; 464 PetscInt i,n_local_true = osm->n_local_true; 465 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 466 467 PetscFunctionBegin; 468 /* 469 Support for limiting the restriction or interpolation to only local 470 subdomain values (leaving the other values 0). 
471 */ 472 if (!(osm->type & PC_ASM_RESTRICT)) { 473 forward = SCATTER_FORWARD_LOCAL; 474 /* have to zero the work RHS since scatter may leave some slots empty */ 475 ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr); 476 } 477 if (!(osm->type & PC_ASM_INTERPOLATE)) { 478 reverse = SCATTER_REVERSE_LOCAL; 479 } 480 481 if(osm->loctype == PC_COMPOSITE_MULTIPLICATIVE || osm->loctype == PC_COMPOSITE_ADDITIVE){ 482 /* zero the global and the local solutions */ 483 ierr = VecZeroEntries(y);CHKERRQ(ierr); 484 ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr); 485 486 /* Copy the global RHS to local RHS including the ghost nodes */ 487 ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr); 488 ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr); 489 490 /* Restrict local RHS to the overlapping 0-block RHS */ 491 ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr); 492 ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr); 493 494 /* do the local solves */ 495 for (i = 0; i < n_local_true; ++i) { 496 497 /* solve the overlapping i-block */ 498 ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr); 499 500 if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution (only for restrictive additive) */ 501 ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr); 502 ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr); 503 } 504 else{ /* interpolate the overalapping i-block solution to the local solution */ 505 ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr); 506 ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr); 507 } 508 509 if (i < n_local_true-1) { 510 /* Restrict local RHS to the 
overlapping (i+1)-block RHS */ 511 ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr); 512 ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr); 513 514 if ( osm->loctype == PC_COMPOSITE_MULTIPLICATIVE){ 515 /* udpdate the overlapping (i+1)-block RHS using the current local solution */ 516 ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr); 517 ierr = VecAXPBY(osm->x[i+1],-1.,1., osm->y[i+1]); CHKERRQ(ierr); 518 } 519 } 520 } 521 /* Add the local solution to the global solution including the ghost nodes */ 522 ierr = VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr); 523 ierr = VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr); 524 }else{ 525 SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]); 526 } 527 PetscFunctionReturn(0); 528 } 529 530 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y) 531 { 532 PC_ASM *osm = (PC_ASM*)pc->data; 533 PetscErrorCode ierr; 534 PetscInt i,n_local_true = osm->n_local_true; 535 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 536 537 PetscFunctionBegin; 538 /* 539 Support for limiting the restriction or interpolation to only local 540 subdomain values (leaving the other values 0). 
541 542 Note: these are reversed from the PCApply_ASM() because we are applying the 543 transpose of the three terms 544 */ 545 546 if (!(osm->type & PC_ASM_INTERPOLATE)) { 547 forward = SCATTER_FORWARD_LOCAL; 548 /* have to zero the work RHS since scatter may leave some slots empty */ 549 ierr = VecSet(osm->lx, 0.0);CHKERRQ(ierr); 550 } 551 if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL; 552 553 /* zero the global and the local solutions */ 554 ierr = VecZeroEntries(y);CHKERRQ(ierr); 555 ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr); 556 557 /* Copy the global RHS to local RHS including the ghost nodes */ 558 ierr = VecScatterBegin(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr); 559 ierr = VecScatterEnd(osm->restriction, x, osm->lx, INSERT_VALUES, forward);CHKERRQ(ierr); 560 561 /* Restrict local RHS to the overlapping 0-block RHS */ 562 ierr = VecScatterBegin(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr); 563 ierr = VecScatterEnd(osm->lrestriction[0], osm->lx, osm->x[0], INSERT_VALUES, forward);CHKERRQ(ierr); 564 565 /* do the local solves */ 566 for (i = 0; i < n_local_true; ++i) { 567 568 /* solve the overlapping i-block */ 569 ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr); 570 571 if (osm->lprolongation) { /* interpolate the non-overalapping i-block solution to the local solution */ 572 ierr = VecScatterBegin(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr); 573 ierr = VecScatterEnd(osm->lprolongation[i], osm->y[i], osm->ly, ADD_VALUES, forward);CHKERRQ(ierr); 574 } 575 else{ /* interpolate the overalapping i-block solution to the local solution */ 576 ierr = VecScatterBegin(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr); 577 ierr = VecScatterEnd(osm->lrestriction[i], osm->y[i], osm->ly, ADD_VALUES, reverse);CHKERRQ(ierr); 578 } 579 580 if (i < n_local_true-1) { 581 /* Restrict local RHS to the overlapping 
(i+1)-block RHS */ 582 ierr = VecScatterBegin(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr); 583 ierr = VecScatterEnd(osm->lrestriction[i+1], osm->lx, osm->x[i+1], INSERT_VALUES, forward);CHKERRQ(ierr); 584 } 585 } 586 /* Add the local solution to the global solution including the ghost nodes */ 587 ierr = VecScatterBegin(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr); 588 ierr = VecScatterEnd(osm->restriction, osm->ly, y, ADD_VALUES, reverse);CHKERRQ(ierr); 589 590 PetscFunctionReturn(0); 591 592 } 593 594 static PetscErrorCode PCReset_ASM(PC pc) 595 { 596 PC_ASM *osm = (PC_ASM*)pc->data; 597 PetscErrorCode ierr; 598 PetscInt i; 599 600 PetscFunctionBegin; 601 if (osm->ksp) { 602 for (i=0; i<osm->n_local_true; i++) { 603 ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr); 604 } 605 } 606 if (osm->pmat) { 607 if (osm->n_local_true > 0) { 608 ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr); 609 } 610 } 611 if (osm->lrestriction) { 612 ierr = VecScatterDestroy(&osm->restriction);CHKERRQ(ierr); 613 for (i=0; i<osm->n_local_true; i++) { 614 ierr = VecScatterDestroy(&osm->lrestriction[i]);CHKERRQ(ierr); 615 if (osm->lprolongation) {ierr = VecScatterDestroy(&osm->lprolongation[i]);CHKERRQ(ierr);} 616 ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr); 617 ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr); 618 } 619 ierr = PetscFree(osm->lrestriction);CHKERRQ(ierr); 620 if (osm->lprolongation) {ierr = PetscFree(osm->lprolongation);CHKERRQ(ierr);} 621 ierr = PetscFree(osm->x);CHKERRQ(ierr); 622 ierr = PetscFree(osm->y);CHKERRQ(ierr); 623 624 } 625 ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr); 626 ierr = ISDestroy(&osm->lis);CHKERRQ(ierr); 627 ierr = VecDestroy(&osm->lx);CHKERRQ(ierr); 628 ierr = VecDestroy(&osm->ly);CHKERRQ(ierr); 629 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 630 ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr); 631 } 632 633 
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);

  osm->is       = 0;
  osm->is_local = 0;
  PetscFunctionReturn(0);
}

/* Destroys the ASM preconditioner: resets all subdomain data, destroys the
   per-block KSP solvers, then frees the PC_ASM structure itself */
static PetscErrorCode PCDestroy_ASM(PC pc)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  ierr = PCReset_ASM(pc);CHKERRQ(ierr);
  if (osm->ksp) {
    for (i=0; i<osm->n_local_true; i++) {
      ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr);
    }
    ierr = PetscFree(osm->ksp);CHKERRQ(ierr);
  }
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* Processes the -pc_asm_* options database entries; note that any option that
   sets subdomains or overlap explicitly also disables DM-defined subdomains */
static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PC_ASM          *osm = (PC_ASM*)pc->data;
  PetscErrorCode  ierr;
  PetscInt        blocks,ovl;
  PetscBool       symset,flg;
  PCASMType       asmtype;
  PCCompositeType loctype;
  char            sub_mat_type[256];

  PetscFunctionBegin;
  /* set the type to symmetric if matrix is symmetric */
  if (!osm->type_set && pc->pmat) {
    ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr);
    if (symset && flg) osm->type = PC_ASM_BASIC;
  }
  ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr);
  ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr);
  ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_local_blocks","Number of local subdomains","PCASMSetLocalSubdomains",osm->n_local_true,&blocks,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetLocalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr);
    osm->dm_subdomains = PETSC_FALSE;
  }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); }
  flg  = PETSC_FALSE;
  ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr);
  if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); }
  ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr);
  if(flg){
    ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr);
  }
  ierr = PetscOptionsTail();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*------------------------------------------------------------------------------------*/

/* Implementation of PCASMSetLocalSubdomains(); records the (optionally
   overlapping) index sets for this process's subdomains.  Must be called
   before PCSetUp() */
static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n);
  if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp().");

  if (!pc->setupcalled) {
    /* references are taken on the incoming index sets BEFORE the old ones are
       destroyed -- presumably to protect against the caller passing the index
       sets currently stored in the PC */
    if (is) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);}
    }
    if (is_local) {
      for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);}
    }
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
    if (is) {
      ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is[i] = is[i];
      /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */
      osm->overlap = -1;
    }
    if (is_local) {
      ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr);
      for (i=0; i<n; i++) osm->is_local[i] = is_local[i];
      if (!is) {
        /* only the local (non-overlapping) sets were given: derive osm->is from them */
        ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr);
        for (i=0; i<osm->n_local_true; i++) {
          if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */
            ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr);
            ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr);
          } else {
            ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr);
            osm->is[i] = osm->is_local[i];
          }
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

/* Implementation of PCASMSetTotalSubdomains(); splits N subdomains as evenly
   as possible among the processes of the PC's communicator (lower ranks get
   the remainder blocks) */
static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       n;

  PetscFunctionBegin;
  if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N);
  if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet.");

  /*
     Split the subdomains equally among all processors
  */
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr);
  n    = N/size + ((N % size) > rank);
  if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N);
  if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp().");
  if (!pc->setupcalled) {
    ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr);

    osm->n_local_true = n;
    osm->is           = 0;
    osm->is_local     = 0;
  }
  PetscFunctionReturn(0);
}

/* Implementation of PCASMSetOverlap(); stores the requested overlap, which is
   applied later during PCSetUp() */
static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested");
  if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp().");
  if (!pc->setupcalled) osm->overlap = ovl;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMSetType(); also records that the user chose the type
   explicitly so it will not be overridden for symmetric problems (see
   PCSetFromOptions_ASM) */
static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->type     = type;
  osm->type_set = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMGetType() */
static PetscErrorCode PCASMGetType_ASM(PC pc,PCASMType *type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  *type = osm->type;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMSetLocalType(); only additive and multiplicative
   local composition are supported */
static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  if (type != PC_COMPOSITE_ADDITIVE && type != PC_COMPOSITE_MULTIPLICATIVE) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Only supports additive or multiplicative as the local type");
  osm->loctype = type;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMGetLocalType() */
static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type)
{
  PC_ASM *osm = (PC_ASM *) pc->data;

  PetscFunctionBegin;
  *type = osm->loctype;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMSetSortIndices() */
static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  osm->sort_indices = doSort;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMGetSubKSP(); the block solvers must already exist
   (created during PCSetUp()).  Computing first_local uses an MPI_Scan, so it
   is collective when requested */
static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp)
{
  PC_ASM         *osm = (PC_ASM*)pc->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here");

  if (n_local) *n_local = osm->n_local_true;
  if (first_local) {
    /* exclusive prefix sum: global index of this rank's first block */
    ierr          = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr);
    *first_local -= osm->n_local_true;
  }
  if (ksp) {
    /* Assume that local solves are now different; not necessarily
       true though!  This flag is used only for PCView_ASM() */
    *ksp                   = osm->ksp;
    osm->same_local_solves = PETSC_FALSE;
  }
  PetscFunctionReturn(0);
}

/* Implementation of PCASMGetSubMatType() */
static PetscErrorCode PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type)
{
  PC_ASM *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(sub_mat_type,2);
  *sub_mat_type = osm->sub_mat_type;
  PetscFunctionReturn(0);
}

/* Implementation of PCASMSetSubMatType(); keeps its own heap copy of the
   type string (freed in PCReset_ASM) */
static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type)
{
  PetscErrorCode ierr;
  PC_ASM         *osm = (PC_ASM*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr);
  ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   n - the number of subdomains for this processor (default value = 1)
.   is - the index set that defines the subdomains for this processor
         (or NULL for PETSc to determine subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor, not used unless PCASMType is PC_ASM_RESTRICT
         (or NULL to not provide these)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_local_blocks <blks> - Sets local blocks

    Notes:
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    By default the ASM preconditioner uses 1 block per processor.

    Use PCASMSetTotalSubdomains() to set the subdomains for all processors.

    If is_local is provided and PCASMType is PC_ASM_RESTRICT then the solution only over the is_local region is interpolated
    back to form the global solution (this is the standard restricted additive Schwarz method)

    If the is_local is provided and PCASMType is PC_ASM_INTERPOLATE or PC_ASM_NONE then an error is generated since there is
    no code to handle that case.

    Level: advanced

.keywords: PC, ASM, set, local, subdomains, additive Schwarz

.seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), PCASMType, PCASMSetType()
@*/
PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    PCASMSetTotalSubdomains - Sets the subdomains for all processors for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine, with the same index sets.

    Collective on PC

    Input Parameters:
+   pc - the preconditioner context
.   N  - the number of subdomains for all processors
.   is - the index sets that define the subdomains for all processors
         (or NULL to ask PETSc to determine the subdomains)
-   is_local - the index sets that define the local part of the subdomains for this processor
         (or NULL to not provide this information)

    Options Database Key:
    To set the total number of subdomain blocks rather than specify the
    index sets, use the option
.    -pc_asm_blocks <blks> - Sets total blocks

    Notes:
    Currently you cannot use this to set the actual subdomains with the argument is or is_local.

    By default the ASM preconditioner uses 1 block per processor.

    These index sets cannot be destroyed until after completion of the
    linear solves for which the ASM preconditioner is being used.

    Use PCASMSetLocalSubdomains() to set local subdomains.
    The IS numbering is in the parallel, global numbering of the vector for both is and is_local

    Level: advanced

.keywords: PC, ASM, set, total, global, subdomains, additive Schwarz

.seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetOverlap - Sets the overlap between a pair of subdomains for the
    additive Schwarz preconditioner.  Either all or no processors in the
    PC communicator must call this routine.

    Logically Collective on PC

    Input Parameters:
+   pc  - the preconditioner context
-   ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1)

    Options Database Key:
.   -pc_asm_overlap <ovl> - Sets overlap

    Notes:
    By default the ASM preconditioner uses 1 block per processor.  To use
    multiple blocks per processor, see PCASMSetTotalSubdomains() and
    PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>).

    The overlap defaults to 1, so if one desires that no additional
    overlap be computed beyond what may have been set with a call to
    PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl
    must be set to be 0.  In particular, if one does not explicitly set
    the subdomains an application code, then all overlap would be computed
    internally by PETSc, and using an overlap of 0 would result in an ASM
    variant that is equivalent to the block Jacobi preconditioner.

    The default algorithm used by PETSc to increase overlap is fast, but not scalable,
    use the option -mat_increase_overlap_scalable when the problem and number of processes is large.

    Note that one can define initial index sets with any overlap via
    PCASMSetLocalSubdomains(); the routine
    PCASMSetOverlap() merely allows PETSc to extend that overlap further
    if desired.

    Level: intermediate

.keywords: PC, ASM, set, overlap

.seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains(), MatIncreaseOverlap()
@*/
PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,ovl,2);
  ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetType - Sets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameters:
+   pc   - the preconditioner context
-   type - variant of ASM, one of
.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Notes: if the is_local arguments are passed to PCASMSetLocalSubdomains() then they are used when PC_ASM_RESTRICT has been selected
    to limit the local processor interpolation

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode PCASMSetType(PC pc,PCASMType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveEnum(pc,type,2);
  ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMGetType - Gets the type of restriction and interpolation used
    for local problems in the additive Schwarz method.

    Logically Collective on PC

    Input Parameter:
.   pc - the preconditioner context

    Output Parameter:
.   type - variant of ASM, one of

.vb
      PC_ASM_BASIC       - full interpolation and restriction
      PC_ASM_RESTRICT    - full restriction, local processor interpolation
      PC_ASM_INTERPOLATE - full interpolation, local processor restriction
      PC_ASM_NONE        - local processor restriction and interpolation
.ve

    Options Database Key:
.   -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D(), PCASMType, PCASMSetType(), PCASMSetLocalType(), PCASMGetLocalType()
@*/
PetscErrorCode PCASMGetType(PC pc,PCASMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameters:
+ pc   - the preconditioner context
- type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASM, PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
@*/
PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveEnum(pc, type, 2);
  ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
  PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method.

  Logically Collective on PC

  Input Parameter:
. pc - the preconditioner context

  Output Parameter:
. type - type of composition, one of
.vb
  PC_COMPOSITE_ADDITIVE       - local additive combination
  PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination
.ve

  Options Database Key:
. -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type

  Level: intermediate

.seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate(), PCASMType, PCASMSetType(), PCASMGetType(), PCCompositeType
@*/
PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(type, 2);
  ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    PCASMSetSortIndices - Determines whether subdomain indices are sorted.

    Logically Collective on PC

    Input Parameters:
+   pc     - the preconditioner context
-   doSort - sort the subdomain indices

    Level: intermediate

.keywords: PC, ASM, set, type

.seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(),
          PCASMCreateSubdomains2D()
@*/
PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,doSort,2);
  ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCASMGetSubKSP - Gets the local KSP contexts for all blocks on
   this processor.

   Collective on PC iff first_local is requested

   Input Parameter:
.  pc - the preconditioner context

   Output Parameters:
+  n_local - the number of blocks on this processor or NULL
.
   first_local - the global number of the first block on this processor or NULL,
                 all processors must request or all must pass NULL
-  ksp - the array of KSP contexts

   Note:
   After PCASMGetSubKSP() the array of KSPes is not to be freed.

   You must call KSPSetUp() before calling PCASMGetSubKSP().

   Fortran note:
   The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length.

   Level: advanced

.keywords: PC, ASM, additive Schwarz, get, sub, KSP, context

.seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(),
          PCASMCreateSubdomains2D(),
@*/
PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
   PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with
           its own KSP object.

   Options Database Keys:
+  -pc_asm_blocks <blks> - Sets total blocks
.  -pc_asm_overlap <ovl> - Sets overlap
.  -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type, default is restrict
-  -pc_asm_local_type [additive, multiplicative] - Sets ASM type, default is additive

     IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you
      will get a different convergence rate due to the default option of -pc_asm_type restrict. Use
      -pc_asm_type basic to use the standard ASM.

   Notes: Each processor can have one or more blocks, but a block cannot be shared by more
     than one processor. Use PCGASM for subdomains shared by multiple processes. Defaults to one block per processor.

     To set options on the solvers for each block append -sub_ to all the KSP, and PC
        options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly

     To set the options on the solvers separate for each block call PCASMGetSubKSP()
         and set the options directly on the resulting KSP object (you can access its PC
         with KSPGetPC())

   Level: beginner

   Concepts: additive Schwarz method

    References:
+   1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions
     Courant Institute, New York University Technical report
-   1. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations,
    Cambridge University Press.

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PC,
           PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), PCASMType, PCASMGetType(), PCASMSetLocalType(), PCASMGetLocalType()
           PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType(), PCCompositeType

M*/

/* Creates the PC_ASM context, fills in the default values for all options, and
   registers the PCASM-specific methods on the PC object */
PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc)
{
  PetscErrorCode ierr;
  PC_ASM         *osm;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr);

  osm->n                 = PETSC_DECIDE;
  osm->n_local           = 0;
  osm->n_local_true      = PETSC_DECIDE;
  osm->overlap           = 1;
  osm->ksp               = 0;
  osm->restriction       = 0;
  osm->lprolongation     = 0;
  osm->lrestriction      = 0;
  osm->x                 = 0;
  osm->y                 = 0;
  osm->is                = 0;
  osm->is_local          = 0;
  osm->mat               = 0;
  osm->pmat              = 0;
  osm->type              = PC_ASM_RESTRICT;
  osm->loctype           = PC_COMPOSITE_ADDITIVE;
  osm->same_local_solves = PETSC_TRUE;
  osm->sort_indices      = PETSC_TRUE;
  osm->dm_subdomains     = PETSC_FALSE;
  osm->sub_mat_type      = NULL;

  pc->data                 = (void*)osm;
  pc->ops->apply           = PCApply_ASM;
  pc->ops->applytranspose  = PCApplyTranspose_ASM;
  pc->ops->setup           = PCSetUp_ASM;
  pc->ops->reset           = PCReset_ASM;
  pc->ops->destroy         = PCDestroy_ASM;
  pc->ops->setfromoptions  = PCSetFromOptions_ASM;
  pc->ops->setuponblocks   = PCSetUpOnBlocks_ASM;
  pc->ops->view            = PCView_ASM;
  pc->ops->applyrichardson = 0;

  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
   PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz
   preconditioner for a any problem on a general grid.

   Collective

   Input Parameters:
+  A - The global matrix operator
-  n - the number of local blocks

   Output Parameters:
.  outis - the array of index sets defining the subdomains

   Level: advanced

   Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap
    from these if you use PCASMSetLocalSubdomains()

    In the Fortran version you must provide the array outis[] already allocated of length n.

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains()
@*/
PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[])
{
  MatPartitioning mpart;
  const char      *prefix;
  PetscInt        i,j,rstart,rend,bs;
  PetscBool       hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE;
  Mat             Ad = NULL, adj;
  IS              ispart,isnumb,*is;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(outis,3);
  if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n);

  /* Get prefix, row distribution, and block size */
  ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr);
  ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr);
  if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs);

  /* Get diagonal block from matrix if possible */
  ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr);
  if (hasop) {
    ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr);
  }
  if (Ad) {
    ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr);
    if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);}
  }
  if (Ad && n > 1) {
    PetscBool match,done;
    /* Try to setup a good matrix partitioning if available */
    ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr);
    ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr);
    ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr);
    if (!match) {
      ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr);
    }
    if (!match) { /* assume a "good" partitioner is available */
      PetscInt       na;
      const PetscInt *ia,*ja;
      ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        /* Build adjacency matrix by hand. Unfortunately a call to
           MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will
           remove the block-aij structure and we cannot expect
           MatPartitioning to split vertices as we need */
        PetscInt       i,j,len,nnz,cnt,*iia=0,*jja=0;
        const PetscInt *row;
        nnz = 0;
        for (i=0; i<na; i++) { /* count number of nonzeros */
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] == i) { /* don't count diagonal */
              len--; break;
            }
          }
          nnz += len;
        }
        ierr   = PetscMalloc1(na+1,&iia);CHKERRQ(ierr);
        ierr   = PetscMalloc1(nnz,&jja);CHKERRQ(ierr);
        nnz    = 0;
        iia[0] = 0;
        for (i=0; i<na; i++) { /* fill adjacency */
          cnt = 0;
          len = ia[i+1] - ia[i];
          row = ja + ia[i];
          for (j=0; j<len; j++) {
            if (row[j] != i) { /* if not diagonal */
              jja[nnz+cnt++] = row[j];
            }
          }
          nnz     += cnt;
          iia[i+1] = nnz;
        }
        /* Partitioning of the adjacency matrix; MatCreateMPIAdj takes ownership of iia/jja */
        ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr);
        ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr);
        ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr);
        ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr);
        ierr = MatDestroy(&adj);CHKERRQ(ierr);
        foundpart = PETSC_TRUE;
      }
      ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr);
    }
    ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr);
  }

  ierr   = PetscMalloc1(n,&is);CHKERRQ(ierr);
  *outis = is;

  if (!foundpart) {

    /* Partitioning by contiguous chunks of rows */

    PetscInt mbs   = (rend-rstart)/bs;
    PetscInt start = rstart;
    for (i=0; i<n; i++) {
      PetscInt count = (mbs/n + ((mbs % n) > i)) * bs;
      ierr   = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr);
      start += count;
    }

  } else {

    /* Partitioning by adjacency of diagonal block */

    const PetscInt *numbering;
    PetscInt       *count,nidx,*indices,*newidx,start=0;
    /* Get node count in each partition */
    ierr = PetscMalloc1(n,&count);CHKERRQ(ierr);
    ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case */
      for (i=0; i<n; i++) count[i] *= bs;
    }
    /* Build indices from node numbering */
    ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr);
    ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr);
    for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */
    ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr);
    ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr);
    ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr);
    if (isbaij && bs > 1) { /* adjust for the block-aij case: expand each node to its bs dofs */
      ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr);
      for (i=0; i<nidx; i++) {
        for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j;
      }
      ierr    = PetscFree(indices);CHKERRQ(ierr);
      nidx   *= bs;
      indices = newidx;
    }
    /* Shift to get global indices */
    for (i=0; i<nidx; i++) indices[i] += rstart;

    /* Build the index sets for each block */
    for (i=0; i<n; i++) {
      ierr   = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr);
      ierr   = ISSort(is[i]);CHKERRQ(ierr);
      start += count[i];
    }

    ierr = PetscFree(count);CHKERRQ(ierr);
    ierr = PetscFree(indices);CHKERRQ(ierr);
    ierr = ISDestroy(&isnumb);CHKERRQ(ierr);
    ierr = ISDestroy(&ispart);CHKERRQ(ierr);

  }
  PetscFunctionReturn(0);
}

/*@C
   PCASMDestroySubdomains - Destroys the index sets created with
   PCASMCreateSubdomains(). Should be called after setting subdomains
   with PCASMSetLocalSubdomains().

   Collective

   Input Parameters:
+  n - the number of index sets
.  is - the array of index sets
-  is_local - the array of local index sets, can be NULL

   Level: advanced

.keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid

.seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains()
@*/
PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[])
{
  PetscInt       i;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (n <= 0) PetscFunctionReturn(0);
  if (is) {
    PetscValidPointer(is,2);
    for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is);CHKERRQ(ierr);
  }
  if (is_local) {
    PetscValidPointer(is_local,3);
    for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); }
    ierr = PetscFree(is_local);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/*@
   PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz
   preconditioner for a two-dimensional problem on a regular grid.

   Not Collective

   Input Parameters:
+  m, n - the number of mesh points in the x and y directions
.  M, N - the number of subdomains in the x and y directions
.  dof - degrees of freedom per node
-  overlap - overlap in mesh lines

   Output Parameters:
+  Nsub - the number of subdomains created
.
is - array of index sets defining overlapping (if overlap > 0) subdomains 1573 - is_local - array of index sets defining non-overlapping subdomains 1574 1575 Note: 1576 Presently PCAMSCreateSubdomains2d() is valid only for sequential 1577 preconditioners. More general related routines are 1578 PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains(). 1579 1580 Level: advanced 1581 1582 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid 1583 1584 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(), 1585 PCASMSetOverlap() 1586 @*/ 1587 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local) 1588 { 1589 PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer; 1590 PetscErrorCode ierr; 1591 PetscInt nidx,*idx,loc,ii,jj,count; 1592 1593 PetscFunctionBegin; 1594 if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," "); 1595 1596 *Nsub = N*M; 1597 ierr = PetscMalloc1(*Nsub,is);CHKERRQ(ierr); 1598 ierr = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr); 1599 ystart = 0; 1600 loc_outer = 0; 1601 for (i=0; i<N; i++) { 1602 height = n/N + ((n % N) > i); /* height of subdomain */ 1603 if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n"); 1604 yleft = ystart - overlap; if (yleft < 0) yleft = 0; 1605 yright = ystart + height + overlap; if (yright > n) yright = n; 1606 xstart = 0; 1607 for (j=0; j<M; j++) { 1608 width = m/M + ((m % M) > j); /* width of subdomain */ 1609 if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m"); 1610 xleft = xstart - overlap; if (xleft < 0) xleft = 0; 1611 xright = xstart + width + overlap; if (xright > m) xright = m; 1612 nidx = (xright - xleft)*(yright - yleft); 1613 ierr = PetscMalloc1(nidx,&idx);CHKERRQ(ierr); 1614 loc = 0; 1615 for (ii=yleft; ii<yright; ii++) { 1616 count = 
m*ii + xleft; 1617 for (jj=xleft; jj<xright; jj++) idx[loc++] = count++; 1618 } 1619 ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr); 1620 if (overlap == 0) { 1621 ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr); 1622 1623 (*is_local)[loc_outer] = (*is)[loc_outer]; 1624 } else { 1625 for (loc=0,ii=ystart; ii<ystart+height; ii++) { 1626 for (jj=xstart; jj<xstart+width; jj++) { 1627 idx[loc++] = m*ii + jj; 1628 } 1629 } 1630 ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr); 1631 } 1632 ierr = PetscFree(idx);CHKERRQ(ierr); 1633 xstart += width; 1634 loc_outer++; 1635 } 1636 ystart += height; 1637 } 1638 for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); } 1639 PetscFunctionReturn(0); 1640 } 1641 1642 /*@C 1643 PCASMGetLocalSubdomains - Gets the local subdomains (for this processor 1644 only) for the additive Schwarz preconditioner. 1645 1646 Not Collective 1647 1648 Input Parameter: 1649 . pc - the preconditioner context 1650 1651 Output Parameters: 1652 + n - the number of subdomains for this processor (default value = 1) 1653 . is - the index sets that define the subdomains for this processor 1654 - is_local - the index sets that define the local part of the subdomains for this processor (can be NULL) 1655 1656 1657 Notes: 1658 The IS numbering is in the parallel, global numbering of the vector. 
1659 1660 Level: advanced 1661 1662 .keywords: PC, ASM, set, local, subdomains, additive Schwarz 1663 1664 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(), 1665 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices() 1666 @*/ 1667 PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[]) 1668 { 1669 PC_ASM *osm = (PC_ASM*)pc->data; 1670 PetscErrorCode ierr; 1671 PetscBool match; 1672 1673 PetscFunctionBegin; 1674 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1675 PetscValidIntPointer(n,2); 1676 if (is) PetscValidPointer(is,3); 1677 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1678 if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM"); 1679 if (n) *n = osm->n_local_true; 1680 if (is) *is = osm->is; 1681 if (is_local) *is_local = osm->is_local; 1682 PetscFunctionReturn(0); 1683 } 1684 1685 /*@C 1686 PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor 1687 only) for the additive Schwarz preconditioner. 1688 1689 Not Collective 1690 1691 Input Parameter: 1692 . pc - the preconditioner context 1693 1694 Output Parameters: 1695 + n - the number of matrices for this processor (default value = 1) 1696 - mat - the matrices 1697 1698 Level: advanced 1699 1700 Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks()) 1701 1702 Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner. 
1703 1704 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi 1705 1706 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(), 1707 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices() 1708 @*/ 1709 PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[]) 1710 { 1711 PC_ASM *osm; 1712 PetscErrorCode ierr; 1713 PetscBool match; 1714 1715 PetscFunctionBegin; 1716 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1717 PetscValidIntPointer(n,2); 1718 if (mat) PetscValidPointer(mat,3); 1719 if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp()."); 1720 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1721 if (!match) { 1722 if (n) *n = 0; 1723 if (mat) *mat = NULL; 1724 } else { 1725 osm = (PC_ASM*)pc->data; 1726 if (n) *n = osm->n_local_true; 1727 if (mat) *mat = osm->pmat; 1728 } 1729 PetscFunctionReturn(0); 1730 } 1731 1732 /*@ 1733 PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible. 1734 1735 Logically Collective 1736 1737 Input Parameter: 1738 + pc - the preconditioner 1739 - flg - boolean indicating whether to use subdomains defined by the DM 1740 1741 Options Database Key: 1742 . -pc_asm_dm_subdomains 1743 1744 Level: intermediate 1745 1746 Notes: 1747 PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(), 1748 so setting either of the first two effectively turns the latter off. 
1749 1750 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz 1751 1752 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap() 1753 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains() 1754 @*/ 1755 PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg) 1756 { 1757 PC_ASM *osm = (PC_ASM*)pc->data; 1758 PetscErrorCode ierr; 1759 PetscBool match; 1760 1761 PetscFunctionBegin; 1762 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1763 PetscValidLogicalCollectiveBool(pc,flg,2); 1764 if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC."); 1765 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1766 if (match) { 1767 osm->dm_subdomains = flg; 1768 } 1769 PetscFunctionReturn(0); 1770 } 1771 1772 /*@ 1773 PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible. 1774 Not Collective 1775 1776 Input Parameter: 1777 . pc - the preconditioner 1778 1779 Output Parameter: 1780 . flg - boolean indicating whether to use subdomains defined by the DM 1781 1782 Level: intermediate 1783 1784 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz 1785 1786 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap() 1787 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains() 1788 @*/ 1789 PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg) 1790 { 1791 PC_ASM *osm = (PC_ASM*)pc->data; 1792 PetscErrorCode ierr; 1793 PetscBool match; 1794 1795 PetscFunctionBegin; 1796 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1797 PetscValidPointer(flg,2); 1798 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1799 if (match) { 1800 if (flg) *flg = osm->dm_subdomains; 1801 } 1802 PetscFunctionReturn(0); 1803 } 1804 1805 /*@ 1806 PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string. 
1807 1808 Not Collective 1809 1810 Input Parameter: 1811 . pc - the PC 1812 1813 Output Parameter: 1814 . -pc_asm_sub_mat_type - name of matrix type 1815 1816 Level: advanced 1817 1818 .keywords: PC, PCASM, MatType, set 1819 1820 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat 1821 @*/ 1822 PetscErrorCode PCASMGetSubMatType(PC pc,MatType *sub_mat_type){ 1823 PetscErrorCode ierr; 1824 1825 ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr); 1826 PetscFunctionReturn(0); 1827 } 1828 1829 /*@ 1830 PCASMSetSubMatType - Set the type of matrix used for ASM subsolves 1831 1832 Collective on Mat 1833 1834 Input Parameters: 1835 + pc - the PC object 1836 - sub_mat_type - matrix type 1837 1838 Options Database Key: 1839 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed. 1840 1841 Notes: 1842 See "${PETSC_DIR}/include/petscmat.h" for available types 1843 1844 Level: advanced 1845 1846 .keywords: PC, PCASM, MatType, set 1847 1848 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat 1849 @*/ 1850 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type) 1851 { 1852 PetscErrorCode ierr; 1853 1854 ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr); 1855 PetscFunctionReturn(0); 1856 } 1857