1 2 /* 3 This file defines an additive Schwarz preconditioner for any Mat implementation. 4 5 Note that each processor may have any number of subdomains. But in order to 6 deal easily with the VecScatter(), we treat each processor as if it has the 7 same number of subdomains. 8 9 n - total number of true subdomains on all processors 10 n_local_true - actual number of subdomains on this processor 11 n_local = maximum over all processors of n_local_true 12 */ 13 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 14 #include <petscdm.h> 15 16 typedef struct { 17 PetscInt n, n_local, n_local_true; 18 PetscInt overlap; /* overlap requested by user */ 19 KSP *ksp; /* linear solvers for each block */ 20 VecScatter *restriction; /* mapping from global to subregion */ 21 VecScatter *localization; /* mapping from overlapping to non-overlapping subregion */ 22 VecScatter *prolongation; /* mapping from subregion to global */ 23 Vec *x,*y,*y_local; /* work vectors */ 24 IS *is; /* index set that defines each overlapping subdomain */ 25 IS *is_local; /* index set that defines each non-overlapping subdomain, may be NULL */ 26 Mat *mat,*pmat; /* mat is not currently used */ 27 PCASMType type; /* use reduced interpolation, restriction or both */ 28 PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */ 29 PetscBool same_local_solves; /* flag indicating whether all local solvers are same */ 30 PetscBool sort_indices; /* flag to sort subdomain indices */ 31 PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */ 32 PCCompositeType loctype; /* the type of composition for local solves */ 33 MatType sub_mat_type; /* the type of Mat used for subdomain solves (can be MATSAME or NULL) */ 34 /* For multiplicative solve */ 35 Mat *lmats; /* submatrices for overlapping multiplicative (process) subdomain */ 36 Vec lx, ly; /* work vectors */ 37 IS lis; /* index set that defines each overlapping multiplicative (process) subdomain */ 38 } PC_ASM; 39 40 static PetscErrorCode PCView_ASM(PC pc,PetscViewer viewer) 41 { 42 PC_ASM *osm = (PC_ASM*)pc->data; 43 PetscErrorCode ierr; 44 PetscMPIInt rank; 45 PetscInt i,bsz; 46 PetscBool iascii,isstring; 47 PetscViewer sviewer; 48 49 PetscFunctionBegin; 50 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 51 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr); 52 if (iascii) { 53 char overlaps[256] = "user-defined overlap",blocks[256] = "total subdomain blocks not yet set"; 54 if (osm->overlap >= 0) {ierr = PetscSNPrintf(overlaps,sizeof(overlaps),"amount of overlap = %D",osm->overlap);CHKERRQ(ierr);} 55 if (osm->n > 0) {ierr = PetscSNPrintf(blocks,sizeof(blocks),"total subdomain blocks = %D",osm->n);CHKERRQ(ierr);} 56 ierr = PetscViewerASCIIPrintf(viewer," Additive Schwarz: %s, %s\n",blocks,overlaps);CHKERRQ(ierr); 57 ierr = PetscViewerASCIIPrintf(viewer," Additive Schwarz: restriction/interpolation type - %s\n",PCASMTypes[osm->type]);CHKERRQ(ierr); 58 if (osm->dm_subdomains) {ierr = PetscViewerASCIIPrintf(viewer," Additive Schwarz: using DM to define subdomains\n");CHKERRQ(ierr);} 59 if (osm->loctype != PC_COMPOSITE_ADDITIVE) {ierr = PetscViewerASCIIPrintf(viewer," Additive Schwarz: local solve composition type - %s\n",PCCompositeTypes[osm->loctype]);CHKERRQ(ierr);} 60 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 61 if (osm->same_local_solves) { 62 if (osm->ksp) { 63 ierr = PetscViewerASCIIPrintf(viewer," Local solve is same for all blocks, in the following KSP and PC objects:\n");CHKERRQ(ierr); 64 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 65 if (!rank) { 66 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 67 ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr); 68 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 69 } 70 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 71 } 72 } else { 73 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 74 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] number of local blocks = %D\n",(int)rank,osm->n_local_true);CHKERRQ(ierr); 75 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 76 ierr = PetscViewerASCIIPrintf(viewer," Local solve info for each block is in the following KSP and PC objects:\n");CHKERRQ(ierr); 77 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 78 ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 79 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 80 for (i=0; i<osm->n_local_true; i++) { 81 ierr = ISGetLocalSize(osm->is[i],&bsz);CHKERRQ(ierr); 82 ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%d] local block number %D, size = %D\n",(int)rank,i,bsz);CHKERRQ(ierr); 83 ierr = KSPView(osm->ksp[i],sviewer);CHKERRQ(ierr); 84 ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 85 } 86 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 87 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 88 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 89 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 90 } 91 } else if (isstring) { 92 ierr = PetscViewerStringSPrintf(viewer," blocks=%D, overlap=%D, type=%s",osm->n,osm->overlap,PCASMTypes[osm->type]);CHKERRQ(ierr); 93 ierr = PetscViewerGetSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 94 if (osm->ksp) {ierr = KSPView(osm->ksp[0],sviewer);CHKERRQ(ierr);} 95 ierr = PetscViewerRestoreSubViewer(viewer,PETSC_COMM_SELF,&sviewer);CHKERRQ(ierr); 96 } 97 PetscFunctionReturn(0); 98 } 99 100 static PetscErrorCode PCASMPrintSubdomains(PC pc) 101 { 102 PC_ASM *osm = (PC_ASM*)pc->data; 103 const char *prefix; 104 char fname[PETSC_MAX_PATH_LEN+1]; 105 PetscViewer viewer, sviewer; 106 char *s; 107 PetscInt i,j,nidx; 108 const PetscInt *idx; 109 PetscMPIInt rank, size; 110 PetscErrorCode ierr; 111 112 PetscFunctionBegin; 113 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr); 114 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr); 115 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 116 ierr = PetscOptionsGetString(NULL,prefix,"-pc_asm_print_subdomains",fname,PETSC_MAX_PATH_LEN,NULL);CHKERRQ(ierr); 117 if (fname[0] == 0) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }; 118 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr); 119 for (i=0; i<osm->n_local; i++) { 120 if (i < osm->n_local_true) { 121 ierr = ISGetLocalSize(osm->is[i],&nidx);CHKERRQ(ierr); 122 ierr = ISGetIndices(osm->is[i],&idx);CHKERRQ(ierr); 123 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 124 ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr); 125 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr); 126 ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D with overlap:\n", rank, size, i);CHKERRQ(ierr); 127 for (j=0; j<nidx; j++) { 128 ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr); 129 } 130 ierr = ISRestoreIndices(osm->is[i],&idx);CHKERRQ(ierr); 131 ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr); 132 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 133 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 134 ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr); 135 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 136 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 137 ierr = PetscFree(s);CHKERRQ(ierr); 138 if (osm->is_local) { 139 /* Print to a string viewer; no more than 15 characters per index plus 512 char for the header.*/ 140 ierr = PetscMalloc1(16*(nidx+1)+512, &s);CHKERRQ(ierr); 141 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, s, 16*(nidx+1)+512, &sviewer);CHKERRQ(ierr); 142 ierr = PetscViewerStringSPrintf(sviewer, "[%D:%D] Subdomain %D without overlap:\n", rank, size, i);CHKERRQ(ierr); 143 ierr = ISGetLocalSize(osm->is_local[i],&nidx);CHKERRQ(ierr); 144 ierr = ISGetIndices(osm->is_local[i],&idx);CHKERRQ(ierr); 145 for (j=0; j<nidx; j++) { 146 ierr = PetscViewerStringSPrintf(sviewer,"%D ",idx[j]);CHKERRQ(ierr); 147 } 148 ierr = ISRestoreIndices(osm->is_local[i],&idx);CHKERRQ(ierr); 149 ierr = PetscViewerStringSPrintf(sviewer,"\n");CHKERRQ(ierr); 150 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 151 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 152 ierr = PetscViewerASCIISynchronizedPrintf(viewer, s);CHKERRQ(ierr); 153 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 154 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 155 ierr = PetscFree(s);CHKERRQ(ierr); 156 } 157 } else { 158 /* Participate in collective viewer calls. */ 159 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 160 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 161 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 162 /* Assume either all ranks have is_local or none do. */ 163 if (osm->is_local) { 164 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 165 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 166 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 167 } 168 } 169 } 170 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 171 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 172 PetscFunctionReturn(0); 173 } 174 175 static PetscErrorCode PCSetUp_ASM(PC pc) 176 { 177 PC_ASM *osm = (PC_ASM*)pc->data; 178 PetscErrorCode ierr; 179 PetscBool symset,flg; 180 PetscInt i,m,m_local; 181 MatReuse scall = MAT_REUSE_MATRIX; 182 IS isl; 183 KSP ksp; 184 PC subpc; 185 const char *prefix,*pprefix; 186 Vec vec; 187 DM *domain_dm = NULL; 188 189 PetscFunctionBegin; 190 if (!pc->setupcalled) { 191 192 if (!osm->type_set) { 193 ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr); 194 if (symset && flg) osm->type = PC_ASM_BASIC; 195 } 196 197 /* Note: if subdomains have been set either via PCASMSetTotalSubdomains() or via PCASMSetLocalSubdomains(), osm->n_local_true will not be PETSC_DECIDE */ 198 if (osm->n_local_true == PETSC_DECIDE) { 199 /* no subdomains given */ 200 /* try pc->dm first, if allowed */ 201 if (osm->dm_subdomains && pc->dm) { 202 PetscInt num_domains, d; 203 char **domain_names; 204 IS *inner_domain_is, *outer_domain_is; 205 ierr = DMCreateDomainDecomposition(pc->dm, &num_domains, &domain_names, &inner_domain_is, &outer_domain_is, &domain_dm);CHKERRQ(ierr); 206 osm->overlap = -1; /* We do not want to increase the overlap of the IS. 207 A future improvement of this code might allow one to use 208 DM-defined subdomains and also increase the overlap, 209 but that is not currently supported */ 210 if (num_domains) { 211 ierr = PCASMSetLocalSubdomains(pc, num_domains, outer_domain_is, inner_domain_is);CHKERRQ(ierr); 212 } 213 for (d = 0; d < num_domains; ++d) { 214 if (domain_names) {ierr = PetscFree(domain_names[d]);CHKERRQ(ierr);} 215 if (inner_domain_is) {ierr = ISDestroy(&inner_domain_is[d]);CHKERRQ(ierr);} 216 if (outer_domain_is) {ierr = ISDestroy(&outer_domain_is[d]);CHKERRQ(ierr);} 217 } 218 ierr = PetscFree(domain_names);CHKERRQ(ierr); 219 ierr = PetscFree(inner_domain_is);CHKERRQ(ierr); 220 ierr = PetscFree(outer_domain_is);CHKERRQ(ierr); 221 } 222 if (osm->n_local_true == PETSC_DECIDE) { 223 /* still no subdomains; use one subdomain per processor */ 224 osm->n_local_true = 1; 225 } 226 } 227 { /* determine the global and max number of subdomains */ 228 struct {PetscInt max,sum;} inwork,outwork; 229 PetscMPIInt size; 230 231 inwork.max = osm->n_local_true; 232 inwork.sum = osm->n_local_true; 233 ierr = MPIU_Allreduce(&inwork,&outwork,1,MPIU_2INT,MPIU_MAXSUM_OP,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 234 osm->n_local = outwork.max; 235 osm->n = outwork.sum; 236 237 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 238 if (outwork.max == 1 && outwork.sum == size) { 239 /* osm->n_local_true = 1 on all processes, set this option may enable use of optimized MatCreateSubMatrices() implementation */ 240 ierr = MatSetOption(pc->pmat,MAT_SUBMAT_SINGLEIS,PETSC_TRUE);CHKERRQ(ierr); 241 } 242 } 243 if (!osm->is) { /* create the index sets */ 244 ierr = PCASMCreateSubdomains(pc->pmat,osm->n_local_true,&osm->is);CHKERRQ(ierr); 245 } 246 if (osm->n_local_true > 1 && !osm->is_local) { 247 ierr = PetscMalloc1(osm->n_local_true,&osm->is_local);CHKERRQ(ierr); 248 for (i=0; i<osm->n_local_true; i++) { 249 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */ 250 ierr = ISDuplicate(osm->is[i],&osm->is_local[i]);CHKERRQ(ierr); 251 ierr = ISCopy(osm->is[i],osm->is_local[i]);CHKERRQ(ierr); 252 } else { 253 ierr = PetscObjectReference((PetscObject)osm->is[i]);CHKERRQ(ierr); 254 osm->is_local[i] = osm->is[i]; 255 } 256 } 257 } 258 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 259 flg = PETSC_FALSE; 260 ierr = PetscOptionsGetBool(NULL,prefix,"-pc_asm_print_subdomains",&flg,NULL);CHKERRQ(ierr); 261 if (flg) { ierr = PCASMPrintSubdomains(pc);CHKERRQ(ierr); } 262 263 if (osm->overlap > 0) { 264 /* Extend the "overlapping" regions by a number of steps */ 265 ierr = MatIncreaseOverlap(pc->pmat,osm->n_local_true,osm->is,osm->overlap);CHKERRQ(ierr); 266 } 267 if (osm->sort_indices) { 268 for (i=0; i<osm->n_local_true; i++) { 269 ierr = ISSort(osm->is[i]);CHKERRQ(ierr); 270 if (osm->is_local) { 271 ierr = ISSort(osm->is_local[i]);CHKERRQ(ierr); 272 } 273 } 274 } 275 276 if (!osm->ksp) { 277 /* Create the local solvers */ 278 ierr = PetscMalloc1(osm->n_local_true,&osm->ksp);CHKERRQ(ierr); 279 if (domain_dm) { 280 ierr = PetscInfo(pc,"Setting up ASM subproblems using the embedded DM\n");CHKERRQ(ierr); 281 } 282 for (i=0; i<osm->n_local_true; i++) { 283 ierr = KSPCreate(PETSC_COMM_SELF,&ksp);CHKERRQ(ierr); 284 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 285 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 286 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 287 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 288 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 289 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 290 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 291 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 292 if (domain_dm) { 293 ierr = KSPSetDM(ksp, domain_dm[i]);CHKERRQ(ierr); 294 ierr = KSPSetDMActive(ksp, PETSC_FALSE);CHKERRQ(ierr); 295 ierr = DMDestroy(&domain_dm[i]);CHKERRQ(ierr); 296 } 297 osm->ksp[i] = ksp; 298 } 299 if (domain_dm) { 300 ierr = PetscFree(domain_dm);CHKERRQ(ierr); 301 } 302 } 303 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 304 PetscInt m; 305 306 ierr = ISConcatenate(PETSC_COMM_SELF, osm->n_local_true, osm->is, &osm->lis);CHKERRQ(ierr); 307 ierr = ISSortRemoveDups(osm->lis);CHKERRQ(ierr); 308 ierr = ISGetLocalSize(osm->lis, &m);CHKERRQ(ierr); 309 ierr = VecCreateSeq(PETSC_COMM_SELF, m, &osm->lx);CHKERRQ(ierr); 310 ierr = VecDuplicate(osm->lx, &osm->ly);CHKERRQ(ierr); 311 } 312 scall = MAT_INITIAL_MATRIX; 313 } else { 314 /* 315 Destroy the blocks from the previous iteration 316 */ 317 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 318 ierr = MatDestroyMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr); 319 scall = MAT_INITIAL_MATRIX; 320 } 321 } 322 323 /* 324 Extract out the submatrices 325 */ 326 ierr = MatCreateSubMatrices(pc->pmat,osm->n_local_true,osm->is,osm->is,scall,&osm->pmat);CHKERRQ(ierr); 327 if (scall == MAT_INITIAL_MATRIX) { 328 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 329 for (i=0; i<osm->n_local_true; i++) { 330 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr); 331 ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr); 332 } 333 } 334 335 /* Convert the types of the submatrices (if needbe) */ 336 if (osm->sub_mat_type) { 337 for (i=0; i<osm->n_local_true; i++) { 338 ierr = MatConvert(osm->pmat[i],osm->sub_mat_type,MAT_INPLACE_MATRIX,&(osm->pmat[i]));CHKERRQ(ierr); 339 } 340 } 341 342 if(!pc->setupcalled){ 343 /* Create the local work vectors (from the local matrices) and scatter contexts */ 344 ierr = MatCreateVecs(pc->pmat,&vec,0);CHKERRQ(ierr); 345 ierr = PetscMalloc1(osm->n_local,&osm->restriction);CHKERRQ(ierr); 346 if (osm->is_local) {ierr = PetscMalloc1(osm->n_local,&osm->localization);CHKERRQ(ierr);} 347 ierr = PetscMalloc1(osm->n_local,&osm->prolongation);CHKERRQ(ierr); 348 ierr = PetscMalloc1(osm->n_local,&osm->x);CHKERRQ(ierr); 349 ierr = PetscMalloc1(osm->n_local,&osm->y);CHKERRQ(ierr); 350 ierr = PetscMalloc1(osm->n_local,&osm->y_local);CHKERRQ(ierr); 351 for (i=0; i<osm->n_local_true; ++i) { 352 ierr = ISGetLocalSize(osm->is[i],&m);CHKERRQ(ierr); 353 ierr = MatCreateVecs(osm->pmat[i],&osm->x[i],NULL);CHKERRQ(ierr); 354 ierr = ISCreateStride(PETSC_COMM_SELF,m,0,1,&isl);CHKERRQ(ierr); 355 ierr = VecScatterCreate(vec,osm->is[i],osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr); 356 ierr = ISDestroy(&isl);CHKERRQ(ierr); 357 ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr); 358 if (osm->is_local) { 359 ISLocalToGlobalMapping ltog; 360 IS isll; 361 const PetscInt *idx_local; 362 PetscInt *idx,nout; 363 364 ierr = ISLocalToGlobalMappingCreateIS(osm->is[i],<og);CHKERRQ(ierr); 365 ierr = ISGetLocalSize(osm->is_local[i],&m_local);CHKERRQ(ierr); 366 ierr = ISGetIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr); 367 ierr = PetscMalloc1(m_local,&idx);CHKERRQ(ierr); 368 ierr = ISGlobalToLocalMappingApply(ltog,IS_GTOLM_DROP,m_local,idx_local,&nout,idx);CHKERRQ(ierr); 369 ierr = ISLocalToGlobalMappingDestroy(<og);CHKERRQ(ierr); 370 if (nout != m_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"is_local not a subset of is"); 371 ierr = ISRestoreIndices(osm->is_local[i], &idx_local);CHKERRQ(ierr); 372 ierr = ISCreateGeneral(PETSC_COMM_SELF,m_local,idx,PETSC_OWN_POINTER,&isll);CHKERRQ(ierr); 373 ierr = ISCreateStride(PETSC_COMM_SELF,m_local,0,1,&isl);CHKERRQ(ierr); 374 ierr = VecCreateSeq(PETSC_COMM_SELF,m_local,&osm->y_local[i]);CHKERRQ(ierr); 375 ierr = VecScatterCreate(osm->y[i],isll,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr); 376 ierr = ISDestroy(&isll);CHKERRQ(ierr); 377 378 ierr = VecScatterCreate(vec,osm->is_local[i],osm->y_local[i],isl,&osm->prolongation[i]);CHKERRQ(ierr); 379 ierr = ISDestroy(&isl);CHKERRQ(ierr); 380 } else { 381 ierr = VecGetLocalSize(vec,&m_local);CHKERRQ(ierr); 382 383 osm->y_local[i] = osm->y[i]; 384 385 ierr = PetscObjectReference((PetscObject) osm->y[i]);CHKERRQ(ierr); 386 387 osm->prolongation[i] = osm->restriction[i]; 388 389 ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr); 390 } 391 } 392 for (i=osm->n_local_true; i<osm->n_local; i++) { 393 ierr = VecCreateSeq(PETSC_COMM_SELF,0,&osm->x[i]);CHKERRQ(ierr); 394 ierr = VecDuplicate(osm->x[i],&osm->y[i]);CHKERRQ(ierr); 395 ierr = VecDuplicate(osm->x[i],&osm->y_local[i]);CHKERRQ(ierr); 396 ierr = ISCreateStride(PETSC_COMM_SELF,0,0,1,&isl);CHKERRQ(ierr); 397 ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->restriction[i]);CHKERRQ(ierr); 398 if (osm->is_local) { 399 ierr = VecScatterCreate(osm->y[i],isl,osm->y_local[i],isl,&osm->localization[i]);CHKERRQ(ierr); 400 ierr = VecScatterCreate(vec,isl,osm->x[i],isl,&osm->prolongation[i]);CHKERRQ(ierr); 401 } else { 402 osm->prolongation[i] = osm->restriction[i]; 403 ierr = PetscObjectReference((PetscObject) osm->restriction[i]);CHKERRQ(ierr); 404 } 405 ierr = ISDestroy(&isl);CHKERRQ(ierr); 406 } 407 ierr = VecDestroy(&vec);CHKERRQ(ierr); 408 } 409 410 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 411 IS *cis; 412 PetscInt c; 413 414 ierr = PetscMalloc1(osm->n_local_true, &cis);CHKERRQ(ierr); 415 for (c = 0; c < osm->n_local_true; ++c) cis[c] = osm->lis; 416 ierr = MatCreateSubMatrices(pc->pmat, osm->n_local_true, osm->is, cis, scall, &osm->lmats);CHKERRQ(ierr); 417 ierr = PetscFree(cis);CHKERRQ(ierr); 418 } 419 420 /* Return control to the user so that the submatrices can be modified (e.g., to apply 421 different boundary conditions for the submatrices than for the global problem) */ 422 ierr = PCModifySubMatrices(pc,osm->n_local_true,osm->is,osm->is,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 423 424 /* 425 Loop over subdomains putting them into local ksp 426 */ 427 for (i=0; i<osm->n_local_true; i++) { 428 ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr); 429 if (!pc->setupcalled) { 430 ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr); 431 } 432 } 433 PetscFunctionReturn(0); 434 } 435 436 static PetscErrorCode PCSetUpOnBlocks_ASM(PC pc) 437 { 438 PC_ASM *osm = (PC_ASM*)pc->data; 439 PetscErrorCode ierr; 440 PetscInt i; 441 KSPConvergedReason reason; 442 443 PetscFunctionBegin; 444 for (i=0; i<osm->n_local_true; i++) { 445 ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr); 446 ierr = KSPGetConvergedReason(osm->ksp[i],&reason);CHKERRQ(ierr); 447 if (reason == KSP_DIVERGED_PCSETUP_FAILED) { 448 pc->failedreason = PC_SUBPC_ERROR; 449 } 450 } 451 PetscFunctionReturn(0); 452 } 453 454 static PetscErrorCode PCApply_ASM(PC pc,Vec x,Vec y) 455 { 456 PC_ASM *osm = (PC_ASM*)pc->data; 457 PetscErrorCode ierr; 458 PetscInt i,n_local = osm->n_local,n_local_true = osm->n_local_true; 459 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 460 461 PetscFunctionBegin; 462 /* 463 Support for limiting the restriction or interpolation to only local 464 subdomain values (leaving the other values 0). 465 */ 466 if (!(osm->type & PC_ASM_RESTRICT)) { 467 forward = SCATTER_FORWARD_LOCAL; 468 /* have to zero the work RHS since scatter may leave some slots empty */ 469 for (i=0; i<n_local_true; i++) { 470 ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr); 471 } 472 } 473 if (!(osm->type & PC_ASM_INTERPOLATE)) reverse = SCATTER_REVERSE_LOCAL; 474 475 switch (osm->loctype) 476 { 477 case PC_COMPOSITE_ADDITIVE: 478 for (i=0; i<n_local; i++) { 479 ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr); 480 } 481 ierr = VecZeroEntries(y);CHKERRQ(ierr); 482 /* do the local solves */ 483 for (i=0; i<n_local_true; i++) { 484 ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr); 485 ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr); 486 if (osm->localization) { 487 ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr); 488 ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr); 489 } 490 ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr); 491 } 492 /* handle the rest of the scatters that do not have local solves */ 493 for (i=n_local_true; i<n_local; i++) { 494 ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr); 495 ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr); 496 } 497 for (i=0; i<n_local; i++) { 498 ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr); 499 } 500 break; 501 case PC_COMPOSITE_MULTIPLICATIVE: 502 ierr = VecZeroEntries(y);CHKERRQ(ierr); 503 /* do the local solves */ 504 for (i = 0; i < n_local_true; ++i) { 505 if (i > 0) { 506 /* Update rhs */ 507 ierr = VecScatterBegin(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr); 508 ierr = VecScatterEnd(osm->restriction[i], osm->lx, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr); 509 } else { 510 ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr); 511 } 512 ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr); 513 ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], ADD_VALUES, forward);CHKERRQ(ierr); 514 ierr = KSPSolve(osm->ksp[i], osm->x[i], osm->y[i]);CHKERRQ(ierr); 515 if (osm->localization) { 516 ierr = VecScatterBegin(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr); 517 ierr = VecScatterEnd(osm->localization[i], osm->y[i], osm->y_local[i], INSERT_VALUES, forward);CHKERRQ(ierr); 518 } 519 ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr); 520 ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr); 521 if (i < n_local_true-1) { 522 ierr = VecSet(osm->ly, 0.0);CHKERRQ(ierr); 523 ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr); 524 ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], osm->ly, INSERT_VALUES, reverse);CHKERRQ(ierr); 525 ierr = VecScale(osm->ly, -1.0);CHKERRQ(ierr); 526 ierr = MatMult(osm->lmats[i+1], osm->ly, osm->y[i+1]);CHKERRQ(ierr); 527 ierr = VecScatterBegin(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr); 528 ierr = VecScatterEnd(osm->restriction[i+1], osm->y[i+1], osm->lx, INSERT_VALUES, reverse);CHKERRQ(ierr); 529 } 530 } 531 /* handle the rest of the scatters that do not have local solves */ 532 for (i = n_local_true; i < n_local; ++i) { 533 ierr = VecScatterBegin(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr); 534 ierr = VecScatterEnd(osm->restriction[i], x, osm->x[i], INSERT_VALUES, forward);CHKERRQ(ierr); 535 ierr = VecScatterBegin(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr); 536 ierr = VecScatterEnd(osm->prolongation[i], osm->y_local[i], y, ADD_VALUES, reverse);CHKERRQ(ierr); 537 } 538 break; 539 default: SETERRQ1(PetscObjectComm((PetscObject) pc), PETSC_ERR_ARG_WRONG, "Invalid local composition type: %s", PCCompositeTypes[osm->loctype]); 540 } 541 PetscFunctionReturn(0); 542 } 543 544 static PetscErrorCode PCApplyTranspose_ASM(PC pc,Vec x,Vec y) 545 { 546 PC_ASM *osm = (PC_ASM*)pc->data; 547 PetscErrorCode ierr; 548 PetscInt i,n_local = osm->n_local,n_local_true = osm->n_local_true; 549 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 550 551 PetscFunctionBegin; 552 /* 553 Support for limiting the restriction or interpolation to only local 554 subdomain values (leaving the other values 0). 555 556 Note: these are reversed from the PCApply_ASM() because we are applying the 557 transpose of the three terms 558 */ 559 if (!(osm->type & PC_ASM_INTERPOLATE)) { 560 forward = SCATTER_FORWARD_LOCAL; 561 /* have to zero the work RHS since scatter may leave some slots empty */ 562 for (i=0; i<n_local_true; i++) { 563 ierr = VecZeroEntries(osm->x[i]);CHKERRQ(ierr); 564 } 565 } 566 if (!(osm->type & PC_ASM_RESTRICT)) reverse = SCATTER_REVERSE_LOCAL; 567 568 for (i=0; i<n_local; i++) { 569 ierr = VecScatterBegin(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr); 570 } 571 ierr = VecZeroEntries(y);CHKERRQ(ierr); 572 /* do the local solves */ 573 for (i=0; i<n_local_true; i++) { 574 ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr); 575 ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr); 576 if (osm->localization) { 577 ierr = VecScatterBegin(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr); 578 ierr = VecScatterEnd(osm->localization[i],osm->y[i],osm->y_local[i],INSERT_VALUES,forward);CHKERRQ(ierr); 579 } 580 ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr); 581 } 582 /* handle the rest of the scatters that do not have local solves */ 583 for (i=n_local_true; i<n_local; i++) { 584 ierr = VecScatterEnd(osm->restriction[i],x,osm->x[i],INSERT_VALUES,forward);CHKERRQ(ierr); 585 ierr = VecScatterBegin(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr); 586 } 587 for (i=0; i<n_local; i++) { 588 ierr = VecScatterEnd(osm->prolongation[i],osm->y_local[i],y,ADD_VALUES,reverse);CHKERRQ(ierr); 589 } 590 PetscFunctionReturn(0); 591 } 592 593 static PetscErrorCode PCReset_ASM(PC pc) 594 { 595 PC_ASM *osm = (PC_ASM*)pc->data; 596 PetscErrorCode ierr; 597 PetscInt i; 598 599 PetscFunctionBegin; 600 if (osm->ksp) { 601 for (i=0; i<osm->n_local_true; i++) { 602 ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr); 603 } 604 } 605 if (osm->pmat) { 606 if (osm->n_local_true > 0) { 607 ierr = MatDestroySubMatrices(osm->n_local_true,&osm->pmat);CHKERRQ(ierr); 608 } 609 } 610 if (osm->restriction) { 611 for (i=0; i<osm->n_local; i++) { 612 ierr = VecScatterDestroy(&osm->restriction[i]);CHKERRQ(ierr); 613 if (osm->localization) {ierr = VecScatterDestroy(&osm->localization[i]);CHKERRQ(ierr);} 614 ierr = VecScatterDestroy(&osm->prolongation[i]);CHKERRQ(ierr); 615 ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr); 616 ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr); 617 ierr = VecDestroy(&osm->y_local[i]);CHKERRQ(ierr); 618 } 619 ierr = PetscFree(osm->restriction);CHKERRQ(ierr); 620 if (osm->localization) {ierr = PetscFree(osm->localization);CHKERRQ(ierr);} 621 ierr = PetscFree(osm->prolongation);CHKERRQ(ierr); 622 ierr = PetscFree(osm->x);CHKERRQ(ierr); 623 ierr = PetscFree(osm->y);CHKERRQ(ierr); 624 ierr = PetscFree(osm->y_local);CHKERRQ(ierr); 625 } 626 ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr); 627 if (osm->loctype == PC_COMPOSITE_MULTIPLICATIVE) { 628 ierr = ISDestroy(&osm->lis);CHKERRQ(ierr); 629 ierr = MatDestroyMatrices(osm->n_local_true, &osm->lmats);CHKERRQ(ierr); 630 ierr = VecDestroy(&osm->lx);CHKERRQ(ierr); 631 ierr = VecDestroy(&osm->ly);CHKERRQ(ierr); 632 } 633 634 ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr); 635 636 osm->is = 0; 637 osm->is_local = 0; 638 PetscFunctionReturn(0); 639 } 640 641 static PetscErrorCode PCDestroy_ASM(PC pc) 642 { 643 PC_ASM *osm = (PC_ASM*)pc->data; 644 PetscErrorCode ierr; 645 PetscInt i; 646 647 PetscFunctionBegin; 648 ierr = PCReset_ASM(pc);CHKERRQ(ierr); 649 if (osm->ksp) { 650 for (i=0; i<osm->n_local_true; i++) { 651 ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr); 652 } 653 ierr = PetscFree(osm->ksp);CHKERRQ(ierr); 654 } 655 ierr = PetscFree(pc->data);CHKERRQ(ierr); 656 PetscFunctionReturn(0); 657 } 658 659 static PetscErrorCode PCSetFromOptions_ASM(PetscOptionItems *PetscOptionsObject,PC pc) 660 { 661 PC_ASM *osm = (PC_ASM*)pc->data; 662 PetscErrorCode ierr; 663 PetscInt blocks,ovl; 664 PetscBool symset,flg; 665 PCASMType asmtype; 666 PCCompositeType loctype; 667 char sub_mat_type[256]; 668 669 PetscFunctionBegin; 670 /* set the type to symmetric if matrix is symmetric */ 671 if (!osm->type_set && pc->pmat) { 672 ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr); 673 if (symset && flg) osm->type = PC_ASM_BASIC; 674 } 675 ierr = PetscOptionsHead(PetscOptionsObject,"Additive Schwarz options");CHKERRQ(ierr); 676 ierr = PetscOptionsBool("-pc_asm_dm_subdomains","Use DMCreateDomainDecomposition() to define subdomains","PCASMSetDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr); 677 ierr = PetscOptionsInt("-pc_asm_blocks","Number of subdomains","PCASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr); 678 if (flg) { 679 ierr = PCASMSetTotalSubdomains(pc,blocks,NULL,NULL);CHKERRQ(ierr); 680 osm->dm_subdomains = PETSC_FALSE; 681 } 682 ierr = PetscOptionsInt("-pc_asm_overlap","Number of grid points overlap","PCASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr); 683 if (flg) { 684 ierr = PCASMSetOverlap(pc,ovl);CHKERRQ(ierr); 685 osm->dm_subdomains = PETSC_FALSE; 686 } 687 flg = PETSC_FALSE; 688 ierr = PetscOptionsEnum("-pc_asm_type","Type of restriction/extension","PCASMSetType",PCASMTypes,(PetscEnum)osm->type,(PetscEnum*)&asmtype,&flg);CHKERRQ(ierr); 689 if (flg) {ierr = PCASMSetType(pc,asmtype);CHKERRQ(ierr); } 690 flg = PETSC_FALSE; 691 ierr = PetscOptionsEnum("-pc_asm_local_type","Type of local solver composition","PCASMSetLocalType",PCCompositeTypes,(PetscEnum)osm->loctype,(PetscEnum*)&loctype,&flg);CHKERRQ(ierr); 692 if (flg) {ierr = PCASMSetLocalType(pc,loctype);CHKERRQ(ierr); } 693 ierr = PetscOptionsFList("-pc_asm_sub_mat_type","Subsolve Matrix Type","PCASMSetSubMatType",MatList,NULL,sub_mat_type,256,&flg);CHKERRQ(ierr); 694 if(flg){ 695 ierr = PCASMSetSubMatType(pc,sub_mat_type);CHKERRQ(ierr); 696 } 697 ierr = PetscOptionsTail();CHKERRQ(ierr); 698 PetscFunctionReturn(0); 699 } 700 701 /*------------------------------------------------------------------------------------*/ 702 703 static PetscErrorCode PCASMSetLocalSubdomains_ASM(PC pc,PetscInt n,IS is[],IS is_local[]) 704 { 705 PC_ASM *osm = (PC_ASM*)pc->data; 706 PetscErrorCode ierr; 707 PetscInt i; 708 709 PetscFunctionBegin; 710 if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more blocks, n = %D",n); 711 if (pc->setupcalled && (n != osm->n_local_true || is)) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetLocalSubdomains() should be called before calling PCSetUp()."); 712 713 if (!pc->setupcalled) { 714 if (is) { 715 for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is[i]);CHKERRQ(ierr);} 716 } 717 if (is_local) { 718 for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)is_local[i]);CHKERRQ(ierr);} 719 } 720 ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr); 721 722 osm->n_local_true = n; 723 osm->is = 0; 724 osm->is_local = 0; 725 if (is) { 726 ierr = PetscMalloc1(n,&osm->is);CHKERRQ(ierr); 727 for (i=0; i<n; i++) osm->is[i] = is[i]; 728 /* Flag indicating that the user has set overlapping subdomains so PCASM should not increase their size. */ 729 osm->overlap = -1; 730 } 731 if (is_local) { 732 ierr = PetscMalloc1(n,&osm->is_local);CHKERRQ(ierr); 733 for (i=0; i<n; i++) osm->is_local[i] = is_local[i]; 734 if (!is) { 735 ierr = PetscMalloc1(osm->n_local_true,&osm->is);CHKERRQ(ierr); 736 for (i=0; i<osm->n_local_true; i++) { 737 if (osm->overlap > 0) { /* With positive overlap, osm->is[i] will be modified */ 738 ierr = ISDuplicate(osm->is_local[i],&osm->is[i]);CHKERRQ(ierr); 739 ierr = ISCopy(osm->is_local[i],osm->is[i]);CHKERRQ(ierr); 740 } else { 741 ierr = PetscObjectReference((PetscObject)osm->is_local[i]);CHKERRQ(ierr); 742 osm->is[i] = osm->is_local[i]; 743 } 744 } 745 } 746 } 747 } 748 PetscFunctionReturn(0); 749 } 750 751 static PetscErrorCode PCASMSetTotalSubdomains_ASM(PC pc,PetscInt N,IS *is,IS *is_local) 752 { 753 PC_ASM *osm = (PC_ASM*)pc->data; 754 PetscErrorCode ierr; 755 PetscMPIInt rank,size; 756 PetscInt n; 757 758 PetscFunctionBegin; 759 if (N < 1) SETERRQ1(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Number of total blocks must be > 0, N = %D",N); 760 if (is || is_local) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Use PCASMSetLocalSubdomains() to set specific index sets\n\they cannot be set globally yet."); 761 762 /* 763 Split the subdomains equally among all processors 764 */ 765 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 766 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 767 n = N/size + ((N % size) > rank); 768 if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one block: total processors %d total blocks %D",(int)rank,(int)size,N); 769 if (pc->setupcalled && n != osm->n_local_true) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCASMSetTotalSubdomains() should be called before PCSetUp()."); 770 if (!pc->setupcalled) { 771 ierr = PCASMDestroySubdomains(osm->n_local_true,osm->is,osm->is_local);CHKERRQ(ierr); 772 773 osm->n_local_true = n; 774 osm->is = 0; 775 osm->is_local = 0; 776 } 777 PetscFunctionReturn(0); 778 } 779 780 static PetscErrorCode PCASMSetOverlap_ASM(PC pc,PetscInt ovl) 781 { 782 PC_ASM *osm = (PC_ASM*)pc->data; 783 784 PetscFunctionBegin; 785 if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested"); 786 if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCASMSetOverlap() should be called before PCSetUp()."); 787 if (!pc->setupcalled) osm->overlap = ovl; 788 PetscFunctionReturn(0); 789 } 790 791 static PetscErrorCode PCASMSetType_ASM(PC pc,PCASMType type) 792 { 793 PC_ASM *osm = (PC_ASM*)pc->data; 794 795 PetscFunctionBegin; 796 osm->type = type; 797 osm->type_set = PETSC_TRUE; 798 PetscFunctionReturn(0); 799 } 800 801 static PetscErrorCode PCASMGetType_ASM(PC pc,PCASMType *type) 802 { 803 PC_ASM *osm = (PC_ASM*)pc->data; 804 805 PetscFunctionBegin; 806 *type = osm->type; 807 PetscFunctionReturn(0); 808 } 809 810 static PetscErrorCode PCASMSetLocalType_ASM(PC pc, PCCompositeType type) 811 { 812 PC_ASM *osm = (PC_ASM *) pc->data; 813 814 PetscFunctionBegin; 815 osm->loctype = type; 816 PetscFunctionReturn(0); 817 } 818 819 static PetscErrorCode PCASMGetLocalType_ASM(PC pc, PCCompositeType *type) 820 { 821 PC_ASM *osm = (PC_ASM *) pc->data; 822 823 PetscFunctionBegin; 824 *type = osm->loctype; 825 PetscFunctionReturn(0); 826 } 827 828 static PetscErrorCode PCASMSetSortIndices_ASM(PC pc,PetscBool doSort) 829 { 830 PC_ASM *osm = (PC_ASM*)pc->data; 831 832 PetscFunctionBegin; 833 osm->sort_indices = doSort; 834 PetscFunctionReturn(0); 835 } 836 837 static PetscErrorCode PCASMGetSubKSP_ASM(PC pc,PetscInt *n_local,PetscInt *first_local,KSP **ksp) 838 { 839 PC_ASM *osm = (PC_ASM*)pc->data; 840 PetscErrorCode ierr; 841 842 PetscFunctionBegin; 843 if (osm->n_local_true < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here"); 844 845 if (n_local) *n_local = osm->n_local_true; 846 if (first_local) { 847 ierr = MPI_Scan(&osm->n_local_true,first_local,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 848 *first_local -= osm->n_local_true; 849 } 850 if (ksp) { 851 /* Assume that local solves are now different; not necessarily 852 true though! This flag is used only for PCView_ASM() */ 853 *ksp = osm->ksp; 854 osm->same_local_solves = PETSC_FALSE; 855 } 856 PetscFunctionReturn(0); 857 } 858 859 static PetscErrorCode PCASMGetSubMatType_ASM(PC pc,MatType *sub_mat_type) 860 { 861 PC_ASM *osm = (PC_ASM*)pc->data; 862 863 PetscFunctionBegin; 864 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 865 PetscValidPointer(sub_mat_type,2); 866 *sub_mat_type = osm->sub_mat_type; 867 PetscFunctionReturn(0); 868 } 869 870 static PetscErrorCode PCASMSetSubMatType_ASM(PC pc,MatType sub_mat_type) 871 { 872 PetscErrorCode ierr; 873 PC_ASM *osm = (PC_ASM*)pc->data; 874 875 PetscFunctionBegin; 876 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 877 ierr = PetscFree(osm->sub_mat_type);CHKERRQ(ierr); 878 ierr = PetscStrallocpy(sub_mat_type,(char**)&osm->sub_mat_type);CHKERRQ(ierr); 879 PetscFunctionReturn(0); 880 } 881 882 /*@C 883 PCASMSetLocalSubdomains - Sets the local subdomains (for this processor only) for the additive Schwarz preconditioner. 884 885 Collective on PC 886 887 Input Parameters: 888 + pc - the preconditioner context 889 . n - the number of subdomains for this processor (default value = 1) 890 . is - the index set that defines the subdomains for this processor 891 (or NULL for PETSc to determine subdomains) 892 - is_local - the index sets that define the local part of the subdomains for this processor 893 (or NULL to use the default of 1 subdomain per process) 894 895 Notes: 896 The IS numbering is in the parallel, global numbering of the vector for both is and is_local 897 898 By default the ASM preconditioner uses 1 block per processor. 899 900 Use PCASMSetTotalSubdomains() to set the subdomains for all processors. 901 902 Level: advanced 903 904 .keywords: PC, ASM, set, local, subdomains, additive Schwarz 905 906 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(), 907 PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains() 908 @*/ 909 PetscErrorCode PCASMSetLocalSubdomains(PC pc,PetscInt n,IS is[],IS is_local[]) 910 { 911 PetscErrorCode ierr; 912 913 PetscFunctionBegin; 914 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 915 ierr = PetscTryMethod(pc,"PCASMSetLocalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,is,is_local));CHKERRQ(ierr); 916 PetscFunctionReturn(0); 917 } 918 919 /*@C 920 PCASMSetTotalSubdomains - Sets the subdomains for all processors for the 921 additive Schwarz preconditioner. Either all or no processors in the 922 PC communicator must call this routine, with the same index sets. 923 924 Collective on PC 925 926 Input Parameters: 927 + pc - the preconditioner context 928 . N - the number of subdomains for all processors 929 . is - the index sets that define the subdomains for all processors 930 (or NULL to ask PETSc to compe up with subdomains) 931 - is_local - the index sets that define the local part of the subdomains for this processor 932 (or NULL to use the default of 1 subdomain per process) 933 934 Options Database Key: 935 To set the total number of subdomain blocks rather than specify the 936 index sets, use the option 937 . -pc_asm_blocks <blks> - Sets total blocks 938 939 Notes: 940 Currently you cannot use this to set the actual subdomains with the argument is. 941 942 By default the ASM preconditioner uses 1 block per processor. 943 944 These index sets cannot be destroyed until after completion of the 945 linear solves for which the ASM preconditioner is being used. 946 947 Use PCASMSetLocalSubdomains() to set local subdomains. 948 949 The IS numbering is in the parallel, global numbering of the vector for both is and is_local 950 951 Level: advanced 952 953 .keywords: PC, ASM, set, total, global, subdomains, additive Schwarz 954 955 .seealso: PCASMSetLocalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(), 956 PCASMCreateSubdomains2D() 957 @*/ 958 PetscErrorCode PCASMSetTotalSubdomains(PC pc,PetscInt N,IS is[],IS is_local[]) 959 { 960 PetscErrorCode ierr; 961 962 PetscFunctionBegin; 963 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 964 ierr = PetscTryMethod(pc,"PCASMSetTotalSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,N,is,is_local));CHKERRQ(ierr); 965 PetscFunctionReturn(0); 966 } 967 968 /*@ 969 PCASMSetOverlap - Sets the overlap between a pair of subdomains for the 970 additive Schwarz preconditioner. Either all or no processors in the 971 PC communicator must call this routine. If MatIncreaseOverlap is used, 972 use option -mat_increase_overlap when the problem size large. 973 974 Logically Collective on PC 975 976 Input Parameters: 977 + pc - the preconditioner context 978 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1) 979 980 Options Database Key: 981 . -pc_asm_overlap <ovl> - Sets overlap 982 983 Notes: 984 By default the ASM preconditioner uses 1 block per processor. To use 985 multiple blocks per perocessor, see PCASMSetTotalSubdomains() and 986 PCASMSetLocalSubdomains() (and the option -pc_asm_blocks <blks>). 987 988 The overlap defaults to 1, so if one desires that no additional 989 overlap be computed beyond what may have been set with a call to 990 PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(), then ovl 991 must be set to be 0. In particular, if one does not explicitly set 992 the subdomains an application code, then all overlap would be computed 993 internally by PETSc, and using an overlap of 0 would result in an ASM 994 variant that is equivalent to the block Jacobi preconditioner. 995 996 Note that one can define initial index sets with any overlap via 997 PCASMSetTotalSubdomains() or PCASMSetLocalSubdomains(); the routine 998 PCASMSetOverlap() merely allows PETSc to extend that overlap further 999 if desired. 1000 1001 Level: intermediate 1002 1003 .keywords: PC, ASM, set, overlap 1004 1005 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(), 1006 PCASMCreateSubdomains2D(), PCASMGetLocalSubdomains() 1007 @*/ 1008 PetscErrorCode PCASMSetOverlap(PC pc,PetscInt ovl) 1009 { 1010 PetscErrorCode ierr; 1011 1012 PetscFunctionBegin; 1013 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1014 PetscValidLogicalCollectiveInt(pc,ovl,2); 1015 ierr = PetscTryMethod(pc,"PCASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr); 1016 PetscFunctionReturn(0); 1017 } 1018 1019 /*@ 1020 PCASMSetType - Sets the type of restriction and interpolation used 1021 for local problems in the additive Schwarz method. 1022 1023 Logically Collective on PC 1024 1025 Input Parameters: 1026 + pc - the preconditioner context 1027 - type - variant of ASM, one of 1028 .vb 1029 PC_ASM_BASIC - full interpolation and restriction 1030 PC_ASM_RESTRICT - full restriction, local processor interpolation 1031 PC_ASM_INTERPOLATE - full interpolation, local processor restriction 1032 PC_ASM_NONE - local processor restriction and interpolation 1033 .ve 1034 1035 Options Database Key: 1036 . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type 1037 1038 Level: intermediate 1039 1040 .keywords: PC, ASM, set, type 1041 1042 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(), 1043 PCASMCreateSubdomains2D() 1044 @*/ 1045 PetscErrorCode PCASMSetType(PC pc,PCASMType type) 1046 { 1047 PetscErrorCode ierr; 1048 1049 PetscFunctionBegin; 1050 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1051 PetscValidLogicalCollectiveEnum(pc,type,2); 1052 ierr = PetscTryMethod(pc,"PCASMSetType_C",(PC,PCASMType),(pc,type));CHKERRQ(ierr); 1053 PetscFunctionReturn(0); 1054 } 1055 1056 /*@ 1057 PCASMGetType - Gets the type of restriction and interpolation used 1058 for local problems in the additive Schwarz method. 1059 1060 Logically Collective on PC 1061 1062 Input Parameter: 1063 . pc - the preconditioner context 1064 1065 Output Parameter: 1066 . type - variant of ASM, one of 1067 1068 .vb 1069 PC_ASM_BASIC - full interpolation and restriction 1070 PC_ASM_RESTRICT - full restriction, local processor interpolation 1071 PC_ASM_INTERPOLATE - full interpolation, local processor restriction 1072 PC_ASM_NONE - local processor restriction and interpolation 1073 .ve 1074 1075 Options Database Key: 1076 . -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type 1077 1078 Level: intermediate 1079 1080 .keywords: PC, ASM, set, type 1081 1082 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(), 1083 PCASMCreateSubdomains2D() 1084 @*/ 1085 PetscErrorCode PCASMGetType(PC pc,PCASMType *type) 1086 { 1087 PetscErrorCode ierr; 1088 1089 PetscFunctionBegin; 1090 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1091 ierr = PetscUseMethod(pc,"PCASMGetType_C",(PC,PCASMType*),(pc,type));CHKERRQ(ierr); 1092 PetscFunctionReturn(0); 1093 } 1094 1095 /*@ 1096 PCASMSetLocalType - Sets the type of composition used for local problems in the additive Schwarz method. 1097 1098 Logically Collective on PC 1099 1100 Input Parameters: 1101 + pc - the preconditioner context 1102 - type - type of composition, one of 1103 .vb 1104 PC_COMPOSITE_ADDITIVE - local additive combination 1105 PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination 1106 .ve 1107 1108 Options Database Key: 1109 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type 1110 1111 Level: intermediate 1112 1113 .seealso: PCASMSetType(), PCASMGetType(), PCASMGetLocalType(), PCASMCreate() 1114 @*/ 1115 PetscErrorCode PCASMSetLocalType(PC pc, PCCompositeType type) 1116 { 1117 PetscErrorCode ierr; 1118 1119 PetscFunctionBegin; 1120 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1121 PetscValidLogicalCollectiveEnum(pc, type, 2); 1122 ierr = PetscTryMethod(pc, "PCASMSetLocalType_C", (PC, PCCompositeType), (pc, type));CHKERRQ(ierr); 1123 PetscFunctionReturn(0); 1124 } 1125 1126 /*@ 1127 PCASMGetLocalType - Gets the type of composition used for local problems in the additive Schwarz method. 1128 1129 Logically Collective on PC 1130 1131 Input Parameter: 1132 . pc - the preconditioner context 1133 1134 Output Parameter: 1135 . type - type of composition, one of 1136 .vb 1137 PC_COMPOSITE_ADDITIVE - local additive combination 1138 PC_COMPOSITE_MULTIPLICATIVE - local multiplicative combination 1139 .ve 1140 1141 Options Database Key: 1142 . -pc_asm_local_type [additive,multiplicative] - Sets local solver composition type 1143 1144 Level: intermediate 1145 1146 .seealso: PCASMSetType(), PCASMGetType(), PCASMSetLocalType(), PCASMCreate() 1147 @*/ 1148 PetscErrorCode PCASMGetLocalType(PC pc, PCCompositeType *type) 1149 { 1150 PetscErrorCode ierr; 1151 1152 PetscFunctionBegin; 1153 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1154 PetscValidPointer(type, 2); 1155 ierr = PetscUseMethod(pc, "PCASMGetLocalType_C", (PC, PCCompositeType *), (pc, type));CHKERRQ(ierr); 1156 PetscFunctionReturn(0); 1157 } 1158 1159 /*@ 1160 PCASMSetSortIndices - Determines whether subdomain indices are sorted. 1161 1162 Logically Collective on PC 1163 1164 Input Parameters: 1165 + pc - the preconditioner context 1166 - doSort - sort the subdomain indices 1167 1168 Level: intermediate 1169 1170 .keywords: PC, ASM, set, type 1171 1172 .seealso: PCASMSetLocalSubdomains(), PCASMSetTotalSubdomains(), PCASMGetSubKSP(), 1173 PCASMCreateSubdomains2D() 1174 @*/ 1175 PetscErrorCode PCASMSetSortIndices(PC pc,PetscBool doSort) 1176 { 1177 PetscErrorCode ierr; 1178 1179 PetscFunctionBegin; 1180 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1181 PetscValidLogicalCollectiveBool(pc,doSort,2); 1182 ierr = PetscTryMethod(pc,"PCASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr); 1183 PetscFunctionReturn(0); 1184 } 1185 1186 /*@C 1187 PCASMGetSubKSP - Gets the local KSP contexts for all blocks on 1188 this processor. 1189 1190 Collective on PC iff first_local is requested 1191 1192 Input Parameter: 1193 . pc - the preconditioner context 1194 1195 Output Parameters: 1196 + n_local - the number of blocks on this processor or NULL 1197 . first_local - the global number of the first block on this processor or NULL, 1198 all processors must request or all must pass NULL 1199 - ksp - the array of KSP contexts 1200 1201 Note: 1202 After PCASMGetSubKSP() the array of KSPes is not to be freed. 1203 1204 Currently for some matrix implementations only 1 block per processor 1205 is supported. 1206 1207 You must call KSPSetUp() before calling PCASMGetSubKSP(). 1208 1209 Fortran note: 1210 The output argument 'ksp' must be an array of sufficient length or PETSC_NULL_KSP. The latter can be used to learn the necessary length. 1211 1212 Level: advanced 1213 1214 .keywords: PC, ASM, additive Schwarz, get, sub, KSP, context 1215 1216 .seealso: PCASMSetTotalSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap(), 1217 PCASMCreateSubdomains2D(), 1218 @*/ 1219 PetscErrorCode PCASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[]) 1220 { 1221 PetscErrorCode ierr; 1222 1223 PetscFunctionBegin; 1224 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1225 ierr = PetscUseMethod(pc,"PCASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr); 1226 PetscFunctionReturn(0); 1227 } 1228 1229 /* -------------------------------------------------------------------------------------*/ 1230 /*MC 1231 PCASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with 1232 its own KSP object. 1233 1234 Options Database Keys: 1235 + -pc_asm_blocks <blks> - Sets total blocks 1236 . -pc_asm_overlap <ovl> - Sets overlap 1237 - -pc_asm_type [basic,restrict,interpolate,none] - Sets ASM type 1238 1239 IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you 1240 will get a different convergence rate due to the default option of -pc_asm_type restrict. Use 1241 -pc_asm_type basic to use the standard ASM. 1242 1243 Notes: Each processor can have one or more blocks, but a block cannot be shared by more 1244 than one processor. Defaults to one block per processor. 1245 1246 To set options on the solvers for each block append -sub_ to all the KSP, and PC 1247 options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly 1248 1249 To set the options on the solvers separate for each block call PCASMGetSubKSP() 1250 and set the options directly on the resulting KSP object (you can access its PC 1251 with KSPGetPC()) 1252 1253 1254 Level: beginner 1255 1256 Concepts: additive Schwarz method 1257 1258 References: 1259 + 1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions 1260 Courant Institute, New York University Technical report 1261 - 1. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations, 1262 Cambridge University Press. 1263 1264 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 1265 PCBJACOBI, PCASMGetSubKSP(), PCASMSetLocalSubdomains(), 1266 PCASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCASMSetOverlap(), PCASMSetType() 1267 1268 M*/ 1269 1270 PETSC_EXTERN PetscErrorCode PCCreate_ASM(PC pc) 1271 { 1272 PetscErrorCode ierr; 1273 PC_ASM *osm; 1274 1275 PetscFunctionBegin; 1276 ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr); 1277 1278 osm->n = PETSC_DECIDE; 1279 osm->n_local = 0; 1280 osm->n_local_true = PETSC_DECIDE; 1281 osm->overlap = 1; 1282 osm->ksp = 0; 1283 osm->restriction = 0; 1284 osm->localization = 0; 1285 osm->prolongation = 0; 1286 osm->x = 0; 1287 osm->y = 0; 1288 osm->y_local = 0; 1289 osm->is = 0; 1290 osm->is_local = 0; 1291 osm->mat = 0; 1292 osm->pmat = 0; 1293 osm->type = PC_ASM_RESTRICT; 1294 osm->loctype = PC_COMPOSITE_ADDITIVE; 1295 osm->same_local_solves = PETSC_TRUE; 1296 osm->sort_indices = PETSC_TRUE; 1297 osm->dm_subdomains = PETSC_FALSE; 1298 osm->sub_mat_type = NULL; 1299 1300 pc->data = (void*)osm; 1301 pc->ops->apply = PCApply_ASM; 1302 pc->ops->applytranspose = PCApplyTranspose_ASM; 1303 pc->ops->setup = PCSetUp_ASM; 1304 pc->ops->reset = PCReset_ASM; 1305 pc->ops->destroy = PCDestroy_ASM; 1306 pc->ops->setfromoptions = PCSetFromOptions_ASM; 1307 pc->ops->setuponblocks = PCSetUpOnBlocks_ASM; 1308 pc->ops->view = PCView_ASM; 1309 pc->ops->applyrichardson = 0; 1310 1311 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalSubdomains_C",PCASMSetLocalSubdomains_ASM);CHKERRQ(ierr); 1312 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetTotalSubdomains_C",PCASMSetTotalSubdomains_ASM);CHKERRQ(ierr); 1313 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetOverlap_C",PCASMSetOverlap_ASM);CHKERRQ(ierr); 1314 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetType_C",PCASMSetType_ASM);CHKERRQ(ierr); 1315 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetType_C",PCASMGetType_ASM);CHKERRQ(ierr); 1316 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetLocalType_C",PCASMSetLocalType_ASM);CHKERRQ(ierr); 1317 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetLocalType_C",PCASMGetLocalType_ASM);CHKERRQ(ierr); 1318 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSortIndices_C",PCASMSetSortIndices_ASM);CHKERRQ(ierr); 1319 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubKSP_C",PCASMGetSubKSP_ASM);CHKERRQ(ierr); 1320 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMGetSubMatType_C",PCASMGetSubMatType_ASM);CHKERRQ(ierr); 1321 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCASMSetSubMatType_C",PCASMSetSubMatType_ASM);CHKERRQ(ierr); 1322 PetscFunctionReturn(0); 1323 } 1324 1325 /*@C 1326 PCASMCreateSubdomains - Creates the index sets for the overlapping Schwarz 1327 preconditioner for a any problem on a general grid. 1328 1329 Collective 1330 1331 Input Parameters: 1332 + A - The global matrix operator 1333 - n - the number of local blocks 1334 1335 Output Parameters: 1336 . outis - the array of index sets defining the subdomains 1337 1338 Level: advanced 1339 1340 Note: this generates nonoverlapping subdomains; the PCASM will generate the overlap 1341 from these if you use PCASMSetLocalSubdomains() 1342 1343 In the Fortran version you must provide the array outis[] already allocated of length n. 1344 1345 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid 1346 1347 .seealso: PCASMSetLocalSubdomains(), PCASMDestroySubdomains() 1348 @*/ 1349 PetscErrorCode PCASMCreateSubdomains(Mat A, PetscInt n, IS* outis[]) 1350 { 1351 MatPartitioning mpart; 1352 const char *prefix; 1353 void (*f)(void); 1354 PetscInt i,j,rstart,rend,bs; 1355 PetscBool isbaij = PETSC_FALSE,foundpart = PETSC_FALSE; 1356 Mat Ad = NULL, adj; 1357 IS ispart,isnumb,*is; 1358 PetscErrorCode ierr; 1359 1360 PetscFunctionBegin; 1361 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1362 PetscValidPointer(outis,3); 1363 if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n); 1364 1365 /* Get prefix, row distribution, and block size */ 1366 ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr); 1367 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 1368 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1369 if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs); 1370 1371 /* Get diagonal block from matrix if possible */ 1372 ierr = MatShellGetOperation(A,MATOP_GET_DIAGONAL_BLOCK,&f);CHKERRQ(ierr); 1373 if (f) { 1374 ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr); 1375 } 1376 if (Ad) { 1377 ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr); 1378 if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);} 1379 } 1380 if (Ad && n > 1) { 1381 PetscBool match,done; 1382 /* Try to setup a good matrix partitioning if available */ 1383 ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr); 1384 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 1385 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 1386 ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr); 1387 if (!match) { 1388 ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr); 1389 } 1390 if (!match) { /* assume a "good" partitioner is available */ 1391 PetscInt na; 1392 const PetscInt *ia,*ja; 1393 ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr); 1394 if (done) { 1395 /* Build adjacency matrix by hand. Unfortunately a call to 1396 MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will 1397 remove the block-aij structure and we cannot expect 1398 MatPartitioning to split vertices as we need */ 1399 PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0; 1400 const PetscInt *row; 1401 nnz = 0; 1402 for (i=0; i<na; i++) { /* count number of nonzeros */ 1403 len = ia[i+1] - ia[i]; 1404 row = ja + ia[i]; 1405 for (j=0; j<len; j++) { 1406 if (row[j] == i) { /* don't count diagonal */ 1407 len--; break; 1408 } 1409 } 1410 nnz += len; 1411 } 1412 ierr = PetscMalloc1(na+1,&iia);CHKERRQ(ierr); 1413 ierr = PetscMalloc1(nnz,&jja);CHKERRQ(ierr); 1414 nnz = 0; 1415 iia[0] = 0; 1416 for (i=0; i<na; i++) { /* fill adjacency */ 1417 cnt = 0; 1418 len = ia[i+1] - ia[i]; 1419 row = ja + ia[i]; 1420 for (j=0; j<len; j++) { 1421 if (row[j] != i) { /* if not diagonal */ 1422 jja[nnz+cnt++] = row[j]; 1423 } 1424 } 1425 nnz += cnt; 1426 iia[i+1] = nnz; 1427 } 1428 /* Partitioning of the adjacency matrix */ 1429 ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr); 1430 ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr); 1431 ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr); 1432 ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr); 1433 ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr); 1434 ierr = MatDestroy(&adj);CHKERRQ(ierr); 1435 foundpart = PETSC_TRUE; 1436 } 1437 ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr); 1438 } 1439 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 1440 } 1441 1442 ierr = PetscMalloc1(n,&is);CHKERRQ(ierr); 1443 *outis = is; 1444 1445 if (!foundpart) { 1446 1447 /* Partitioning by contiguous chunks of rows */ 1448 1449 PetscInt mbs = (rend-rstart)/bs; 1450 PetscInt start = rstart; 1451 for (i=0; i<n; i++) { 1452 PetscInt count = (mbs/n + ((mbs % n) > i)) * bs; 1453 ierr = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr); 1454 start += count; 1455 } 1456 1457 } else { 1458 1459 /* Partitioning by adjacency of diagonal block */ 1460 1461 const PetscInt *numbering; 1462 PetscInt *count,nidx,*indices,*newidx,start=0; 1463 /* Get node count in each partition */ 1464 ierr = PetscMalloc1(n,&count);CHKERRQ(ierr); 1465 ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr); 1466 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1467 for (i=0; i<n; i++) count[i] *= bs; 1468 } 1469 /* Build indices from node numbering */ 1470 ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr); 1471 ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr); 1472 for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */ 1473 ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr); 1474 ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr); 1475 ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr); 1476 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1477 ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr); 1478 for (i=0; i<nidx; i++) { 1479 for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j; 1480 } 1481 ierr = PetscFree(indices);CHKERRQ(ierr); 1482 nidx *= bs; 1483 indices = newidx; 1484 } 1485 /* Shift to get global indices */ 1486 for (i=0; i<nidx; i++) indices[i] += rstart; 1487 1488 /* Build the index sets for each block */ 1489 for (i=0; i<n; i++) { 1490 ierr = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr); 1491 ierr = ISSort(is[i]);CHKERRQ(ierr); 1492 start += count[i]; 1493 } 1494 1495 ierr = PetscFree(count);CHKERRQ(ierr); 1496 ierr = PetscFree(indices);CHKERRQ(ierr); 1497 ierr = ISDestroy(&isnumb);CHKERRQ(ierr); 1498 ierr = ISDestroy(&ispart);CHKERRQ(ierr); 1499 1500 } 1501 PetscFunctionReturn(0); 1502 } 1503 1504 /*@C 1505 PCASMDestroySubdomains - Destroys the index sets created with 1506 PCASMCreateSubdomains(). Should be called after setting subdomains 1507 with PCASMSetLocalSubdomains(). 1508 1509 Collective 1510 1511 Input Parameters: 1512 + n - the number of index sets 1513 . is - the array of index sets 1514 - is_local - the array of local index sets, can be NULL 1515 1516 Level: advanced 1517 1518 .keywords: PC, ASM, additive Schwarz, create, subdomains, unstructured grid 1519 1520 .seealso: PCASMCreateSubdomains(), PCASMSetLocalSubdomains() 1521 @*/ 1522 PetscErrorCode PCASMDestroySubdomains(PetscInt n, IS is[], IS is_local[]) 1523 { 1524 PetscInt i; 1525 PetscErrorCode ierr; 1526 1527 PetscFunctionBegin; 1528 if (n <= 0) PetscFunctionReturn(0); 1529 if (is) { 1530 PetscValidPointer(is,2); 1531 for (i=0; i<n; i++) { ierr = ISDestroy(&is[i]);CHKERRQ(ierr); } 1532 ierr = PetscFree(is);CHKERRQ(ierr); 1533 } 1534 if (is_local) { 1535 PetscValidPointer(is_local,3); 1536 for (i=0; i<n; i++) { ierr = ISDestroy(&is_local[i]);CHKERRQ(ierr); } 1537 ierr = PetscFree(is_local);CHKERRQ(ierr); 1538 } 1539 PetscFunctionReturn(0); 1540 } 1541 1542 /*@ 1543 PCASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz 1544 preconditioner for a two-dimensional problem on a regular grid. 1545 1546 Not Collective 1547 1548 Input Parameters: 1549 + m, n - the number of mesh points in the x and y directions 1550 . M, N - the number of subdomains in the x and y directions 1551 . dof - degrees of freedom per node 1552 - overlap - overlap in mesh lines 1553 1554 Output Parameters: 1555 + Nsub - the number of subdomains created 1556 . is - array of index sets defining overlapping (if overlap > 0) subdomains 1557 - is_local - array of index sets defining non-overlapping subdomains 1558 1559 Note: 1560 Presently PCAMSCreateSubdomains2d() is valid only for sequential 1561 preconditioners. More general related routines are 1562 PCASMSetTotalSubdomains() and PCASMSetLocalSubdomains(). 1563 1564 Level: advanced 1565 1566 .keywords: PC, ASM, additive Schwarz, create, subdomains, 2D, regular grid 1567 1568 .seealso: PCASMSetTotalSubdomains(), PCASMSetLocalSubdomains(), PCASMGetSubKSP(), 1569 PCASMSetOverlap() 1570 @*/ 1571 PetscErrorCode PCASMCreateSubdomains2D(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt dof,PetscInt overlap,PetscInt *Nsub,IS **is,IS **is_local) 1572 { 1573 PetscInt i,j,height,width,ystart,xstart,yleft,yright,xleft,xright,loc_outer; 1574 PetscErrorCode ierr; 1575 PetscInt nidx,*idx,loc,ii,jj,count; 1576 1577 PetscFunctionBegin; 1578 if (dof != 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP," "); 1579 1580 *Nsub = N*M; 1581 ierr = PetscMalloc1(*Nsub,is);CHKERRQ(ierr); 1582 ierr = PetscMalloc1(*Nsub,is_local);CHKERRQ(ierr); 1583 ystart = 0; 1584 loc_outer = 0; 1585 for (i=0; i<N; i++) { 1586 height = n/N + ((n % N) > i); /* height of subdomain */ 1587 if (height < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many N subdomains for mesh dimension n"); 1588 yleft = ystart - overlap; if (yleft < 0) yleft = 0; 1589 yright = ystart + height + overlap; if (yright > n) yright = n; 1590 xstart = 0; 1591 for (j=0; j<M; j++) { 1592 width = m/M + ((m % M) > j); /* width of subdomain */ 1593 if (width < 2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many M subdomains for mesh dimension m"); 1594 xleft = xstart - overlap; if (xleft < 0) xleft = 0; 1595 xright = xstart + width + overlap; if (xright > m) xright = m; 1596 nidx = (xright - xleft)*(yright - yleft); 1597 ierr = PetscMalloc1(nidx,&idx);CHKERRQ(ierr); 1598 loc = 0; 1599 for (ii=yleft; ii<yright; ii++) { 1600 count = m*ii + xleft; 1601 for (jj=xleft; jj<xright; jj++) idx[loc++] = count++; 1602 } 1603 ierr = ISCreateGeneral(PETSC_COMM_SELF,nidx,idx,PETSC_COPY_VALUES,(*is)+loc_outer);CHKERRQ(ierr); 1604 if (overlap == 0) { 1605 ierr = PetscObjectReference((PetscObject)(*is)[loc_outer]);CHKERRQ(ierr); 1606 1607 (*is_local)[loc_outer] = (*is)[loc_outer]; 1608 } else { 1609 for (loc=0,ii=ystart; ii<ystart+height; ii++) { 1610 for (jj=xstart; jj<xstart+width; jj++) { 1611 idx[loc++] = m*ii + jj; 1612 } 1613 } 1614 ierr = ISCreateGeneral(PETSC_COMM_SELF,loc,idx,PETSC_COPY_VALUES,*is_local+loc_outer);CHKERRQ(ierr); 1615 } 1616 ierr = PetscFree(idx);CHKERRQ(ierr); 1617 xstart += width; 1618 loc_outer++; 1619 } 1620 ystart += height; 1621 } 1622 for (i=0; i<*Nsub; i++) { ierr = ISSort((*is)[i]);CHKERRQ(ierr); } 1623 PetscFunctionReturn(0); 1624 } 1625 1626 /*@C 1627 PCASMGetLocalSubdomains - Gets the local subdomains (for this processor 1628 only) for the additive Schwarz preconditioner. 1629 1630 Not Collective 1631 1632 Input Parameter: 1633 . pc - the preconditioner context 1634 1635 Output Parameters: 1636 + n - the number of subdomains for this processor (default value = 1) 1637 . is - the index sets that define the subdomains for this processor 1638 - is_local - the index sets that define the local part of the subdomains for this processor (can be NULL) 1639 1640 1641 Notes: 1642 The IS numbering is in the parallel, global numbering of the vector. 1643 1644 Level: advanced 1645 1646 .keywords: PC, ASM, set, local, subdomains, additive Schwarz 1647 1648 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(), 1649 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubmatrices() 1650 @*/ 1651 PetscErrorCode PCASMGetLocalSubdomains(PC pc,PetscInt *n,IS *is[],IS *is_local[]) 1652 { 1653 PC_ASM *osm = (PC_ASM*)pc->data; 1654 PetscErrorCode ierr; 1655 PetscBool match; 1656 1657 PetscFunctionBegin; 1658 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1659 PetscValidIntPointer(n,2); 1660 if (is) PetscValidPointer(is,3); 1661 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1662 if (!match) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PC is not a PCASM"); 1663 if (n) *n = osm->n_local_true; 1664 if (is) *is = osm->is; 1665 if (is_local) *is_local = osm->is_local; 1666 PetscFunctionReturn(0); 1667 } 1668 1669 /*@C 1670 PCASMGetLocalSubmatrices - Gets the local submatrices (for this processor 1671 only) for the additive Schwarz preconditioner. 1672 1673 Not Collective 1674 1675 Input Parameter: 1676 . pc - the preconditioner context 1677 1678 Output Parameters: 1679 + n - the number of matrices for this processor (default value = 1) 1680 - mat - the matrices 1681 1682 1683 Level: advanced 1684 1685 Notes: Call after PCSetUp() (or KSPSetUp()) but before PCApply() (or KSPApply()) and before PCSetUpOnBlocks()) 1686 1687 Usually one would use PCSetModifySubmatrices() to change the submatrices in building the preconditioner. 1688 1689 .keywords: PC, ASM, set, local, subdomains, additive Schwarz, block Jacobi 1690 1691 .seealso: PCASMSetTotalSubdomains(), PCASMSetOverlap(), PCASMGetSubKSP(), 1692 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains(), PCSetModifySubmatrices() 1693 @*/ 1694 PetscErrorCode PCASMGetLocalSubmatrices(PC pc,PetscInt *n,Mat *mat[]) 1695 { 1696 PC_ASM *osm; 1697 PetscErrorCode ierr; 1698 PetscBool match; 1699 1700 PetscFunctionBegin; 1701 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1702 PetscValidIntPointer(n,2); 1703 if (mat) PetscValidPointer(mat,3); 1704 if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp()."); 1705 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1706 if (!match) { 1707 if (n) *n = 0; 1708 if (mat) *mat = NULL; 1709 } else { 1710 osm = (PC_ASM*)pc->data; 1711 if (n) *n = osm->n_local_true; 1712 if (mat) *mat = osm->pmat; 1713 } 1714 PetscFunctionReturn(0); 1715 } 1716 1717 /*@ 1718 PCASMSetDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible. 1719 Logically Collective 1720 1721 Input Parameter: 1722 + pc - the preconditioner 1723 - flg - boolean indicating whether to use subdomains defined by the DM 1724 1725 Options Database Key: 1726 . -pc_asm_dm_subdomains 1727 1728 Level: intermediate 1729 1730 Notes: 1731 PCASMSetTotalSubdomains() and PCASMSetOverlap() take precedence over PCASMSetDMSubdomains(), 1732 so setting either of the first two effectively turns the latter off. 1733 1734 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz 1735 1736 .seealso: PCASMGetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap() 1737 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains() 1738 @*/ 1739 PetscErrorCode PCASMSetDMSubdomains(PC pc,PetscBool flg) 1740 { 1741 PC_ASM *osm = (PC_ASM*)pc->data; 1742 PetscErrorCode ierr; 1743 PetscBool match; 1744 1745 PetscFunctionBegin; 1746 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1747 PetscValidLogicalCollectiveBool(pc,flg,2); 1748 if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC."); 1749 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1750 if (match) { 1751 osm->dm_subdomains = flg; 1752 } 1753 PetscFunctionReturn(0); 1754 } 1755 1756 /*@ 1757 PCASMGetDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible. 1758 Not Collective 1759 1760 Input Parameter: 1761 . pc - the preconditioner 1762 1763 Output Parameter: 1764 . flg - boolean indicating whether to use subdomains defined by the DM 1765 1766 Level: intermediate 1767 1768 .keywords: PC, ASM, DM, set, subdomains, additive Schwarz 1769 1770 .seealso: PCASMSetDMSubdomains(), PCASMSetTotalSubdomains(), PCASMSetOverlap() 1771 PCASMCreateSubdomains2D(), PCASMSetLocalSubdomains(), PCASMGetLocalSubdomains() 1772 @*/ 1773 PetscErrorCode PCASMGetDMSubdomains(PC pc,PetscBool* flg) 1774 { 1775 PC_ASM *osm = (PC_ASM*)pc->data; 1776 PetscErrorCode ierr; 1777 PetscBool match; 1778 1779 PetscFunctionBegin; 1780 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1781 PetscValidPointer(flg,2); 1782 ierr = PetscObjectTypeCompare((PetscObject)pc,PCASM,&match);CHKERRQ(ierr); 1783 if (match) { 1784 if (flg) *flg = osm->dm_subdomains; 1785 } 1786 PetscFunctionReturn(0); 1787 } 1788 1789 1790 /*@ 1791 PCASMGetSubMatType - Gets the matrix type used for ASM subsolves, as a string. 1792 1793 Not Collective 1794 1795 Input Parameter: 1796 . pc - the PC 1797 1798 Output Parameter: 1799 . sub_mat_type - name of matrix type 1800 1801 Level: advanced 1802 1803 .keywords: PC, PCASM, MatType, set 1804 1805 .seealso: PCASMSetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat 1806 @*/ 1807 PetscErrorCode PCASMGetSubMatType(PC pc,MatType *sub_mat_type){ 1808 PetscErrorCode ierr; 1809 1810 ierr = PetscTryMethod(pc,"PCASMGetSubMatType_C",(PC,MatType*),(pc,sub_mat_type));CHKERRQ(ierr); 1811 PetscFunctionReturn(0); 1812 } 1813 1814 /*@ 1815 PCASMSetSubMatType - Set the type of matrix used for ASM subsolves 1816 1817 Collective on Mat 1818 1819 Input Parameters: 1820 + pc - the PC object 1821 - sub_mat_type - matrix type 1822 1823 Options Database Key: 1824 . -pc_asm_sub_mat_type <sub_mat_type> - Sets the matrix type used for subsolves, for example, seqaijviennacl. If you specify a base name like aijviennacl, the corresponding sequential type is assumed. 1825 1826 Notes: 1827 See "${PETSC_DIR}/include/petscmat.h" for available types 1828 1829 Level: advanced 1830 1831 .keywords: PC, PCASM, MatType, set 1832 1833 .seealso: PCASMGetSubMatType(), PCASM, PCSetType(), VecSetType(), MatType, Mat 1834 @*/ 1835 PetscErrorCode PCASMSetSubMatType(PC pc,MatType sub_mat_type) 1836 { 1837 PetscErrorCode ierr; 1838 1839 ierr = PetscTryMethod(pc,"PCASMSetSubMatType_C",(PC,MatType),(pc,sub_mat_type));CHKERRQ(ierr); 1840 PetscFunctionReturn(0); 1841 } 1842