1 /* 2 This file defines an "generalized" additive Schwarz preconditioner for any Mat implementation. 3 In this version each processor may intersect multiple subdomains and any subdomain may 4 intersect multiple processors. Intersections of subdomains with processors are called *local 5 subdomains*. 6 7 N - total number of local subdomains on all processors (set in PCGASMSetTotalSubdomains() or calculated in PCSetUp_GASM()) 8 n - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains()) 9 nmax - maximum number of local subdomains per processor (calculated in PCGASMSetTotalSubdomains() or in PCSetUp_GASM()) 10 */ 11 #include <petsc-private/pcimpl.h> /*I "petscpc.h" I*/ 12 13 typedef struct { 14 PetscInt N,n,nmax; 15 PetscInt overlap; /* overlap requested by user */ 16 KSP *ksp; /* linear solvers for each block */ 17 Vec gx,gy; /* Merged work vectors */ 18 Vec *x,*y; /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */ 19 VecScatter gorestriction; /* merged restriction to disjoint union of outer subdomains */ 20 VecScatter girestriction; /* merged restriction to disjoint union of inner subdomains */ 21 IS *ois; /* index sets that define the outer (conceptually, overlapping) subdomains */ 22 IS *iis; /* index sets that define the inner (conceptually, nonoverlapping) subdomains */ 23 Mat *pmat; /* subdomain block matrices */ 24 PCGASMType type; /* use reduced interpolation, restriction or both */ 25 PetscBool create_local; /* whether the autocreated subdomains are local or not. */ 26 PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */ 27 PetscBool same_subdomain_solvers; /* flag indicating whether all local solvers are same */ 28 PetscBool sort_indices; /* flag to sort subdomain indices */ 29 } PC_GASM; 30 31 #undef __FUNCT__ 32 #define __FUNCT__ "PCGASMSubdomainView_Private" 33 static PetscErrorCode PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer) 34 { 35 PC_GASM *osm = (PC_GASM*)pc->data; 36 PetscInt j,nidx; 37 const PetscInt *idx; 38 PetscViewer sviewer; 39 char *cidx; 40 PetscErrorCode ierr; 41 42 PetscFunctionBegin; 43 if (i < 0 || i > osm->n) SETERRQ2(((PetscObject)viewer)->comm, PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n); 44 /* Inner subdomains. */ 45 ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr); 46 /* 47 No more than 15 characters per index plus a space. 48 PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx, 49 in case nidx == 0. That will take care of the space for the trailing '\0' as well. 50 For nidx == 0, the whole string 16 '\0'. 51 */ 52 ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr); 53 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr); 54 ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr); 55 for (j = 0; j < nidx; ++j) { 56 ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr); 57 } 58 ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr); 59 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 60 ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr); 61 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 62 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 63 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr); 64 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 65 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 66 ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr); 67 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 68 ierr = PetscFree(cidx);CHKERRQ(ierr); 69 /* Outer subdomains. */ 70 ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr); 71 /* 72 No more than 15 characters per index plus a space. 73 PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx, 74 in case nidx == 0. That will take care of the space for the trailing '\0' as well. 75 For nidx == 0, the whole string 16 '\0'. 76 */ 77 ierr = PetscMalloc(sizeof(char)*(16*(nidx+1)+1), &cidx);CHKERRQ(ierr); 78 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, 16*(nidx+1)+1, &sviewer);CHKERRQ(ierr); 79 ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr); 80 for (j = 0; j < nidx; ++j) { 81 ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr); 82 } 83 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 84 ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr); 85 ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr); 86 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 87 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 88 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr); 89 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 90 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 91 ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr); 92 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 93 ierr = PetscFree(cidx);CHKERRQ(ierr); 94 95 PetscFunctionReturn(0); 96 } 97 98 #undef __FUNCT__ 99 #define __FUNCT__ "PCGASMPrintSubdomains" 100 static PetscErrorCode PCGASMPrintSubdomains(PC pc) 101 { 102 PC_GASM *osm = (PC_GASM*)pc->data; 103 const char *prefix; 104 char fname[PETSC_MAX_PATH_LEN+1]; 105 PetscInt i, l, d, count, gcount, *permutation, *numbering; 106 PetscBool found; 107 PetscViewer viewer, sviewer = PETSC_NULL; 108 PetscErrorCode ierr; 109 110 PetscFunctionBegin; 111 ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr); 112 for (i = 0; i < osm->n; ++i) permutation[i] = i; 113 ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr); 114 ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr); 115 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 116 ierr = PetscOptionsGetString(prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr); 117 if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }; 118 ierr = PetscViewerASCIIOpen(((PetscObject)pc)->comm,fname,&viewer);CHKERRQ(ierr); 119 /* 120 Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer: 121 the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm. 122 */ 123 ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr); 124 l = 0; 125 for (count = 0; count < gcount; ++count) { 126 /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */ 127 if (l<osm->n){ 128 d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */ 129 if (numbering[d] == count) { 130 ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 131 ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr); 132 ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 133 ++l; 134 } 135 } 136 ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr); 137 } 138 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 139 ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr); 140 PetscFunctionReturn(0); 141 } 142 143 144 #undef __FUNCT__ 145 #define __FUNCT__ "PCView_GASM" 146 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer) 147 { 148 PC_GASM *osm = (PC_GASM*)pc->data; 149 const char *prefix; 150 PetscErrorCode ierr; 151 PetscMPIInt rank, size; 152 PetscInt i,bsz; 153 PetscBool iascii,view_subdomains=PETSC_FALSE; 154 PetscViewer sviewer; 155 PetscInt count, l, gcount, *numbering, *permutation; 156 char overlap[256] = "user-defined overlap"; 157 char gsubdomains[256] = "unknown total number of subdomains"; 158 char lsubdomains[256] = "unknown number of local subdomains"; 159 char msubdomains[256] = "unknown max number of local subdomains"; 160 PetscFunctionBegin; 161 ierr = MPI_Comm_size(((PetscObject)pc)->comm, &size);CHKERRQ(ierr); 162 ierr = MPI_Comm_rank(((PetscObject)pc)->comm, &rank);CHKERRQ(ierr); 163 164 165 ierr = PetscMalloc2(osm->n, PetscInt, &permutation, osm->n, PetscInt, &numbering);CHKERRQ(ierr); 166 for (i = 0; i < osm->n; ++i) permutation[i] = i; 167 ierr = PetscObjectsGetGlobalNumbering(((PetscObject)pc)->comm, osm->n, (PetscObject*)osm->ois, &gcount, numbering);CHKERRQ(ierr); 168 ierr = PetscSortIntWithPermutation(osm->n, numbering, permutation);CHKERRQ(ierr); 169 170 if (osm->overlap >= 0) { 171 ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr); 172 } 173 ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",gcount);CHKERRQ(ierr); 174 if (osm->N > 0) { 175 ierr = PetscSNPrintf(lsubdomains, sizeof(gsubdomains), "number of local subdomains = %D",osm->N);CHKERRQ(ierr); 176 } 177 if (osm->nmax > 0){ 178 ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr); 179 } 180 181 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 182 ierr = PetscOptionsGetBool(prefix,"-pc_gasm_view_subdomains",&view_subdomains,PETSC_NULL);CHKERRQ(ierr); 183 184 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 185 if (iascii) { 186 /* 187 Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer: 188 the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed 189 collectively on the comm. 190 */ 191 ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr); 192 ierr = PetscViewerASCIIPrintf(viewer,"Generalized additive Schwarz:\n");CHKERRQ(ierr); 193 ierr = PetscViewerASCIIPrintf(viewer,"Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr); 194 ierr = PetscViewerASCIIPrintf(viewer,"%s\n",overlap);CHKERRQ(ierr); 195 ierr = PetscViewerASCIIPrintf(viewer,"%s\n",gsubdomains);CHKERRQ(ierr); 196 ierr = PetscViewerASCIIPrintf(viewer,"%s\n",lsubdomains);CHKERRQ(ierr); 197 ierr = PetscViewerASCIIPrintf(viewer,"%s\n",msubdomains);CHKERRQ(ierr); 198 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr); 199 ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d:%d] number of locally-supported subdomains = %D\n",(int)rank,(int)size,osm->n);CHKERRQ(ierr); 200 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 201 ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr); 202 /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */ 203 ierr = PetscViewerASCIIPrintf(viewer,"Subdomain solver info is as follows:\n");CHKERRQ(ierr); 204 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 205 ierr = PetscViewerASCIIPrintf(viewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 206 /* Make sure that everybody waits for the banner to be printed. */ 207 ierr = MPI_Barrier(((PetscObject)viewer)->comm);CHKERRQ(ierr); 208 /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */ 209 l = 0; 210 for (count = 0; count < gcount; ++count) { 211 PetscMPIInt srank, ssize; 212 if (l<osm->n){ 213 PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */ 214 if (numbering[d] == count) { 215 ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr); 216 ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr); 217 ierr = PetscViewerGetSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 218 ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr); 219 ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_TRUE);CHKERRQ(ierr); 220 ierr = PetscViewerASCIISynchronizedPrintf(sviewer,"[%D:%D] (subcomm [%D:%D]) local subdomain number %D, local size = %D\n",(int)rank,(int)size,(int)srank,(int)ssize,d,bsz);CHKERRQ(ierr); 221 ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr); 222 ierr = PetscViewerASCIISynchronizedAllow(sviewer,PETSC_FALSE);CHKERRQ(ierr); 223 if (view_subdomains) { 224 ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr); 225 } 226 if (!pc->setupcalled) { 227 PetscViewerASCIIPrintf(sviewer, "Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr); 228 } 229 else { 230 ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr); 231 } 232 ierr = PetscViewerASCIIPrintf(sviewer,"- - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 233 ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr); 234 ierr = PetscViewerRestoreSubcomm(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 235 ++l; 236 } 237 } 238 ierr = MPI_Barrier(((PetscObject)pc)->comm);CHKERRQ(ierr); 239 } 240 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 241 } else { 242 SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PCGASM",((PetscObject)viewer)->type_name); 243 } 244 ierr = PetscFree2(permutation,numbering);CHKERRQ(ierr); 245 PetscFunctionReturn(0); 246 } 247 248 249 250 251 252 #undef __FUNCT__ 253 #define __FUNCT__ "PCSetUp_GASM" 254 static PetscErrorCode PCSetUp_GASM(PC pc) 255 { 256 PC_GASM *osm = (PC_GASM*)pc->data; 257 PetscErrorCode ierr; 258 PetscBool symset,flg; 259 PetscInt i; 260 PetscMPIInt rank, size; 261 MatReuse scall = MAT_REUSE_MATRIX; 262 KSP ksp; 263 PC subpc; 264 const char *prefix,*pprefix; 265 Vec x,y; 266 PetscInt oni; /* Number of indices in the i-th local outer subdomain. */ 267 const PetscInt *oidxi; /* Indices from the i-th subdomain local outer subdomain. */ 268 PetscInt on; /* Number of indices in the disjoint union of local outer subdomains. */ 269 PetscInt *oidx; /* Indices in the disjoint union of local outer subdomains. */ 270 IS gois; /* Disjoint union the global indices of outer subdomains. */ 271 IS goid; /* Identity IS of the size of the disjoint union of outer subdomains. */ 272 PetscScalar *gxarray, *gyarray; 273 PetscInt gofirst; /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- 274 over the disjoint union of outer subdomains. */ 275 DM *subdomain_dm = PETSC_NULL; 276 277 PetscFunctionBegin; 278 ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr); 279 ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr); 280 if (!pc->setupcalled) { 281 282 if (!osm->type_set) { 283 ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr); 284 if (symset && flg) { osm->type = PC_GASM_BASIC; } 285 } 286 287 /* 288 If subdomains have been set, then the local number of subdomains, osm->n, is NOT PETSC_DECIDE and is at least 1. 289 The total number of subdomains, osm->N is not necessarily set, might be PETSC_DECIDE, and then will have to be calculated from osm->n. 290 */ 291 if (osm->n == PETSC_DECIDE) { 292 /* no subdomains given */ 293 /* try pc->dm first */ 294 if (pc->dm) { 295 char ddm_name[1024]; 296 DM ddm; 297 PetscBool flg; 298 PetscInt num_subdomains, d; 299 char **subdomain_names; 300 IS *inner_subdomain_is, *outer_subdomain_is; 301 /* Allow the user to request a decomposition DM by name */ 302 ierr = PetscStrncpy(ddm_name, "", 1024);CHKERRQ(ierr); 303 ierr = PetscOptionsString("-pc_gasm_decomposition","Name of the DM defining the decomposition", "PCSetDM",ddm_name,ddm_name,1024,&flg);CHKERRQ(ierr); 304 if (flg) { 305 ierr = DMCreateDomainDecompositionDM(pc->dm, ddm_name, &ddm);CHKERRQ(ierr); 306 if (!ddm) { 307 SETERRQ1(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONGSTATE, "Uknown DM decomposition name %s", ddm_name); 308 } 309 ierr = PetscInfo(pc,"Using decomposition DM defined using options database\n");CHKERRQ(ierr); 310 ierr = PCSetDM(pc,ddm);CHKERRQ(ierr); 311 } 312 ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr); 313 if (num_subdomains) { 314 ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr); 315 } 316 for (d = 0; d < num_subdomains; ++d) { 317 if (subdomain_names) {ierr = PetscFree(subdomain_names[d]);CHKERRQ(ierr);} 318 if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);} 319 if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);} 320 } 321 ierr = PetscFree(subdomain_names);CHKERRQ(ierr); 322 ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr); 323 ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr); 324 } 325 if (osm->n == PETSC_DECIDE) { /* still no subdomains; use one per processor */ 326 osm->nmax = osm->n = 1; 327 ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr); 328 osm->N = size; 329 } 330 } 331 if (!osm->iis){ 332 /* 333 The local number of subdomains was set just above, or in PCGASMSetTotalSubdomains(), or in PCGASMSetSubdomains(), 334 but the actual subdomains have not been supplied (in PCGASMSetSubdomains()). 335 We create the requisite number of inner subdomains on PETSC_COMM_SELF (for now). 336 */ 337 ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->overlap,osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr); 338 } 339 if (osm->N == PETSC_DECIDE) { 340 struct {PetscInt max,sum;} inwork,outwork; 341 /* determine global number of subdomains and the max number of local subdomains */ 342 inwork.max = osm->n; 343 inwork.sum = osm->n; 344 ierr = MPI_Allreduce(&inwork,&outwork,1,MPIU_2INT,PetscMaxSum_Op,((PetscObject)pc)->comm);CHKERRQ(ierr); 345 osm->nmax = outwork.max; 346 osm->N = outwork.sum; 347 } 348 349 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 350 flg = PETSC_FALSE; 351 ierr = PetscOptionsGetBool(prefix,"-pc_gasm_print_subdomains",&flg,PETSC_NULL);CHKERRQ(ierr); 352 if (flg) { ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); } 353 354 if (osm->sort_indices) { 355 for (i=0; i<osm->n; i++) { 356 ierr = ISSort(osm->ois[i]);CHKERRQ(ierr); 357 ierr = ISSort(osm->iis[i]);CHKERRQ(ierr); 358 } 359 } 360 /* 361 Merge the ISs, create merged vectors and restrictions. 362 */ 363 /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */ 364 on = 0; 365 for (i=0; i<osm->n; i++) { 366 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 367 on += oni; 368 } 369 ierr = PetscMalloc(on*sizeof(PetscInt), &oidx);CHKERRQ(ierr); 370 on = 0; 371 for (i=0; i<osm->n; i++) { 372 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 373 ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr); 374 ierr = PetscMemcpy(oidx+on, oidxi, sizeof(PetscInt)*oni);CHKERRQ(ierr); 375 ierr = ISRestoreIndices(osm->ois[i], &oidxi);CHKERRQ(ierr); 376 on += oni; 377 } 378 ierr = ISCreateGeneral(((PetscObject)(pc))->comm, on, oidx, PETSC_OWN_POINTER, &gois);CHKERRQ(ierr); 379 ierr = MatGetVecs(pc->pmat,&x,&y);CHKERRQ(ierr); 380 ierr = VecCreateMPI(((PetscObject)pc)->comm, on, PETSC_DECIDE, &osm->gx);CHKERRQ(ierr); 381 ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr); 382 ierr = VecGetOwnershipRange(osm->gx, &gofirst, PETSC_NULL);CHKERRQ(ierr); 383 ierr = ISCreateStride(((PetscObject)pc)->comm,on,gofirst,1, &goid);CHKERRQ(ierr); 384 ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr); 385 ierr = VecDestroy(&x);CHKERRQ(ierr); 386 ierr = ISDestroy(&gois);CHKERRQ(ierr); 387 /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */ 388 { PetscInt ini; /* Number of indices the i-th a local inner subdomain. */ 389 PetscInt in; /* Number of indices in the disjoint uniont of local inner subdomains. */ 390 PetscInt *iidx; /* Global indices in the merged local inner subdomain. */ 391 PetscInt *ioidx; /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */ 392 IS giis; /* IS for the disjoint union of inner subdomains. */ 393 IS giois; /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */ 394 /**/ 395 in = 0; 396 for (i=0; i<osm->n; i++) { 397 ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr); 398 in += ini; 399 } 400 ierr = PetscMalloc(in*sizeof(PetscInt), &iidx);CHKERRQ(ierr); 401 ierr = PetscMalloc(in*sizeof(PetscInt), &ioidx);CHKERRQ(ierr); 402 ierr = VecGetOwnershipRange(osm->gx,&gofirst, PETSC_NULL);CHKERRQ(ierr); 403 in = 0; 404 on = 0; 405 for (i=0; i<osm->n; i++) { 406 const PetscInt *iidxi; /* Global indices of the i-th local inner subdomain. */ 407 ISLocalToGlobalMapping ltogi; /* Map from global to local indices of the i-th outer local subdomain. */ 408 PetscInt *ioidxi; /* Local indices of the i-th local inner subdomain within the local outer subdomain. */ 409 PetscInt ioni; /* Number of indices in ioidxi; if ioni != ini the inner subdomain is not a subdomain of the outer subdomain (error). */ 410 PetscInt k; 411 ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr); 412 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 413 ierr = ISGetIndices(osm->iis[i],&iidxi);CHKERRQ(ierr); 414 ierr = PetscMemcpy(iidx+in, iidxi, sizeof(PetscInt)*ini);CHKERRQ(ierr); 415 ierr = ISLocalToGlobalMappingCreateIS(osm->ois[i],<ogi);CHKERRQ(ierr); 416 ioidxi = ioidx+in; 417 ierr = ISGlobalToLocalMappingApply(ltogi,IS_GTOLM_DROP,ini,iidxi,&ioni,ioidxi);CHKERRQ(ierr); 418 ierr = ISLocalToGlobalMappingDestroy(<ogi);CHKERRQ(ierr); 419 ierr = ISRestoreIndices(osm->iis[i], &iidxi);CHKERRQ(ierr); 420 if (ioni != ini) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Inner subdomain %D contains %D indices outside of its outer subdomain", i, ini - ioni); 421 for (k = 0; k < ini; ++k) { 422 ioidxi[k] += gofirst+on; 423 } 424 in += ini; 425 on += oni; 426 } 427 ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, iidx, PETSC_OWN_POINTER, &giis);CHKERRQ(ierr); 428 ierr = ISCreateGeneral(((PetscObject)pc)->comm, in, ioidx, PETSC_OWN_POINTER, &giois);CHKERRQ(ierr); 429 ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr); 430 ierr = VecDestroy(&y);CHKERRQ(ierr); 431 ierr = ISDestroy(&giis);CHKERRQ(ierr); 432 ierr = ISDestroy(&giois);CHKERRQ(ierr); 433 } 434 ierr = ISDestroy(&goid);CHKERRQ(ierr); 435 /* Create the subdomain work vectors. */ 436 ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->x);CHKERRQ(ierr); 437 ierr = PetscMalloc(osm->n*sizeof(Vec),&osm->y);CHKERRQ(ierr); 438 ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr); 439 ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr); 440 for (i=0, on=0; i<osm->n; ++i, on += oni) { 441 PetscInt oNi; 442 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 443 ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr); 444 ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr); 445 ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr); 446 } 447 ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr); 448 ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr); 449 /* Create the local solvers */ 450 ierr = PetscMalloc(osm->n*sizeof(KSP *),&osm->ksp);CHKERRQ(ierr); 451 for (i=0; i<osm->n; i++) { /* KSPs are local */ 452 ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr); 453 ierr = PetscLogObjectParent(pc,ksp);CHKERRQ(ierr); 454 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 455 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 456 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); 457 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 458 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 459 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 460 osm->ksp[i] = ksp; 461 } 462 scall = MAT_INITIAL_MATRIX; 463 464 }/*if (!pc->setupcalled)*/ 465 else { 466 /* 467 Destroy the blocks from the previous iteration 468 */ 469 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 470 ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr); 471 scall = MAT_INITIAL_MATRIX; 472 } 473 } 474 475 /* 476 Extract out the submatrices. 477 */ 478 if (size > 1) { 479 ierr = MatGetSubMatricesParallel(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr); 480 } 481 else { 482 ierr = MatGetSubMatrices(pc->pmat,osm->n,osm->ois, osm->ois,scall,&osm->pmat);CHKERRQ(ierr); 483 } 484 if (scall == MAT_INITIAL_MATRIX) { 485 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 486 for (i=0; i<osm->n; i++) { 487 ierr = PetscLogObjectParent(pc,osm->pmat[i]);CHKERRQ(ierr); 488 ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr); 489 } 490 } 491 492 /* Return control to the user so that the submatrices can be modified (e.g., to apply 493 different boundary conditions for the submatrices than for the global problem) */ 494 ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 495 496 /* 497 Loop over submatrices putting them into local ksps 498 */ 499 for (i=0; i<osm->n; i++) { 500 ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i],pc->flag);CHKERRQ(ierr); 501 if (!pc->setupcalled) { 502 ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr); 503 } 504 } 505 506 PetscFunctionReturn(0); 507 } 508 509 #undef __FUNCT__ 510 #define __FUNCT__ "PCSetUpOnBlocks_GASM" 511 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc) 512 { 513 PC_GASM *osm = (PC_GASM*)pc->data; 514 PetscErrorCode ierr; 515 PetscInt i; 516 517 PetscFunctionBegin; 518 for (i=0; i<osm->n; i++) { 519 ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr); 520 } 521 PetscFunctionReturn(0); 522 } 523 524 #undef __FUNCT__ 525 #define __FUNCT__ "PCApply_GASM" 526 static PetscErrorCode PCApply_GASM(PC pc,Vec x,Vec y) 527 { 528 PC_GASM *osm = (PC_GASM*)pc->data; 529 PetscErrorCode ierr; 530 PetscInt i; 531 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 532 533 PetscFunctionBegin; 534 /* 535 Support for limiting the restriction or interpolation only to the inner 536 subdomain values (leaving the other values 0). 537 */ 538 if (!(osm->type & PC_GASM_RESTRICT)) { 539 /* have to zero the work RHS since scatter may leave some slots empty */ 540 ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr); 541 ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 542 } 543 else { 544 ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 545 } 546 ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr); 547 if (!(osm->type & PC_GASM_RESTRICT)) { 548 ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 549 } 550 else { 551 ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 552 } 553 /* do the subdomain solves */ 554 for (i=0; i<osm->n; ++i) { 555 ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr); 556 } 557 ierr = VecZeroEntries(y);CHKERRQ(ierr); 558 if (!(osm->type & PC_GASM_INTERPOLATE)) { 559 ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 560 ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); PetscFunctionReturn(0); 561 } 562 else { 563 ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 564 ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); PetscFunctionReturn(0); 565 } 566 } 567 568 #undef __FUNCT__ 569 #define __FUNCT__ "PCApplyTranspose_GASM" 570 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec x,Vec y) 571 { 572 PC_GASM *osm = (PC_GASM*)pc->data; 573 PetscErrorCode ierr; 574 PetscInt i; 575 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 576 577 PetscFunctionBegin; 578 /* 579 Support for limiting the restriction or interpolation to only local 580 subdomain values (leaving the other values 0). 581 582 Note: these are reversed from the PCApply_GASM() because we are applying the 583 transpose of the three terms 584 */ 585 if (!(osm->type & PC_GASM_INTERPOLATE)) { 586 /* have to zero the work RHS since scatter may leave some slots empty */ 587 ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr); 588 ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 589 } 590 else { 591 ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 592 } 593 ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr); 594 if (!(osm->type & PC_GASM_INTERPOLATE)) { 595 ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 596 } 597 else { 598 ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 599 } 600 /* do the local solves */ 601 for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */ 602 ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr); 603 } 604 ierr = VecZeroEntries(y);CHKERRQ(ierr); 605 if (!(osm->type & PC_GASM_RESTRICT)) { 606 ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 607 ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 608 } 609 else { 610 ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 611 ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 612 } 613 614 PetscFunctionReturn(0); 615 } 616 617 #undef __FUNCT__ 618 #define __FUNCT__ "PCReset_GASM" 619 static PetscErrorCode PCReset_GASM(PC pc) 620 { 621 PC_GASM *osm = (PC_GASM*)pc->data; 622 PetscErrorCode ierr; 623 PetscInt i; 624 625 PetscFunctionBegin; 626 if (osm->ksp) { 627 for (i=0; i<osm->n; i++) { 628 ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr); 629 } 630 } 631 if (osm->pmat) { 632 if (osm->n > 0) { 633 ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr); 634 } 635 } 636 if (osm->x) { 637 for (i=0; i<osm->n; i++) { 638 ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr); 639 ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr); 640 } 641 } 642 ierr = VecDestroy(&osm->gx);CHKERRQ(ierr); 643 ierr = VecDestroy(&osm->gy);CHKERRQ(ierr); 644 645 ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr); 646 ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr); 647 ierr = PCGASMDestroySubdomains(osm->n,osm->ois,osm->iis);CHKERRQ(ierr); 648 osm->ois = 0; 649 osm->iis = 0; 650 PetscFunctionReturn(0); 651 } 652 653 #undef __FUNCT__ 654 #define __FUNCT__ "PCDestroy_GASM" 655 static PetscErrorCode PCDestroy_GASM(PC pc) 656 { 657 PC_GASM *osm = (PC_GASM*)pc->data; 658 PetscErrorCode ierr; 659 PetscInt i; 660 661 PetscFunctionBegin; 662 ierr = PCReset_GASM(pc);CHKERRQ(ierr); 663 if (osm->ksp) { 664 for (i=0; i<osm->n; i++) { 665 ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr); 666 } 667 ierr = PetscFree(osm->ksp);CHKERRQ(ierr); 668 } 669 ierr = PetscFree(osm->x);CHKERRQ(ierr); 670 ierr = PetscFree(osm->y);CHKERRQ(ierr); 671 ierr = PetscFree(pc->data);CHKERRQ(ierr); 672 PetscFunctionReturn(0); 673 } 674 675 #undef __FUNCT__ 676 #define __FUNCT__ "PCSetFromOptions_GASM" 677 static PetscErrorCode PCSetFromOptions_GASM(PC pc) { 678 PC_GASM *osm = (PC_GASM*)pc->data; 679 PetscErrorCode ierr; 680 PetscInt blocks,ovl; 681 PetscBool symset,flg; 682 PCGASMType gasmtype; 683 684 PetscFunctionBegin; 685 /* set the type to symmetric if matrix is symmetric */ 686 if (!osm->type_set && pc->pmat) { 687 ierr = MatIsSymmetricKnown(pc->pmat,&symset,&flg);CHKERRQ(ierr); 688 if (symset && flg) { osm->type = PC_GASM_BASIC; } 689 } 690 ierr = PetscOptionsHead("Generalized additive Schwarz options");CHKERRQ(ierr); 691 ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->n,&blocks,&flg);CHKERRQ(ierr); 692 osm->create_local = PETSC_TRUE; 693 ierr = PetscOptionsBool("-pc_gasm_subdomains_create_local","Whether to make autocreated subdomains local (true by default)","PCGASMSetTotalSubdomains",osm->create_local,&osm->create_local,&flg);CHKERRQ(ierr); 694 if (!osm->create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No support for autocreation of nonlocal subdomains yet."); 695 696 if (flg) {ierr = PCGASMSetTotalSubdomains(pc,blocks,osm->create_local);CHKERRQ(ierr); } 697 ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr); 698 if (flg) {ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); } 699 flg = PETSC_FALSE; 700 ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr); 701 if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr); } 702 ierr = PetscOptionsTail();CHKERRQ(ierr); 703 PetscFunctionReturn(0); 704 } 705 706 /*------------------------------------------------------------------------------------*/ 707 708 EXTERN_C_BEGIN 709 #undef __FUNCT__ 710 #define __FUNCT__ "PCGASMSetSubdomains_GASM" 711 PetscErrorCode PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[]) 712 { 713 PC_GASM *osm = (PC_GASM*)pc->data; 714 PetscErrorCode ierr; 715 PetscInt i; 716 717 PetscFunctionBegin; 718 if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, n = %D",n); 719 if (pc->setupcalled && (n != osm->n || iis || ois)) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp()."); 720 721 if (!pc->setupcalled) { 722 osm->n = n; 723 osm->ois = 0; 724 osm->iis = 0; 725 if (ois) { 726 for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);} 727 } 728 if (iis) { 729 for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr);} 730 } 731 ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr); 732 if (ois) { 733 ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr); 734 for (i=0; i<n; i++) { osm->ois[i] = ois[i]; } 735 /* Flag indicating that the user has set outer subdomains, so PCGASM should not increase their size. */ 736 osm->overlap = -1; 737 if (!iis) { 738 ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr); 739 for (i=0; i<n; i++) { 740 for (i=0; i<n; i++) {ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr);} 741 osm->iis[i] = ois[i]; 742 } 743 } 744 } 745 if (iis) { 746 ierr = PetscMalloc(n*sizeof(IS),&osm->iis);CHKERRQ(ierr); 747 for (i=0; i<n; i++) { osm->iis[i] = iis[i]; } 748 if (!ois) { 749 ierr = PetscMalloc(n*sizeof(IS),&osm->ois);CHKERRQ(ierr); 750 for (i=0; i<n; i++) { 751 for (i=0; i<n; i++) { 752 ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr); 753 osm->ois[i] = iis[i]; 754 } 755 } 756 if (osm->overlap > 0) { 757 /* Extend the "overlapping" regions by a number of steps */ 758 ierr = MatIncreaseOverlap(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr); 759 } 760 } 761 } 762 } 763 PetscFunctionReturn(0); 764 } 765 EXTERN_C_END 766 767 EXTERN_C_BEGIN 768 #undef __FUNCT__ 769 #define __FUNCT__ "PCGASMSetTotalSubdomains_GASM" 770 PetscErrorCode PCGASMSetTotalSubdomains_GASM(PC pc,PetscInt N, PetscBool create_local) 771 { 772 PC_GASM *osm = (PC_GASM*)pc->data; 773 PetscErrorCode ierr; 774 PetscMPIInt rank,size; 775 PetscInt n; 776 PetscInt Nmin, Nmax; 777 PetscFunctionBegin; 778 if (!create_local) SETERRQ(((PetscObject)pc)->comm, PETSC_ERR_SUP, "No suppor for autocreation of nonlocal subdomains."); 779 if (N < 1) SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be > 0, N = %D",N); 780 ierr = MPI_Allreduce(&N,&Nmin,1,MPIU_INT,MPIU_MIN,((PetscObject)pc)->comm);CHKERRQ(ierr); 781 ierr = MPI_Allreduce(&N,&Nmax,1,MPIU_INT,MPIU_MAX,((PetscObject)pc)->comm);CHKERRQ(ierr); 782 if (Nmin != Nmax) 783 SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "All processors must use the same number of subdomains. min(N) = %D != %D = max(N)", Nmin, Nmax); 784 785 osm->create_local = create_local; 786 /* 787 Split the subdomains equally among all processors 788 */ 789 ierr = MPI_Comm_rank(((PetscObject)pc)->comm,&rank);CHKERRQ(ierr); 790 ierr = MPI_Comm_size(((PetscObject)pc)->comm,&size);CHKERRQ(ierr); 791 n = N/size + ((N % size) > rank); 792 if (!n) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Process %d must have at least one subdomain: total processors %d total blocks %D",(int)rank,(int)size,N); 793 if (pc->setupcalled && n != osm->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before PCSetUp()."); 794 if (!pc->setupcalled) { 795 ierr = PCGASMDestroySubdomains(osm->n,osm->iis,osm->ois);CHKERRQ(ierr); 796 osm->N = N; 797 osm->n = n; 798 osm->nmax = N/size + ((N%size)?1:0); 799 osm->ois = 0; 800 osm->iis = 0; 801 } 802 PetscFunctionReturn(0); 803 } 804 EXTERN_C_END 805 806 EXTERN_C_BEGIN 807 #undef __FUNCT__ 808 #define __FUNCT__ "PCGASMSetOverlap_GASM" 809 PetscErrorCode PCGASMSetOverlap_GASM(PC pc,PetscInt ovl) 810 { 811 PC_GASM *osm = (PC_GASM*)pc->data; 812 813 PetscFunctionBegin; 814 if (ovl < 0) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested"); 815 if (pc->setupcalled && ovl != osm->overlap) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp()."); 816 if (!pc->setupcalled) { 817 osm->overlap = ovl; 818 } 819 PetscFunctionReturn(0); 820 } 821 EXTERN_C_END 822 823 EXTERN_C_BEGIN 824 #undef __FUNCT__ 825 #define __FUNCT__ "PCGASMSetType_GASM" 826 PetscErrorCode PCGASMSetType_GASM(PC pc,PCGASMType type) 827 { 828 PC_GASM *osm = (PC_GASM*)pc->data; 829 830 PetscFunctionBegin; 831 osm->type = type; 832 osm->type_set = PETSC_TRUE; 833 PetscFunctionReturn(0); 834 } 835 EXTERN_C_END 836 837 EXTERN_C_BEGIN 838 #undef __FUNCT__ 839 #define __FUNCT__ "PCGASMSetSortIndices_GASM" 840 PetscErrorCode PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort) 841 { 842 PC_GASM *osm = (PC_GASM*)pc->data; 843 844 PetscFunctionBegin; 845 osm->sort_indices = doSort; 846 PetscFunctionReturn(0); 847 } 848 EXTERN_C_END 849 850 EXTERN_C_BEGIN 851 #undef __FUNCT__ 852 #define __FUNCT__ "PCGASMGetSubKSP_GASM" 853 /* 854 FIX: This routine might need to be modified once multiple ranks per subdomain are allowed. 855 In particular, it would upset the global subdomain number calculation. 856 */ 857 PetscErrorCode PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp) 858 { 859 PC_GASM *osm = (PC_GASM*)pc->data; 860 PetscErrorCode ierr; 861 862 PetscFunctionBegin; 863 if (osm->n < 1) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ORDER,"Need to call PCSetUP() on PC (or KSPSetUp() on the outer KSP object) before calling here"); 864 865 if (n) { 866 *n = osm->n; 867 } 868 if (first) { 869 ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr); 870 *first -= osm->n; 871 } 872 if (ksp) { 873 /* Assume that local solves are now different; not necessarily 874 true, though! This flag is used only for PCView_GASM() */ 875 *ksp = osm->ksp; 876 osm->same_subdomain_solvers = PETSC_FALSE; 877 } 878 PetscFunctionReturn(0); 879 }/* PCGASMGetSubKSP_GASM() */ 880 EXTERN_C_END 881 882 883 #undef __FUNCT__ 884 #define __FUNCT__ "PCGASMSetSubdomains" 885 /*@C 886 PCGASMSetSubdomains - Sets the subdomains for this processor 887 for the additive Schwarz preconditioner. 888 889 Collective on PC 890 891 Input Parameters: 892 + pc - the preconditioner context 893 . n - the number of subdomains for this processor 894 . iis - the index sets that define this processor's local inner subdomains 895 (or PETSC_NULL for PETSc to determine subdomains) 896 - ois- the index sets that define this processor's local outer subdomains 897 (or PETSC_NULL to use the same as iis) 898 899 Notes: 900 The IS indices use the parallel, global numbering of the vector entries. 901 Inner subdomains are those where the correction is applied. 902 Outer subdomains are those where the residual necessary to obtain the 903 corrections is obtained (see PCGASMType for the use of inner/outer subdomains). 904 Both inner and outer subdomains can extend over several processors. 905 This processor's portion of a subdomain is known as a local subdomain. 906 907 By default the GASM preconditioner uses 1 (local) subdomain per processor. 908 Use PCGASMSetTotalSubdomains() to set the total number of subdomains across 909 all processors that PCGASM will create automatically, and to specify whether 910 they should be local or not. 911 912 913 Level: advanced 914 915 .keywords: PC, GASM, set, subdomains, additive Schwarz 916 917 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(), 918 PCGASMCreateSubdomains2D(), PCGASMGetSubdomains() 919 @*/ 920 PetscErrorCode PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[]) 921 { 922 PetscErrorCode ierr; 923 924 PetscFunctionBegin; 925 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 926 ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr); 927 PetscFunctionReturn(0); 928 } 929 930 #undef __FUNCT__ 931 #define __FUNCT__ "PCGASMSetTotalSubdomains" 932 /*@C 933 PCGASMSetTotalSubdomains - Sets the total number of subdomains to use in the generalized additive 934 Schwarz preconditioner. The number of subdomains is cumulative across all processors in pc's 935 communicator. Either all or no processors in the PC communicator must call this routine with 936 the same N. The subdomains will be created automatically during PCSetUp(). 937 938 Collective on PC 939 940 Input Parameters: 941 + pc - the preconditioner context 942 . N - the total number of subdomains cumulative across all processors 943 - create_local - whether the subdomains to be created are to be local 944 945 Options Database Key: 946 To set the total number of subdomains and let PCGASM autocreate them, rather than specify the index sets, use the following options: 947 + -pc_gasm_total_subdomains <n> - sets the total number of subdomains to be autocreated by PCGASM 948 - -pc_gasm_subdomains_create_local <true|false> - whether autocreated subdomains should be local or not (default is true) 949 950 By default the GASM preconditioner uses 1 subdomain per processor. 951 952 953 Use PCGASMSetSubdomains() to set subdomains explicitly or to set different numbers 954 of subdomains per processor. 955 956 Level: advanced 957 958 .keywords: PC, GASM, set, total, global, subdomains, additive Schwarz 959 960 .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(), 961 PCGASMCreateSubdomains2D() 962 @*/ 963 PetscErrorCode PCGASMSetTotalSubdomains(PC pc,PetscInt N, PetscBool create_local) 964 { 965 PetscErrorCode ierr; 966 967 PetscFunctionBegin; 968 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 969 ierr = PetscTryMethod(pc,"PCGASMSetTotalSubdomains_C",(PC,PetscInt,PetscBool),(pc,N,create_local));CHKERRQ(ierr); 970 PetscFunctionReturn(0); 971 } 972 973 #undef __FUNCT__ 974 #define __FUNCT__ "PCGASMSetOverlap" 975 /*@ 976 PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the 977 additive Schwarz preconditioner. Either all or no processors in the 978 PC communicator must call this routine. 979 980 Logically Collective on PC 981 982 Input Parameters: 983 + pc - the preconditioner context 984 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 1) 985 986 Options Database Key: 987 . -pc_gasm_overlap <overlap> - Sets overlap 988 989 Notes: 990 By default the GASM preconditioner uses 1 subdomain per processor. To use 991 multiple subdomain per perocessor, see PCGASMSetTotalSubdomains() or 992 PCGASMSetSubdomains() (and the option -pc_gasm_total_subdomains <n>). 993 994 The overlap defaults to 1, so if one desires that no additional 995 overlap be computed beyond what may have been set with a call to 996 PCGASMSetTotalSubdomains() or PCGASMSetSubdomains(), then ovl 997 must be set to be 0. In particular, if one does not explicitly set 998 the subdomains in application code, then all overlap would be computed 999 internally by PETSc, and using an overlap of 0 would result in an GASM 1000 variant that is equivalent to the block Jacobi preconditioner. 1001 1002 Note that one can define initial index sets with any overlap via 1003 PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows 1004 PETSc to extend that overlap further, if desired. 1005 1006 Level: intermediate 1007 1008 .keywords: PC, GASM, set, overlap 1009 1010 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(), 1011 PCGASMCreateSubdomains2D(), PCGASMGetSubdomains() 1012 @*/ 1013 PetscErrorCode PCGASMSetOverlap(PC pc,PetscInt ovl) 1014 { 1015 PetscErrorCode ierr; 1016 1017 PetscFunctionBegin; 1018 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1019 PetscValidLogicalCollectiveInt(pc,ovl,2); 1020 ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr); 1021 PetscFunctionReturn(0); 1022 } 1023 1024 #undef __FUNCT__ 1025 #define __FUNCT__ "PCGASMSetType" 1026 /*@ 1027 PCGASMSetType - Sets the type of restriction and interpolation used 1028 for local problems in the additive Schwarz method. 1029 1030 Logically Collective on PC 1031 1032 Input Parameters: 1033 + pc - the preconditioner context 1034 - type - variant of GASM, one of 1035 .vb 1036 PC_GASM_BASIC - full interpolation and restriction 1037 PC_GASM_RESTRICT - full restriction, local processor interpolation 1038 PC_GASM_INTERPOLATE - full interpolation, local processor restriction 1039 PC_GASM_NONE - local processor restriction and interpolation 1040 .ve 1041 1042 Options Database Key: 1043 . -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type 1044 1045 Level: intermediate 1046 1047 .keywords: PC, GASM, set, type 1048 1049 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(), 1050 PCGASMCreateSubdomains2D() 1051 @*/ 1052 PetscErrorCode PCGASMSetType(PC pc,PCGASMType type) 1053 { 1054 PetscErrorCode ierr; 1055 1056 PetscFunctionBegin; 1057 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1058 PetscValidLogicalCollectiveEnum(pc,type,2); 1059 ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr); 1060 PetscFunctionReturn(0); 1061 } 1062 1063 #undef __FUNCT__ 1064 #define __FUNCT__ "PCGASMSetSortIndices" 1065 /*@ 1066 PCGASMSetSortIndices - Determines whether subdomain indices are sorted. 1067 1068 Logically Collective on PC 1069 1070 Input Parameters: 1071 + pc - the preconditioner context 1072 - doSort - sort the subdomain indices 1073 1074 Level: intermediate 1075 1076 .keywords: PC, GASM, set, type 1077 1078 .seealso: PCGASMSetSubdomains(), PCGASMSetTotalSubdomains(), PCGASMGetSubKSP(), 1079 PCGASMCreateSubdomains2D() 1080 @*/ 1081 PetscErrorCode PCGASMSetSortIndices(PC pc,PetscBool doSort) 1082 { 1083 PetscErrorCode ierr; 1084 1085 PetscFunctionBegin; 1086 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1087 PetscValidLogicalCollectiveBool(pc,doSort,2); 1088 ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr); 1089 PetscFunctionReturn(0); 1090 } 1091 1092 #undef __FUNCT__ 1093 #define __FUNCT__ "PCGASMGetSubKSP" 1094 /*@C 1095 PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on 1096 this processor. 1097 1098 Collective on PC iff first_local is requested 1099 1100 Input Parameter: 1101 . pc - the preconditioner context 1102 1103 Output Parameters: 1104 + n_local - the number of blocks on this processor or PETSC_NULL 1105 . first_local - the global number of the first block on this processor or PETSC_NULL, 1106 all processors must request or all must pass PETSC_NULL 1107 - ksp - the array of KSP contexts 1108 1109 Note: 1110 After PCGASMGetSubKSP() the array of KSPes is not to be freed 1111 1112 Currently for some matrix implementations only 1 block per processor 1113 is supported. 1114 1115 You must call KSPSetUp() before calling PCGASMGetSubKSP(). 1116 1117 Level: advanced 1118 1119 .keywords: PC, GASM, additive Schwarz, get, sub, KSP, context 1120 1121 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap(), 1122 PCGASMCreateSubdomains2D(), 1123 @*/ 1124 PetscErrorCode PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[]) 1125 { 1126 PetscErrorCode ierr; 1127 1128 PetscFunctionBegin; 1129 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1130 ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr); 1131 PetscFunctionReturn(0); 1132 } 1133 1134 /* -------------------------------------------------------------------------------------*/ 1135 /*MC 1136 PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with 1137 its own KSP object. 1138 1139 Options Database Keys: 1140 + -pc_gasm_total_block_count <n> - Sets total number of local subdomains (known as blocks) to be distributed among processors 1141 . -pc_gasm_view_subdomains - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view 1142 . -pc_gasm_print_subdomains - activates the printing of subdomain indices in PCSetUp() 1143 . -pc_gasm_overlap <ovl> - Sets overlap by which to (automatically) extend local subdomains 1144 - -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type 1145 1146 IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you 1147 will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use 1148 -pc_gasm_type basic to use the standard GASM. 1149 1150 Notes: Each processor can have one or more blocks, but a block cannot be shared by more 1151 than one processor. Defaults to one block per processor. 1152 1153 To set options on the solvers for each block append -sub_ to all the KSP, and PC 1154 options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly 1155 1156 To set the options on the solvers separate for each block call PCGASMGetSubKSP() 1157 and set the options directly on the resulting KSP object (you can access its PC 1158 with KSPGetPC()) 1159 1160 1161 Level: beginner 1162 1163 Concepts: additive Schwarz method 1164 1165 References: 1166 An additive variant of the Schwarz alternating method for the case of many subregions 1167 M Dryja, OB Widlund - Courant Institute, New York University Technical report 1168 1169 Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations, 1170 Barry Smith, Petter Bjorstad, and William Gropp, Cambridge University Press, ISBN 0-521-49589-X. 1171 1172 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 1173 PCBJACOBI, PCGASMSetUseTrueLocal(), PCGASMGetSubKSP(), PCGASMSetSubdomains(), 1174 PCGASMSetTotalSubdomains(), PCSetModifySubmatrices(), PCGASMSetOverlap(), PCGASMSetType() 1175 1176 M*/ 1177 1178 EXTERN_C_BEGIN 1179 #undef __FUNCT__ 1180 #define __FUNCT__ "PCCreate_GASM" 1181 PetscErrorCode PCCreate_GASM(PC pc) 1182 { 1183 PetscErrorCode ierr; 1184 PC_GASM *osm; 1185 1186 PetscFunctionBegin; 1187 ierr = PetscNewLog(pc,PC_GASM,&osm);CHKERRQ(ierr); 1188 osm->N = PETSC_DECIDE; 1189 osm->n = PETSC_DECIDE; 1190 osm->nmax = 0; 1191 osm->overlap = 1; 1192 osm->ksp = 0; 1193 osm->gorestriction = 0; 1194 osm->girestriction = 0; 1195 osm->gx = 0; 1196 osm->gy = 0; 1197 osm->x = 0; 1198 osm->y = 0; 1199 osm->ois = 0; 1200 osm->iis = 0; 1201 osm->pmat = 0; 1202 osm->type = PC_GASM_RESTRICT; 1203 osm->same_subdomain_solvers = PETSC_TRUE; 1204 osm->sort_indices = PETSC_TRUE; 1205 1206 pc->data = (void*)osm; 1207 pc->ops->apply = PCApply_GASM; 1208 pc->ops->applytranspose = PCApplyTranspose_GASM; 1209 pc->ops->setup = PCSetUp_GASM; 1210 pc->ops->reset = PCReset_GASM; 1211 pc->ops->destroy = PCDestroy_GASM; 1212 pc->ops->setfromoptions = PCSetFromOptions_GASM; 1213 pc->ops->setuponblocks = PCSetUpOnBlocks_GASM; 1214 pc->ops->view = PCView_GASM; 1215 pc->ops->applyrichardson = 0; 1216 1217 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSubdomains_C","PCGASMSetSubdomains_GASM", 1218 PCGASMSetSubdomains_GASM);CHKERRQ(ierr); 1219 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetTotalSubdomains_C","PCGASMSetTotalSubdomains_GASM", 1220 PCGASMSetTotalSubdomains_GASM);CHKERRQ(ierr); 1221 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetOverlap_C","PCGASMSetOverlap_GASM", 1222 PCGASMSetOverlap_GASM);CHKERRQ(ierr); 1223 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetType_C","PCGASMSetType_GASM", 1224 PCGASMSetType_GASM);CHKERRQ(ierr); 1225 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMSetSortIndices_C","PCGASMSetSortIndices_GASM", 1226 PCGASMSetSortIndices_GASM);CHKERRQ(ierr); 1227 ierr = PetscObjectComposeFunctionDynamic((PetscObject)pc,"PCGASMGetSubKSP_C","PCGASMGetSubKSP_GASM", 1228 PCGASMGetSubKSP_GASM);CHKERRQ(ierr); 1229 PetscFunctionReturn(0); 1230 } 1231 EXTERN_C_END 1232 1233 1234 #undef __FUNCT__ 1235 #define __FUNCT__ "PCGASMCreateLocalSubdomains" 1236 /*@C 1237 PCGASMCreateLocalSubdomains - Creates n local index sets for the overlapping 1238 Schwarz preconditioner for a any problem based on its matrix. 1239 1240 Collective 1241 1242 Input Parameters: 1243 + A - The global matrix operator 1244 . overlap - amount of overlap in outer subdomains 1245 - n - the number of local subdomains 1246 1247 Output Parameters: 1248 + iis - the array of index sets defining the local inner subdomains (on which the correction is applied) 1249 - ois - the array of index sets defining the local outer subdomains (on which the residual is computed) 1250 1251 Level: advanced 1252 1253 Note: this generates n nonoverlapping local inner subdomains on PETSC_COMM_SELF; 1254 PCGASM will generate the overlap from these if you use them in PCGASMSetSubdomains() and set a 1255 nonzero overlap with PCGASMSetOverlap() 1256 1257 In the Fortran version you must provide the array outis[] already allocated of length n. 1258 1259 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid 1260 1261 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains() 1262 @*/ 1263 PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt overlap, PetscInt n, IS* iis[], IS* ois[]) 1264 { 1265 MatPartitioning mpart; 1266 const char *prefix; 1267 PetscErrorCode (*f)(Mat,MatReuse,Mat*); 1268 PetscMPIInt size; 1269 PetscInt i,j,rstart,rend,bs; 1270 PetscBool isbaij = PETSC_FALSE,foundpart = PETSC_FALSE; 1271 Mat Ad = PETSC_NULL, adj; 1272 IS ispart,isnumb,*is; 1273 PetscErrorCode ierr; 1274 1275 PetscFunctionBegin; 1276 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1277 PetscValidPointer(iis,4); 1278 if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local blocks must be > 0, n = %D",n); 1279 1280 /* Get prefix, row distribution, and block size */ 1281 ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr); 1282 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 1283 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1284 if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs); 1285 1286 /* Get diagonal block from matrix if possible */ 1287 ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 1288 ierr = PetscObjectQueryFunction((PetscObject)A,"MatGetDiagonalBlock_C",(void (**)(void))&f);CHKERRQ(ierr); 1289 if (f) { 1290 ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr); 1291 } else if (size == 1) { 1292 Ad = A; 1293 } 1294 if (Ad) { 1295 ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr); 1296 if (!isbaij) {ierr = PetscObjectTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);} 1297 } 1298 if (Ad && n > 1) { 1299 PetscBool match,done; 1300 /* Try to setup a good matrix partitioning if available */ 1301 ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr); 1302 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 1303 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 1304 ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr); 1305 if (!match) { 1306 ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr); 1307 } 1308 if (!match) { /* assume a "good" partitioner is available */ 1309 PetscInt na; 1310 const PetscInt *ia,*ja; 1311 ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr); 1312 if (done) { 1313 /* Build adjacency matrix by hand. Unfortunately a call to 1314 MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will 1315 remove the block-aij structure and we cannot expect 1316 MatPartitioning to split vertices as we need */ 1317 PetscInt i,j,len,nnz,cnt,*iia=0,*jja=0; 1318 const PetscInt *row; 1319 nnz = 0; 1320 for (i=0; i<na; i++) { /* count number of nonzeros */ 1321 len = ia[i+1] - ia[i]; 1322 row = ja + ia[i]; 1323 for (j=0; j<len; j++) { 1324 if (row[j] == i) { /* don't count diagonal */ 1325 len--; break; 1326 } 1327 } 1328 nnz += len; 1329 } 1330 ierr = PetscMalloc((na+1)*sizeof(PetscInt),&iia);CHKERRQ(ierr); 1331 ierr = PetscMalloc((nnz)*sizeof(PetscInt),&jja);CHKERRQ(ierr); 1332 nnz = 0; 1333 iia[0] = 0; 1334 for (i=0; i<na; i++) { /* fill adjacency */ 1335 cnt = 0; 1336 len = ia[i+1] - ia[i]; 1337 row = ja + ia[i]; 1338 for (j=0; j<len; j++) { 1339 if (row[j] != i) { /* if not diagonal */ 1340 jja[nnz+cnt++] = row[j]; 1341 } 1342 } 1343 nnz += cnt; 1344 iia[i+1] = nnz; 1345 } 1346 /* Partitioning of the adjacency matrix */ 1347 ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,PETSC_NULL,&adj);CHKERRQ(ierr); 1348 ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr); 1349 ierr = MatPartitioningSetNParts(mpart,n);CHKERRQ(ierr); 1350 ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr); 1351 ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr); 1352 ierr = MatDestroy(&adj);CHKERRQ(ierr); 1353 foundpart = PETSC_TRUE; 1354 } 1355 ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr); 1356 } 1357 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 1358 } 1359 ierr = PetscMalloc(n*sizeof(IS),&is);CHKERRQ(ierr); 1360 if (!foundpart) { 1361 1362 /* Partitioning by contiguous chunks of rows */ 1363 1364 PetscInt mbs = (rend-rstart)/bs; 1365 PetscInt start = rstart; 1366 for (i=0; i<n; i++) { 1367 PetscInt count = (mbs/n + ((mbs % n) > i)) * bs; 1368 ierr = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr); 1369 start += count; 1370 } 1371 1372 } else { 1373 1374 /* Partitioning by adjacency of diagonal block */ 1375 1376 const PetscInt *numbering; 1377 PetscInt *count,nidx,*indices,*newidx,start=0; 1378 /* Get node count in each partition */ 1379 ierr = PetscMalloc(n*sizeof(PetscInt),&count);CHKERRQ(ierr); 1380 ierr = ISPartitioningCount(ispart,n,count);CHKERRQ(ierr); 1381 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1382 for (i=0; i<n; i++) count[i] *= bs; 1383 } 1384 /* Build indices from node numbering */ 1385 ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr); 1386 ierr = PetscMalloc(nidx*sizeof(PetscInt),&indices);CHKERRQ(ierr); 1387 for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */ 1388 ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr); 1389 ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr); 1390 ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr); 1391 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1392 ierr = PetscMalloc(nidx*bs*sizeof(PetscInt),&newidx);CHKERRQ(ierr); 1393 for (i=0; i<nidx; i++) 1394 for (j=0; j<bs; j++) 1395 newidx[i*bs+j] = indices[i]*bs + j; 1396 ierr = PetscFree(indices);CHKERRQ(ierr); 1397 nidx *= bs; 1398 indices = newidx; 1399 } 1400 /* Shift to get global indices */ 1401 for (i=0; i<nidx; i++) indices[i] += rstart; 1402 1403 /* Build the index sets for each block */ 1404 for (i=0; i<n; i++) { 1405 ierr = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr); 1406 ierr = ISSort(is[i]);CHKERRQ(ierr); 1407 start += count[i]; 1408 } 1409 1410 ierr = PetscFree(count); 1411 ierr = PetscFree(indices); 1412 ierr = ISDestroy(&isnumb);CHKERRQ(ierr); 1413 ierr = ISDestroy(&ispart);CHKERRQ(ierr); 1414 } 1415 *iis = is; 1416 if (!ois) PetscFunctionReturn(0); 1417 /* 1418 Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap 1419 has been requested, copy the inner subdomains over so they can be modified. 1420 */ 1421 ierr = PetscMalloc(n*sizeof(IS),ois);CHKERRQ(ierr); 1422 for (i=0; i<n; ++i) { 1423 if (overlap > 0) { /* With positive overlap, (*iis)[i] will be modified */ 1424 ierr = ISDuplicate((*iis)[i],(*ois)+i);CHKERRQ(ierr); 1425 ierr = ISCopy((*iis)[i],(*ois)[i]);CHKERRQ(ierr); 1426 } else { 1427 ierr = PetscObjectReference((PetscObject)(*iis)[i]);CHKERRQ(ierr); 1428 (*ois)[i] = (*iis)[i]; 1429 } 1430 } 1431 if (overlap > 0) { 1432 /* Extend the "overlapping" regions by a number of steps */ 1433 ierr = MatIncreaseOverlap(A,n,*ois,overlap);CHKERRQ(ierr); 1434 } 1435 PetscFunctionReturn(0); 1436 } 1437 1438 #undef __FUNCT__ 1439 #define __FUNCT__ "PCGASMDestroySubdomains" 1440 /*@C 1441 PCGASMDestroySubdomains - Destroys the index sets created with 1442 PCGASMCreateLocalSubdomains() or PCGASMCreateSubdomains2D. Should be 1443 called after setting subdomains with PCGASMSetSubdomains(). 1444 1445 Collective 1446 1447 Input Parameters: 1448 + n - the number of index sets 1449 . iis - the array of inner subdomains, 1450 - ois - the array of outer subdomains, can be PETSC_NULL 1451 1452 Level: intermediate 1453 1454 Notes: this is merely a convenience subroutine that walks each list, 1455 destroys each IS on the list, and then frees the list. 1456 1457 .keywords: PC, GASM, additive Schwarz, create, subdomains, unstructured grid 1458 1459 .seealso: PCGASMCreateLocalSubdomains(), PCGASMSetSubdomains() 1460 @*/ 1461 PetscErrorCode PCGASMDestroySubdomains(PetscInt n, IS iis[], IS ois[]) 1462 { 1463 PetscInt i; 1464 PetscErrorCode ierr; 1465 PetscFunctionBegin; 1466 if (n <= 0) PetscFunctionReturn(0); 1467 if (iis) { 1468 PetscValidPointer(iis,2); 1469 for (i=0; i<n; i++) { 1470 ierr = ISDestroy(&iis[i]);CHKERRQ(ierr); 1471 } 1472 ierr = PetscFree(iis);CHKERRQ(ierr); 1473 } 1474 if (ois) { 1475 for (i=0; i<n; i++) { 1476 ierr = ISDestroy(&ois[i]);CHKERRQ(ierr); 1477 } 1478 ierr = PetscFree(ois);CHKERRQ(ierr); 1479 } 1480 PetscFunctionReturn(0); 1481 } 1482 1483 1484 #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \ 1485 { \ 1486 PetscInt first_row = first/M, last_row = last/M+1; \ 1487 /* \ 1488 Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners \ 1489 of the bounding box of the intersection of the subdomain with the local ownership range (local \ 1490 subdomain). \ 1491 Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows \ 1492 of the intersection. \ 1493 */ \ 1494 /* ylow_loc is the grid row containing the first element of the local sumbdomain */ \ 1495 *ylow_loc = PetscMax(first_row,ylow); \ 1496 /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \ 1497 *xleft_loc = *ylow_loc==first_row?PetscMax(first%M,xleft):xleft; \ 1498 /* yhigh_loc is the grid row above the last local subdomain element */ \ 1499 *yhigh_loc = PetscMin(last_row,yhigh); \ 1500 /* xright is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \ 1501 *xright_loc = *yhigh_loc==last_row?PetscMin(xright,last%M):xright; \ 1502 /* Now compute the size of the local subdomain n. */ \ 1503 *n = 0; \ 1504 if (*ylow_loc < *yhigh_loc) { \ 1505 PetscInt width = xright-xleft; \ 1506 *n += width*(*yhigh_loc-*ylow_loc-1); \ 1507 *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \ 1508 *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \ 1509 }\ 1510 } 1511 1512 1513 1514 #undef __FUNCT__ 1515 #define __FUNCT__ "PCGASMCreateSubdomains2D" 1516 /*@ 1517 PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz 1518 preconditioner for a two-dimensional problem on a regular grid. 1519 1520 Collective 1521 1522 Input Parameters: 1523 + M, N - the global number of grid points in the x and y directions 1524 . Mdomains, Ndomains - the global number of subdomains in the x and y directions 1525 . dof - degrees of freedom per node 1526 - overlap - overlap in mesh lines 1527 1528 Output Parameters: 1529 + Nsub - the number of local subdomains created 1530 . iis - array of index sets defining inner (nonoverlapping) subdomains 1531 - ois - array of index sets defining outer (overlapping, if overlap > 0) subdomains 1532 1533 1534 Level: advanced 1535 1536 .keywords: PC, GASM, additive Schwarz, create, subdomains, 2D, regular grid 1537 1538 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetSubdomains(), PCGASMGetSubKSP(), 1539 PCGASMSetOverlap() 1540 @*/ 1541 PetscErrorCode PCGASMCreateSubdomains2D(PC pc, PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap, PetscInt *nsub,IS **iis,IS **ois) 1542 { 1543 PetscErrorCode ierr; 1544 PetscMPIInt size, rank; 1545 PetscInt i, j; 1546 PetscInt maxheight, maxwidth; 1547 PetscInt xstart, xleft, xright, xleft_loc, xright_loc; 1548 PetscInt ystart, ylow, yhigh, ylow_loc, yhigh_loc; 1549 PetscInt x[2][2], y[2][2], n[2]; 1550 PetscInt first, last; 1551 PetscInt nidx, *idx; 1552 PetscInt ii,jj,s,q,d; 1553 PetscInt k,kk; 1554 PetscMPIInt color; 1555 MPI_Comm comm, subcomm; 1556 IS **xis = 0, **is = ois, **is_local = iis; 1557 1558 PetscFunctionBegin; 1559 ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr); 1560 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1561 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1562 ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr); 1563 if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) " 1564 "does not respect the number of degrees of freedom per grid point %D", first, last, dof); 1565 1566 /* Determine the number of domains with nonzero intersections with the local ownership range. */ 1567 s = 0; 1568 ystart = 0; 1569 for (j=0; j<Ndomains; ++j) { 1570 maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */ 1571 if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N); 1572 /* Vertical domain limits with an overlap. */ 1573 ylow = PetscMax(ystart - overlap,0); 1574 yhigh = PetscMin(ystart + maxheight + overlap,N); 1575 xstart = 0; 1576 for (i=0; i<Mdomains; ++i) { 1577 maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */ 1578 if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M); 1579 /* Horizontal domain limits with an overlap. */ 1580 xleft = PetscMax(xstart - overlap,0); 1581 xright = PetscMin(xstart + maxwidth + overlap,M); 1582 /* 1583 Determine whether this subdomain intersects this processor's ownership range of pc->pmat. 1584 */ 1585 PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx)); 1586 if (nidx) { 1587 ++s; 1588 } 1589 xstart += maxwidth; 1590 }/* for (i = 0; i < Mdomains; ++i) */ 1591 ystart += maxheight; 1592 }/* for (j = 0; j < Ndomains; ++j) */ 1593 /* Now we can allocate the necessary number of ISs. */ 1594 *nsub = s; 1595 ierr = PetscMalloc((*nsub)*sizeof(IS*),is);CHKERRQ(ierr); 1596 ierr = PetscMalloc((*nsub)*sizeof(IS*),is_local);CHKERRQ(ierr); 1597 s = 0; 1598 ystart = 0; 1599 for (j=0; j<Ndomains; ++j) { 1600 maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */ 1601 if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N); 1602 /* Vertical domain limits with an overlap. */ 1603 y[0][0] = PetscMax(ystart - overlap,0); 1604 y[0][1] = PetscMin(ystart + maxheight + overlap,N); 1605 /* Vertical domain limits without an overlap. */ 1606 y[1][0] = ystart; 1607 y[1][1] = PetscMin(ystart + maxheight,N); 1608 xstart = 0; 1609 for (i=0; i<Mdomains; ++i) { 1610 maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */ 1611 if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M); 1612 /* Horizontal domain limits with an overlap. */ 1613 x[0][0] = PetscMax(xstart - overlap,0); 1614 x[0][1] = PetscMin(xstart + maxwidth + overlap,M); 1615 /* Horizontal domain limits without an overlap. */ 1616 x[1][0] = xstart; 1617 x[1][1] = PetscMin(xstart+maxwidth,M); 1618 /* 1619 Determine whether this domain intersects this processor's ownership range of pc->pmat. 1620 Do this twice: first for the domains with overlaps, and once without. 1621 During the first pass create the subcommunicators, and use them on the second pass as well. 1622 */ 1623 for (q = 0; q < 2; ++q) { 1624 /* 1625 domain limits, (xleft, xright) and (ylow, yheigh) are adjusted 1626 according to whether the domain with an overlap or without is considered. 1627 */ 1628 xleft = x[q][0]; xright = x[q][1]; 1629 ylow = y[q][0]; yhigh = y[q][1]; 1630 PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx)); 1631 nidx *= dof; 1632 n[q] = nidx; 1633 /* 1634 Based on the counted number of indices in the local domain *with an overlap*, 1635 construct a subcommunicator of all the processors supporting this domain. 1636 Observe that a domain with an overlap might have nontrivial local support, 1637 while the domain without an overlap might not. Hence, the decision to participate 1638 in the subcommunicator must be based on the domain with an overlap. 1639 */ 1640 if (q == 0) { 1641 if (nidx) { 1642 color = 1; 1643 } else { 1644 color = MPI_UNDEFINED; 1645 } 1646 ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr); 1647 } 1648 /* 1649 Proceed only if the number of local indices *with an overlap* is nonzero. 1650 */ 1651 if (n[0]) { 1652 if (q == 0) { 1653 xis = is; 1654 } 1655 if (q == 1) { 1656 /* 1657 The IS for the no-overlap subdomain shares a communicator with the overlapping domain. 1658 Moreover, if the overlap is zero, the two ISs are identical. 1659 */ 1660 if (overlap == 0) { 1661 (*is_local)[s] = (*is)[s]; 1662 ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr); 1663 continue; 1664 } else { 1665 xis = is_local; 1666 subcomm = ((PetscObject)(*is)[s])->comm; 1667 } 1668 }/* if (q == 1) */ 1669 idx = PETSC_NULL; 1670 ierr = PetscMalloc(nidx*sizeof(PetscInt),&idx);CHKERRQ(ierr); 1671 if (nidx) { 1672 k = 0; 1673 for (jj=ylow_loc; jj<yhigh_loc; ++jj) { 1674 PetscInt x0 = (jj==ylow_loc)?xleft_loc:xleft; 1675 PetscInt x1 = (jj==yhigh_loc-1)?xright_loc:xright; 1676 kk = dof*(M*jj + x0); 1677 for (ii=x0; ii<x1; ++ii) { 1678 for (d = 0; d < dof; ++d) { 1679 idx[k++] = kk++; 1680 } 1681 } 1682 } 1683 } 1684 ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr); 1685 }/* if (n[0]) */ 1686 }/* for (q = 0; q < 2; ++q) */ 1687 if (n[0]) { 1688 ++s; 1689 } 1690 xstart += maxwidth; 1691 }/* for (i = 0; i < Mdomains; ++i) */ 1692 ystart += maxheight; 1693 }/* for (j = 0; j < Ndomains; ++j) */ 1694 PetscFunctionReturn(0); 1695 } 1696 1697 #undef __FUNCT__ 1698 #define __FUNCT__ "PCGASMGetSubdomains" 1699 /*@C 1700 PCGASMGetSubdomains - Gets the subdomains supported on this processor 1701 for the additive Schwarz preconditioner. 1702 1703 Not Collective 1704 1705 Input Parameter: 1706 . pc - the preconditioner context 1707 1708 Output Parameters: 1709 + n - the number of subdomains for this processor (default value = 1) 1710 . iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be PETSC_NULL) 1711 - ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be PETSC_NULL) 1712 1713 1714 Notes: 1715 The user is responsible for destroying the ISs and freeing the returned arrays. 1716 The IS numbering is in the parallel, global numbering of the vector. 1717 1718 Level: advanced 1719 1720 .keywords: PC, GASM, get, subdomains, additive Schwarz 1721 1722 .seealso: PCGASMSetTotalSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(), 1723 PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubmatrices() 1724 @*/ 1725 PetscErrorCode PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[]) 1726 { 1727 PC_GASM *osm; 1728 PetscErrorCode ierr; 1729 PetscBool match; 1730 PetscInt i; 1731 PetscFunctionBegin; 1732 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1733 ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr); 1734 if (!match) 1735 SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name); 1736 osm = (PC_GASM*)pc->data; 1737 if (n) *n = osm->n; 1738 if (iis) { 1739 ierr = PetscMalloc(osm->n*sizeof(IS), iis);CHKERRQ(ierr); 1740 } 1741 if (ois) { 1742 ierr = PetscMalloc(osm->n*sizeof(IS), ois);CHKERRQ(ierr); 1743 } 1744 if (iis || ois) { 1745 for (i = 0; i < osm->n; ++i) { 1746 if (iis) (*iis)[i] = osm->iis[i]; 1747 if (ois) (*ois)[i] = osm->ois[i]; 1748 } 1749 } 1750 PetscFunctionReturn(0); 1751 } 1752 1753 #undef __FUNCT__ 1754 #define __FUNCT__ "PCGASMGetSubmatrices" 1755 /*@C 1756 PCGASMGetSubmatrices - Gets the local submatrices (for this processor 1757 only) for the additive Schwarz preconditioner. 1758 1759 Not Collective 1760 1761 Input Parameter: 1762 . pc - the preconditioner context 1763 1764 Output Parameters: 1765 + n - the number of matrices for this processor (default value = 1) 1766 - mat - the matrices 1767 1768 Notes: matrices returned by this routine have the same communicators as the index sets (IS) 1769 used to define subdomains in PCGASMSetSubdomains(), or PETSC_COMM_SELF, if the 1770 subdomains were defined using PCGASMSetTotalSubdomains(). 1771 Level: advanced 1772 1773 .keywords: PC, GASM, set, local, subdomains, additive Schwarz, block Jacobi 1774 1775 .seealso: PCGASMSetTotalSubdomain(), PCGASMSetOverlap(), PCGASMGetSubKSP(), 1776 PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains() 1777 @*/ 1778 PetscErrorCode PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[]) 1779 { 1780 PC_GASM *osm; 1781 PetscErrorCode ierr; 1782 PetscBool match; 1783 1784 PetscFunctionBegin; 1785 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1786 PetscValidIntPointer(n,2); 1787 if (mat) PetscValidPointer(mat,3); 1788 if (!pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUP() or PCSetUp()."); 1789 ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr); 1790 if (!match) SETERRQ2(((PetscObject)pc)->comm, PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name); 1791 osm = (PC_GASM*)pc->data; 1792 if (n) *n = osm->n; 1793 if (mat) *mat = osm->pmat; 1794 1795 PetscFunctionReturn(0); 1796 } 1797