1 /* 2 This file defines an "generalized" additive Schwarz preconditioner for any Mat implementation. 3 In this version each processor may intersect multiple subdomains and any subdomain may 4 intersect multiple processors. Intersections of subdomains with processors are called *local 5 subdomains*. 6 7 N - total number of distinct global subdomains (set explicitly in PCGASMSetTotalSubdomains() or implicitly PCGASMSetSubdomains() and then calculated in PCSetUp_GASM()) 8 n - actual number of local subdomains on this processor (set in PCGASMSetSubdomains() or calculated in PCGASMSetTotalSubdomains()) 9 nmax - maximum number of local subdomains per processor (calculated in PCSetUp_GASM()) 10 */ 11 #include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/ 12 #include <petscdm.h> 13 14 typedef struct { 15 PetscInt N,n,nmax; 16 PetscInt overlap; /* overlap requested by user */ 17 PCGASMType type; /* use reduced interpolation, restriction or both */ 18 PetscBool type_set; /* if user set this value (so won't change it for symmetric problems) */ 19 PetscBool same_subdomain_solvers; /* flag indicating whether all local solvers are same */ 20 PetscBool sort_indices; /* flag to sort subdomain indices */ 21 PetscBool user_subdomains; /* whether the user set explicit subdomain index sets -- keep them on PCReset() */ 22 PetscBool dm_subdomains; /* whether DM is allowed to define subdomains */ 23 PetscBool hierarchicalpartitioning; 24 IS *ois; /* index sets that define the outer (conceptually, overlapping) subdomains */ 25 IS *iis; /* index sets that define the inner (conceptually, nonoverlapping) subdomains */ 26 KSP *ksp; /* linear solvers for each subdomain */ 27 Mat *pmat; /* subdomain block matrices */ 28 Vec gx,gy; /* Merged work vectors */ 29 Vec *x,*y; /* Split work vectors; storage aliases pieces of storage of the above merged vectors. */ 30 VecScatter gorestriction; /* merged restriction to disjoint union of outer subdomains */ 31 VecScatter girestriction; /* merged restriction to disjoint union of inner subdomains */ 32 VecScatter pctoouter; 33 IS permutationIS; 34 Mat permutationP; 35 Mat pcmat; 36 Vec pcx,pcy; 37 } PC_GASM; 38 39 static PetscErrorCode PCGASMComputeGlobalSubdomainNumbering_Private(PC pc,PetscInt **numbering,PetscInt **permutation) 40 { 41 PC_GASM *osm = (PC_GASM*)pc->data; 42 PetscInt i; 43 PetscErrorCode ierr; 44 45 PetscFunctionBegin; 46 /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */ 47 ierr = PetscMalloc2(osm->n,numbering,osm->n,permutation);CHKERRQ(ierr); 48 ierr = PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc),osm->n,(PetscObject*)osm->iis,NULL,*numbering);CHKERRQ(ierr); 49 for (i = 0; i < osm->n; ++i) (*permutation)[i] = i; 50 ierr = PetscSortIntWithPermutation(osm->n,*numbering,*permutation);CHKERRQ(ierr); 51 PetscFunctionReturn(0); 52 } 53 54 static PetscErrorCode PCGASMSubdomainView_Private(PC pc, PetscInt i, PetscViewer viewer) 55 { 56 PC_GASM *osm = (PC_GASM*)pc->data; 57 PetscInt j,nidx; 58 const PetscInt *idx; 59 PetscViewer sviewer; 60 char *cidx; 61 PetscErrorCode ierr; 62 63 PetscFunctionBegin; 64 if (i < 0 || i > osm->n) SETERRQ2(PetscObjectComm((PetscObject)viewer), PETSC_ERR_ARG_WRONG, "Invalid subdomain %D: must nonnegative and less than %D", i, osm->n); 65 /* Inner subdomains. */ 66 ierr = ISGetLocalSize(osm->iis[i], &nidx);CHKERRQ(ierr); 67 /* 68 No more than 15 characters per index plus a space. 69 PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx, 70 in case nidx == 0. That will take care of the space for the trailing '\0' as well. 71 For nidx == 0, the whole string 16 '\0'. 72 */ 73 #define len 16*(nidx+1)+1 74 ierr = PetscMalloc1(len, &cidx);CHKERRQ(ierr); 75 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, len, &sviewer);CHKERRQ(ierr); 76 #undef len 77 ierr = ISGetIndices(osm->iis[i], &idx);CHKERRQ(ierr); 78 for (j = 0; j < nidx; ++j) { 79 ierr = PetscViewerStringSPrintf(sviewer, "%D ", idx[j]);CHKERRQ(ierr); 80 } 81 ierr = ISRestoreIndices(osm->iis[i],&idx);CHKERRQ(ierr); 82 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 83 ierr = PetscViewerASCIIPrintf(viewer, "Inner subdomain:\n");CHKERRQ(ierr); 84 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 85 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 86 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr); 87 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 88 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 89 ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr); 90 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 91 ierr = PetscFree(cidx);CHKERRQ(ierr); 92 /* Outer subdomains. */ 93 ierr = ISGetLocalSize(osm->ois[i], &nidx);CHKERRQ(ierr); 94 /* 95 No more than 15 characters per index plus a space. 96 PetscViewerStringSPrintf requires a string of size at least 2, so use (nidx+1) instead of nidx, 97 in case nidx == 0. That will take care of the space for the trailing '\0' as well. 98 For nidx == 0, the whole string 16 '\0'. 99 */ 100 #define len 16*(nidx+1)+1 101 ierr = PetscMalloc1(len, &cidx);CHKERRQ(ierr); 102 ierr = PetscViewerStringOpen(PETSC_COMM_SELF, cidx, len, &sviewer);CHKERRQ(ierr); 103 #undef len 104 ierr = ISGetIndices(osm->ois[i], &idx);CHKERRQ(ierr); 105 for (j = 0; j < nidx; ++j) { 106 ierr = PetscViewerStringSPrintf(sviewer,"%D ", idx[j]);CHKERRQ(ierr); 107 } 108 ierr = PetscViewerDestroy(&sviewer);CHKERRQ(ierr); 109 ierr = ISRestoreIndices(osm->ois[i],&idx);CHKERRQ(ierr); 110 ierr = PetscViewerASCIIPrintf(viewer, "Outer subdomain:\n");CHKERRQ(ierr); 111 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 112 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 113 ierr = PetscViewerASCIISynchronizedPrintf(viewer, "%s", cidx);CHKERRQ(ierr); 114 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 115 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 116 ierr = PetscViewerASCIIPrintf(viewer, "\n");CHKERRQ(ierr); 117 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 118 ierr = PetscFree(cidx);CHKERRQ(ierr); 119 PetscFunctionReturn(0); 120 } 121 122 static PetscErrorCode PCGASMPrintSubdomains(PC pc) 123 { 124 PC_GASM *osm = (PC_GASM*)pc->data; 125 const char *prefix; 126 char fname[PETSC_MAX_PATH_LEN+1]; 127 PetscInt l, d, count; 128 PetscBool doprint,found; 129 PetscViewer viewer, sviewer = NULL; 130 PetscInt *numbering,*permutation;/* global numbering of locally-supported subdomains and the permutation from the local ordering */ 131 PetscErrorCode ierr; 132 133 PetscFunctionBegin; 134 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 135 doprint = PETSC_FALSE; 136 ierr = PetscOptionsGetBool(NULL,prefix,"-pc_gasm_print_subdomains",&doprint,NULL);CHKERRQ(ierr); 137 if (!doprint) PetscFunctionReturn(0); 138 ierr = PetscOptionsGetString(NULL,prefix,"-pc_gasm_print_subdomains",fname,PETSC_MAX_PATH_LEN,&found);CHKERRQ(ierr); 139 if (!found) { ierr = PetscStrcpy(fname,"stdout");CHKERRQ(ierr); }; 140 ierr = PetscViewerASCIIOpen(PetscObjectComm((PetscObject)pc),fname,&viewer);CHKERRQ(ierr); 141 /* 142 Make sure the viewer has a name. Otherwise this may cause a deadlock or other weird errors when creating a subcomm viewer: 143 the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed collectively on the comm. 144 */ 145 ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr); 146 l = 0; 147 ierr = PCGASMComputeGlobalSubdomainNumbering_Private(pc,&numbering,&permutation);CHKERRQ(ierr); 148 for (count = 0; count < osm->N; ++count) { 149 /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */ 150 if (l<osm->n) { 151 d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */ 152 if (numbering[d] == count) { 153 ierr = PetscViewerGetSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 154 ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr); 155 ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 156 ++l; 157 } 158 } 159 ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 160 } 161 ierr = PetscFree2(numbering,permutation);CHKERRQ(ierr); 162 ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr); 163 PetscFunctionReturn(0); 164 } 165 166 167 static PetscErrorCode PCView_GASM(PC pc,PetscViewer viewer) 168 { 169 PC_GASM *osm = (PC_GASM*)pc->data; 170 const char *prefix; 171 PetscErrorCode ierr; 172 PetscMPIInt rank, size; 173 PetscInt bsz; 174 PetscBool iascii,view_subdomains=PETSC_FALSE; 175 PetscViewer sviewer; 176 PetscInt count, l; 177 char overlap[256] = "user-defined overlap"; 178 char gsubdomains[256] = "unknown total number of subdomains"; 179 char msubdomains[256] = "unknown max number of local subdomains"; 180 PetscInt *numbering,*permutation;/* global numbering of locally-supported subdomains and the permutation from the local ordering */ 181 182 PetscFunctionBegin; 183 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size);CHKERRQ(ierr); 184 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank);CHKERRQ(ierr); 185 186 if (osm->overlap >= 0) { 187 ierr = PetscSNPrintf(overlap,sizeof(overlap),"requested amount of overlap = %D",osm->overlap);CHKERRQ(ierr); 188 } 189 if (osm->N != PETSC_DETERMINE) { 190 ierr = PetscSNPrintf(gsubdomains, sizeof(gsubdomains), "total number of subdomains = %D",osm->N);CHKERRQ(ierr); 191 } 192 if (osm->nmax != PETSC_DETERMINE) { 193 ierr = PetscSNPrintf(msubdomains,sizeof(msubdomains),"max number of local subdomains = %D",osm->nmax);CHKERRQ(ierr); 194 } 195 196 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 197 ierr = PetscOptionsGetBool(NULL,prefix,"-pc_gasm_view_subdomains",&view_subdomains,NULL);CHKERRQ(ierr); 198 199 ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 200 if (iascii) { 201 /* 202 Make sure the viewer has a name. Otherwise this may cause a deadlock when creating a subcomm viewer: 203 the subcomm viewer will attempt to inherit the viewer's name, which, if not set, will be constructed 204 collectively on the comm. 205 */ 206 ierr = PetscObjectName((PetscObject)viewer);CHKERRQ(ierr); 207 ierr = PetscViewerASCIIPrintf(viewer," Restriction/interpolation type: %s\n",PCGASMTypes[osm->type]);CHKERRQ(ierr); 208 ierr = PetscViewerASCIIPrintf(viewer," %s\n",overlap);CHKERRQ(ierr); 209 ierr = PetscViewerASCIIPrintf(viewer," %s\n",gsubdomains);CHKERRQ(ierr); 210 ierr = PetscViewerASCIIPrintf(viewer," %s\n",msubdomains);CHKERRQ(ierr); 211 ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr); 212 ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d|%d] number of locally-supported subdomains = %D\n",rank,size,osm->n);CHKERRQ(ierr); 213 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 214 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 215 /* Cannot take advantage of osm->same_subdomain_solvers without a global numbering of subdomains. */ 216 ierr = PetscViewerASCIIPrintf(viewer," Subdomain solver info is as follows:\n");CHKERRQ(ierr); 217 ierr = PetscViewerASCIIPushTab(viewer);CHKERRQ(ierr); 218 ierr = PetscViewerASCIIPrintf(viewer," - - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 219 /* Make sure that everybody waits for the banner to be printed. */ 220 ierr = MPI_Barrier(PetscObjectComm((PetscObject)viewer));CHKERRQ(ierr); 221 /* Now let subdomains go one at a time in the global numbering order and print their subdomain/solver info. */ 222 ierr = PCGASMComputeGlobalSubdomainNumbering_Private(pc,&numbering,&permutation);CHKERRQ(ierr); 223 l = 0; 224 for (count = 0; count < osm->N; ++count) { 225 PetscMPIInt srank, ssize; 226 if (l<osm->n) { 227 PetscInt d = permutation[l]; /* d is the local number of the l-th smallest (in the global ordering) among the locally supported subdomains */ 228 if (numbering[d] == count) { 229 ierr = MPI_Comm_size(((PetscObject)osm->ois[d])->comm, &ssize);CHKERRQ(ierr); 230 ierr = MPI_Comm_rank(((PetscObject)osm->ois[d])->comm, &srank);CHKERRQ(ierr); 231 ierr = PetscViewerGetSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 232 ierr = ISGetLocalSize(osm->ois[d],&bsz);CHKERRQ(ierr); 233 ierr = PetscViewerASCIISynchronizedPrintf(sviewer," [%d|%d] (subcomm [%d|%d]) local subdomain number %D, local size = %D\n",rank,size,srank,ssize,d,bsz);CHKERRQ(ierr); 234 ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr); 235 if (view_subdomains) { 236 ierr = PCGASMSubdomainView_Private(pc,d,sviewer);CHKERRQ(ierr); 237 } 238 if (!pc->setupcalled) { 239 ierr = PetscViewerASCIIPrintf(sviewer, " Solver not set up yet: PCSetUp() not yet called\n");CHKERRQ(ierr); 240 } else { 241 ierr = KSPView(osm->ksp[d],sviewer);CHKERRQ(ierr); 242 } 243 ierr = PetscViewerASCIIPrintf(sviewer," - - - - - - - - - - - - - - - - - -\n");CHKERRQ(ierr); 244 ierr = PetscViewerFlush(sviewer);CHKERRQ(ierr); 245 ierr = PetscViewerRestoreSubViewer(viewer,((PetscObject)osm->ois[d])->comm, &sviewer);CHKERRQ(ierr); 246 ++l; 247 } 248 } 249 ierr = MPI_Barrier(PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 250 } 251 ierr = PetscFree2(numbering,permutation);CHKERRQ(ierr); 252 ierr = PetscViewerASCIIPopTab(viewer);CHKERRQ(ierr); 253 } 254 ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 255 ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr); 256 PetscFunctionReturn(0); 257 } 258 259 PETSC_INTERN PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[]); 260 261 262 263 PetscErrorCode PCGASMSetHierarchicalPartitioning(PC pc) 264 { 265 PC_GASM *osm = (PC_GASM*)pc->data; 266 MatPartitioning part; 267 MPI_Comm comm; 268 PetscMPIInt size; 269 PetscInt nlocalsubdomains,fromrows_localsize; 270 IS partitioning,fromrows,isn; 271 Vec outervec; 272 PetscErrorCode ierr; 273 274 PetscFunctionBegin; 275 ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr); 276 ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 277 /* we do not need a hierarchical partitioning when 278 * the total number of subdomains is consistent with 279 * the number of MPI tasks. 280 * For the following cases, we do not need to use HP 281 * */ 282 if(osm->N==PETSC_DETERMINE || osm->N>=size || osm->N==1) PetscFunctionReturn(0); 283 if(size%osm->N != 0) SETERRQ2(PETSC_COMM_WORLD,PETSC_ERR_ARG_INCOMP,"have to specify the total number of subdomains %D to be a factor of the number of processors %d \n",osm->N,size); 284 nlocalsubdomains = size/osm->N; 285 osm->n = 1; 286 ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr); 287 ierr = MatPartitioningSetAdjacency(part,pc->pmat);CHKERRQ(ierr); 288 ierr = MatPartitioningSetType(part,MATPARTITIONINGHIERARCH);CHKERRQ(ierr); 289 ierr = MatPartitioningHierarchicalSetNcoarseparts(part,osm->N);CHKERRQ(ierr); 290 ierr = MatPartitioningHierarchicalSetNfineparts(part,nlocalsubdomains);CHKERRQ(ierr); 291 ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr); 292 /* get new processor owner number of each vertex */ 293 ierr = MatPartitioningApply(part,&partitioning);CHKERRQ(ierr); 294 ierr = ISBuildTwoSided(partitioning,NULL,&fromrows);CHKERRQ(ierr); 295 ierr = ISPartitioningToNumbering(partitioning,&isn);CHKERRQ(ierr); 296 ierr = ISDestroy(&isn);CHKERRQ(ierr); 297 ierr = ISGetLocalSize(fromrows,&fromrows_localsize);CHKERRQ(ierr); 298 ierr = MatPartitioningDestroy(&part);CHKERRQ(ierr); 299 ierr = MatCreateVecs(pc->pmat,&outervec,NULL);CHKERRQ(ierr); 300 ierr = VecCreateMPI(comm,fromrows_localsize,PETSC_DETERMINE,&(osm->pcx));CHKERRQ(ierr); 301 ierr = VecDuplicate(osm->pcx,&(osm->pcy));CHKERRQ(ierr); 302 ierr = VecScatterCreate(osm->pcx,NULL,outervec,fromrows,&(osm->pctoouter));CHKERRQ(ierr); 303 ierr = MatCreateSubMatrix(pc->pmat,fromrows,fromrows,MAT_INITIAL_MATRIX,&(osm->permutationP));CHKERRQ(ierr); 304 ierr = PetscObjectReference((PetscObject)fromrows);CHKERRQ(ierr); 305 osm->permutationIS = fromrows; 306 osm->pcmat = pc->pmat; 307 ierr = PetscObjectReference((PetscObject)osm->permutationP);CHKERRQ(ierr); 308 pc->pmat = osm->permutationP; 309 ierr = VecDestroy(&outervec);CHKERRQ(ierr); 310 ierr = ISDestroy(&fromrows);CHKERRQ(ierr); 311 ierr = ISDestroy(&partitioning);CHKERRQ(ierr); 312 osm->n = PETSC_DETERMINE; 313 PetscFunctionReturn(0); 314 } 315 316 317 318 static PetscErrorCode PCSetUp_GASM(PC pc) 319 { 320 PC_GASM *osm = (PC_GASM*)pc->data; 321 PetscErrorCode ierr; 322 PetscInt i,nInnerIndices,nTotalInnerIndices; 323 PetscMPIInt rank, size; 324 MatReuse scall = MAT_REUSE_MATRIX; 325 KSP ksp; 326 PC subpc; 327 const char *prefix,*pprefix; 328 Vec x,y; 329 PetscInt oni; /* Number of indices in the i-th local outer subdomain. */ 330 const PetscInt *oidxi; /* Indices from the i-th subdomain local outer subdomain. */ 331 PetscInt on; /* Number of indices in the disjoint union of local outer subdomains. */ 332 PetscInt *oidx; /* Indices in the disjoint union of local outer subdomains. */ 333 IS gois; /* Disjoint union the global indices of outer subdomains. */ 334 IS goid; /* Identity IS of the size of the disjoint union of outer subdomains. */ 335 PetscScalar *gxarray, *gyarray; 336 PetscInt gostart; /* Start of locally-owned indices in the vectors -- osm->gx,osm->gy -- over the disjoint union of outer subdomains. */ 337 PetscInt num_subdomains = 0; 338 DM *subdomain_dm = NULL; 339 char **subdomain_names = NULL; 340 PetscInt *numbering; 341 342 343 PetscFunctionBegin; 344 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 345 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 346 if (!pc->setupcalled) { 347 /* use a hierarchical partitioning */ 348 if(osm->hierarchicalpartitioning){ 349 ierr = PCGASMSetHierarchicalPartitioning(pc);CHKERRQ(ierr); 350 } 351 if (osm->n == PETSC_DETERMINE) { 352 if (osm->N != PETSC_DETERMINE) { 353 /* No local subdomains given, but the desired number of total subdomains is known, so construct them accordingly. */ 354 ierr = PCGASMCreateSubdomains(pc->pmat,osm->N,&osm->n,&osm->iis);CHKERRQ(ierr); 355 } else if (osm->dm_subdomains && pc->dm) { 356 /* try pc->dm next, if allowed */ 357 PetscInt d; 358 IS *inner_subdomain_is, *outer_subdomain_is; 359 ierr = DMCreateDomainDecomposition(pc->dm, &num_subdomains, &subdomain_names, &inner_subdomain_is, &outer_subdomain_is, &subdomain_dm);CHKERRQ(ierr); 360 if (num_subdomains) { 361 ierr = PCGASMSetSubdomains(pc, num_subdomains, inner_subdomain_is, outer_subdomain_is);CHKERRQ(ierr); 362 } 363 for (d = 0; d < num_subdomains; ++d) { 364 if (inner_subdomain_is) {ierr = ISDestroy(&inner_subdomain_is[d]);CHKERRQ(ierr);} 365 if (outer_subdomain_is) {ierr = ISDestroy(&outer_subdomain_is[d]);CHKERRQ(ierr);} 366 } 367 ierr = PetscFree(inner_subdomain_is);CHKERRQ(ierr); 368 ierr = PetscFree(outer_subdomain_is);CHKERRQ(ierr); 369 } else { 370 /* still no subdomains; use one per processor */ 371 osm->nmax = osm->n = 1; 372 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 373 osm->N = size; 374 ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->n,&osm->iis);CHKERRQ(ierr); 375 } 376 } 377 if (!osm->iis) { 378 /* 379 osm->n was set in PCGASMSetSubdomains(), but the actual subdomains have not been supplied. 380 We create the requisite number of local inner subdomains and then expand them into 381 out subdomains, if necessary. 382 */ 383 ierr = PCGASMCreateLocalSubdomains(pc->pmat,osm->n,&osm->iis);CHKERRQ(ierr); 384 } 385 if (!osm->ois) { 386 /* 387 Initially make outer subdomains the same as inner subdomains. If nonzero additional overlap 388 has been requested, copy the inner subdomains over so they can be modified. 389 */ 390 ierr = PetscMalloc1(osm->n,&osm->ois);CHKERRQ(ierr); 391 for (i=0; i<osm->n; ++i) { 392 if (osm->overlap > 0 && osm->N>1) { /* With positive overlap, osm->iis[i] will be modified */ 393 ierr = ISDuplicate(osm->iis[i],(osm->ois)+i);CHKERRQ(ierr); 394 ierr = ISCopy(osm->iis[i],osm->ois[i]);CHKERRQ(ierr); 395 } else { 396 ierr = PetscObjectReference((PetscObject)((osm->iis)[i]));CHKERRQ(ierr); 397 osm->ois[i] = osm->iis[i]; 398 } 399 } 400 if (osm->overlap>0 && osm->N>1) { 401 /* Extend the "overlapping" regions by a number of steps */ 402 ierr = MatIncreaseOverlapSplit(pc->pmat,osm->n,osm->ois,osm->overlap);CHKERRQ(ierr); 403 } 404 } 405 406 /* Now the subdomains are defined. Determine their global and max local numbers, if necessary. */ 407 if (osm->nmax == PETSC_DETERMINE) { 408 PetscMPIInt inwork,outwork; 409 /* determine global number of subdomains and the max number of local subdomains */ 410 inwork = osm->n; 411 ierr = MPIU_Allreduce(&inwork,&outwork,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 412 osm->nmax = outwork; 413 } 414 if (osm->N == PETSC_DETERMINE) { 415 /* Determine the number of globally-distinct subdomains and compute a global numbering for them. */ 416 ierr = PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc),osm->n,(PetscObject*)osm->ois,&osm->N,NULL);CHKERRQ(ierr); 417 } 418 419 420 if (osm->sort_indices) { 421 for (i=0; i<osm->n; i++) { 422 ierr = ISSort(osm->ois[i]);CHKERRQ(ierr); 423 ierr = ISSort(osm->iis[i]);CHKERRQ(ierr); 424 } 425 } 426 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 427 ierr = PCGASMPrintSubdomains(pc);CHKERRQ(ierr); 428 429 /* 430 Merge the ISs, create merged vectors and restrictions. 431 */ 432 /* Merge outer subdomain ISs and construct a restriction onto the disjoint union of local outer subdomains. */ 433 on = 0; 434 for (i=0; i<osm->n; i++) { 435 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 436 on += oni; 437 } 438 ierr = PetscMalloc1(on, &oidx);CHKERRQ(ierr); 439 on = 0; 440 /* Merge local indices together */ 441 for (i=0; i<osm->n; i++) { 442 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 443 ierr = ISGetIndices(osm->ois[i],&oidxi);CHKERRQ(ierr); 444 ierr = PetscArraycpy(oidx+on,oidxi,oni);CHKERRQ(ierr); 445 ierr = ISRestoreIndices(osm->ois[i],&oidxi);CHKERRQ(ierr); 446 on += oni; 447 } 448 ierr = ISCreateGeneral(((PetscObject)(pc))->comm,on,oidx,PETSC_OWN_POINTER,&gois);CHKERRQ(ierr); 449 nTotalInnerIndices = 0; 450 for(i=0; i<osm->n; i++){ 451 ierr = ISGetLocalSize(osm->iis[i],&nInnerIndices);CHKERRQ(ierr); 452 nTotalInnerIndices += nInnerIndices; 453 } 454 ierr = VecCreateMPI(((PetscObject)(pc))->comm,nTotalInnerIndices,PETSC_DETERMINE,&x);CHKERRQ(ierr); 455 ierr = VecDuplicate(x,&y);CHKERRQ(ierr); 456 457 ierr = VecCreateMPI(PetscObjectComm((PetscObject)pc),on,PETSC_DECIDE,&osm->gx);CHKERRQ(ierr); 458 ierr = VecDuplicate(osm->gx,&osm->gy);CHKERRQ(ierr); 459 ierr = VecGetOwnershipRange(osm->gx, &gostart, NULL);CHKERRQ(ierr); 460 ierr = ISCreateStride(PetscObjectComm((PetscObject)pc),on,gostart,1, &goid);CHKERRQ(ierr); 461 /* gois might indices not on local */ 462 ierr = VecScatterCreate(x,gois,osm->gx,goid, &(osm->gorestriction));CHKERRQ(ierr); 463 ierr = PetscMalloc1(osm->n,&numbering);CHKERRQ(ierr); 464 ierr = PetscObjectsListGetGlobalNumbering(PetscObjectComm((PetscObject)pc),osm->n,(PetscObject*)osm->ois,NULL,numbering);CHKERRQ(ierr); 465 ierr = VecDestroy(&x);CHKERRQ(ierr); 466 ierr = ISDestroy(&gois);CHKERRQ(ierr); 467 468 /* Merge inner subdomain ISs and construct a restriction onto the disjoint union of local inner subdomains. */ 469 { 470 PetscInt ini; /* Number of indices the i-th a local inner subdomain. */ 471 PetscInt in; /* Number of indices in the disjoint union of local inner subdomains. */ 472 PetscInt *iidx; /* Global indices in the merged local inner subdomain. */ 473 PetscInt *ioidx; /* Global indices of the disjoint union of inner subdomains within the disjoint union of outer subdomains. */ 474 IS giis; /* IS for the disjoint union of inner subdomains. */ 475 IS giois; /* IS for the disjoint union of inner subdomains within the disjoint union of outer subdomains. */ 476 PetscScalar *array; 477 const PetscInt *indices; 478 PetscInt k; 479 on = 0; 480 for (i=0; i<osm->n; i++) { 481 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 482 on += oni; 483 } 484 ierr = PetscMalloc1(on, &iidx);CHKERRQ(ierr); 485 ierr = PetscMalloc1(on, &ioidx);CHKERRQ(ierr); 486 ierr = VecGetArray(y,&array);CHKERRQ(ierr); 487 /* set communicator id to determine where overlap is */ 488 in = 0; 489 for (i=0; i<osm->n; i++) { 490 ierr = ISGetLocalSize(osm->iis[i],&ini);CHKERRQ(ierr); 491 for (k = 0; k < ini; ++k){ 492 array[in+k] = numbering[i]; 493 } 494 in += ini; 495 } 496 ierr = VecRestoreArray(y,&array);CHKERRQ(ierr); 497 ierr = VecScatterBegin(osm->gorestriction,y,osm->gy,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 498 ierr = VecScatterEnd(osm->gorestriction,y,osm->gy,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 499 ierr = VecGetOwnershipRange(osm->gy,&gostart, NULL);CHKERRQ(ierr); 500 ierr = VecGetArray(osm->gy,&array);CHKERRQ(ierr); 501 on = 0; 502 in = 0; 503 for (i=0; i<osm->n; i++) { 504 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 505 ierr = ISGetIndices(osm->ois[i],&indices);CHKERRQ(ierr); 506 for (k=0; k<oni; k++) { 507 /* skip overlapping indices to get inner domain */ 508 if(PetscRealPart(array[on+k]) != numbering[i]) continue; 509 iidx[in] = indices[k]; 510 ioidx[in++] = gostart+on+k; 511 } 512 ierr = ISRestoreIndices(osm->ois[i], &indices);CHKERRQ(ierr); 513 on += oni; 514 } 515 ierr = VecRestoreArray(osm->gy,&array);CHKERRQ(ierr); 516 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc),in,iidx,PETSC_OWN_POINTER,&giis);CHKERRQ(ierr); 517 ierr = ISCreateGeneral(PetscObjectComm((PetscObject)pc),in,ioidx,PETSC_OWN_POINTER,&giois);CHKERRQ(ierr); 518 ierr = VecScatterCreate(y,giis,osm->gy,giois,&osm->girestriction);CHKERRQ(ierr); 519 ierr = VecDestroy(&y);CHKERRQ(ierr); 520 ierr = ISDestroy(&giis);CHKERRQ(ierr); 521 ierr = ISDestroy(&giois);CHKERRQ(ierr); 522 } 523 ierr = ISDestroy(&goid);CHKERRQ(ierr); 524 ierr = PetscFree(numbering);CHKERRQ(ierr); 525 526 /* Create the subdomain work vectors. */ 527 ierr = PetscMalloc1(osm->n,&osm->x);CHKERRQ(ierr); 528 ierr = PetscMalloc1(osm->n,&osm->y);CHKERRQ(ierr); 529 ierr = VecGetArray(osm->gx, &gxarray);CHKERRQ(ierr); 530 ierr = VecGetArray(osm->gy, &gyarray);CHKERRQ(ierr); 531 for (i=0, on=0; i<osm->n; ++i, on += oni) { 532 PetscInt oNi; 533 ierr = ISGetLocalSize(osm->ois[i],&oni);CHKERRQ(ierr); 534 /* on a sub communicator */ 535 ierr = ISGetSize(osm->ois[i],&oNi);CHKERRQ(ierr); 536 ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gxarray+on,&osm->x[i]);CHKERRQ(ierr); 537 ierr = VecCreateMPIWithArray(((PetscObject)(osm->ois[i]))->comm,1,oni,oNi,gyarray+on,&osm->y[i]);CHKERRQ(ierr); 538 } 539 ierr = VecRestoreArray(osm->gx, &gxarray);CHKERRQ(ierr); 540 ierr = VecRestoreArray(osm->gy, &gyarray);CHKERRQ(ierr); 541 /* Create the subdomain solvers */ 542 ierr = PetscMalloc1(osm->n,&osm->ksp);CHKERRQ(ierr); 543 for (i=0; i<osm->n; i++) { 544 char subprefix[PETSC_MAX_PATH_LEN+1]; 545 ierr = KSPCreate(((PetscObject)(osm->ois[i]))->comm,&ksp);CHKERRQ(ierr); 546 ierr = KSPSetErrorIfNotConverged(ksp,pc->erroriffailure);CHKERRQ(ierr); 547 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)ksp);CHKERRQ(ierr); 548 ierr = PetscObjectIncrementTabLevel((PetscObject)ksp,(PetscObject)pc,1);CHKERRQ(ierr); 549 ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr); 550 ierr = KSPGetPC(ksp,&subpc);CHKERRQ(ierr); /* Why do we need this here? */ 551 if (subdomain_dm) { 552 ierr = KSPSetDM(ksp,subdomain_dm[i]);CHKERRQ(ierr); 553 ierr = DMDestroy(subdomain_dm+i);CHKERRQ(ierr); 554 } 555 ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr); 556 ierr = KSPSetOptionsPrefix(ksp,prefix);CHKERRQ(ierr); 557 if (subdomain_names && subdomain_names[i]) { 558 ierr = PetscSNPrintf(subprefix,PETSC_MAX_PATH_LEN,"sub_%s_",subdomain_names[i]);CHKERRQ(ierr); 559 ierr = KSPAppendOptionsPrefix(ksp,subprefix);CHKERRQ(ierr); 560 ierr = PetscFree(subdomain_names[i]);CHKERRQ(ierr); 561 } 562 ierr = KSPAppendOptionsPrefix(ksp,"sub_");CHKERRQ(ierr); 563 osm->ksp[i] = ksp; 564 } 565 ierr = PetscFree(subdomain_dm);CHKERRQ(ierr); 566 ierr = PetscFree(subdomain_names);CHKERRQ(ierr); 567 scall = MAT_INITIAL_MATRIX; 568 569 } else { /* if (pc->setupcalled) */ 570 /* 571 Destroy the submatrices from the previous iteration 572 */ 573 if (pc->flag == DIFFERENT_NONZERO_PATTERN) { 574 ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr); 575 scall = MAT_INITIAL_MATRIX; 576 } 577 if(osm->permutationIS){ 578 ierr = MatCreateSubMatrix(pc->pmat,osm->permutationIS,osm->permutationIS,scall,&osm->permutationP);CHKERRQ(ierr); 579 ierr = PetscObjectReference((PetscObject)osm->permutationP);CHKERRQ(ierr); 580 osm->pcmat = pc->pmat; 581 pc->pmat = osm->permutationP; 582 } 583 584 } 585 586 587 /* 588 Extract out the submatrices. 589 */ 590 if (size > 1) { 591 ierr = MatCreateSubMatricesMPI(pc->pmat,osm->n,osm->ois,osm->ois,scall,&osm->pmat);CHKERRQ(ierr); 592 } else { 593 ierr = MatCreateSubMatrices(pc->pmat,osm->n,osm->ois,osm->ois,scall,&osm->pmat);CHKERRQ(ierr); 594 } 595 if (scall == MAT_INITIAL_MATRIX) { 596 ierr = PetscObjectGetOptionsPrefix((PetscObject)pc->pmat,&pprefix);CHKERRQ(ierr); 597 for (i=0; i<osm->n; i++) { 598 ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)osm->pmat[i]);CHKERRQ(ierr); 599 ierr = PetscObjectSetOptionsPrefix((PetscObject)osm->pmat[i],pprefix);CHKERRQ(ierr); 600 } 601 } 602 603 /* Return control to the user so that the submatrices can be modified (e.g., to apply 604 different boundary conditions for the submatrices than for the global problem) */ 605 ierr = PCModifySubMatrices(pc,osm->n,osm->ois,osm->ois,osm->pmat,pc->modifysubmatricesP);CHKERRQ(ierr); 606 607 /* 608 Loop over submatrices putting them into local ksps 609 */ 610 for (i=0; i<osm->n; i++) { 611 ierr = KSPSetOperators(osm->ksp[i],osm->pmat[i],osm->pmat[i]);CHKERRQ(ierr); 612 if (!pc->setupcalled) { 613 ierr = KSPSetFromOptions(osm->ksp[i]);CHKERRQ(ierr); 614 } 615 } 616 if(osm->pcmat){ 617 ierr = MatDestroy(&pc->pmat);CHKERRQ(ierr); 618 pc->pmat = osm->pcmat; 619 osm->pcmat = NULL; 620 } 621 PetscFunctionReturn(0); 622 } 623 624 static PetscErrorCode PCSetUpOnBlocks_GASM(PC pc) 625 { 626 PC_GASM *osm = (PC_GASM*)pc->data; 627 PetscErrorCode ierr; 628 PetscInt i; 629 630 PetscFunctionBegin; 631 for (i=0; i<osm->n; i++) { 632 ierr = KSPSetUp(osm->ksp[i]);CHKERRQ(ierr); 633 } 634 PetscFunctionReturn(0); 635 } 636 637 static PetscErrorCode PCApply_GASM(PC pc,Vec xin,Vec yout) 638 { 639 PC_GASM *osm = (PC_GASM*)pc->data; 640 PetscErrorCode ierr; 641 PetscInt i; 642 Vec x,y; 643 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 644 645 PetscFunctionBegin; 646 if(osm->pctoouter){ 647 ierr = VecScatterBegin(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 648 ierr = VecScatterEnd(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 649 x = osm->pcx; 650 y = osm->pcy; 651 }else{ 652 x = xin; 653 y = yout; 654 } 655 /* 656 Support for limiting the restriction or interpolation only to the inner 657 subdomain values (leaving the other values 0). 658 */ 659 if (!(osm->type & PC_GASM_RESTRICT)) { 660 /* have to zero the work RHS since scatter may leave some slots empty */ 661 ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr); 662 ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 663 } else { 664 ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 665 } 666 ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr); 667 if (!(osm->type & PC_GASM_RESTRICT)) { 668 ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 669 } else { 670 ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 671 } 672 /* do the subdomain solves */ 673 for (i=0; i<osm->n; ++i) { 674 ierr = KSPSolve(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr); 675 ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr); 676 } 677 /* Do we need to zero y ?? */ 678 ierr = VecZeroEntries(y);CHKERRQ(ierr); 679 if (!(osm->type & PC_GASM_INTERPOLATE)) { 680 ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 681 ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 682 } else { 683 ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 684 ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 685 } 686 if(osm->pctoouter){ 687 ierr = VecScatterBegin(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 688 ierr = VecScatterEnd(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 689 } 690 PetscFunctionReturn(0); 691 } 692 693 static PetscErrorCode PCApplyTranspose_GASM(PC pc,Vec xin,Vec yout) 694 { 695 PC_GASM *osm = (PC_GASM*)pc->data; 696 PetscErrorCode ierr; 697 PetscInt i; 698 Vec x,y; 699 ScatterMode forward = SCATTER_FORWARD,reverse = SCATTER_REVERSE; 700 701 PetscFunctionBegin; 702 if(osm->pctoouter){ 703 ierr = VecScatterBegin(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 704 ierr = VecScatterEnd(osm->pctoouter,xin,osm->pcx,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 705 x = osm->pcx; 706 y = osm->pcy; 707 }else{ 708 x = xin; 709 y = yout; 710 } 711 /* 712 Support for limiting the restriction or interpolation to only local 713 subdomain values (leaving the other values 0). 714 715 Note: these are reversed from the PCApply_GASM() because we are applying the 716 transpose of the three terms 717 */ 718 if (!(osm->type & PC_GASM_INTERPOLATE)) { 719 /* have to zero the work RHS since scatter may leave some slots empty */ 720 ierr = VecZeroEntries(osm->gx);CHKERRQ(ierr); 721 ierr = VecScatterBegin(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 722 } else { 723 ierr = VecScatterBegin(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 724 } 725 ierr = VecZeroEntries(osm->gy);CHKERRQ(ierr); 726 if (!(osm->type & PC_GASM_INTERPOLATE)) { 727 ierr = VecScatterEnd(osm->girestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 728 } else { 729 ierr = VecScatterEnd(osm->gorestriction,x,osm->gx,INSERT_VALUES,forward);CHKERRQ(ierr); 730 } 731 /* do the local solves */ 732 for (i=0; i<osm->n; ++i) { /* Note that the solves are local, so we can go to osm->n, rather than osm->nmax. */ 733 ierr = KSPSolveTranspose(osm->ksp[i],osm->x[i],osm->y[i]);CHKERRQ(ierr); 734 ierr = KSPCheckSolve(osm->ksp[i],pc,osm->y[i]);CHKERRQ(ierr); 735 } 736 ierr = VecZeroEntries(y);CHKERRQ(ierr); 737 if (!(osm->type & PC_GASM_RESTRICT)) { 738 ierr = VecScatterBegin(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 739 ierr = VecScatterEnd(osm->girestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 740 } else { 741 ierr = VecScatterBegin(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 742 ierr = VecScatterEnd(osm->gorestriction,osm->gy,y,ADD_VALUES,reverse);CHKERRQ(ierr); 743 } 744 if(osm->pctoouter){ 745 ierr = VecScatterBegin(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 746 ierr = VecScatterEnd(osm->pctoouter,y,yout,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 747 } 748 PetscFunctionReturn(0); 749 } 750 751 static PetscErrorCode PCReset_GASM(PC pc) 752 { 753 PC_GASM *osm = (PC_GASM*)pc->data; 754 PetscErrorCode ierr; 755 PetscInt i; 756 757 PetscFunctionBegin; 758 if (osm->ksp) { 759 for (i=0; i<osm->n; i++) { 760 ierr = KSPReset(osm->ksp[i]);CHKERRQ(ierr); 761 } 762 } 763 if (osm->pmat) { 764 if (osm->n > 0) { 765 PetscMPIInt size; 766 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 767 if (size > 1) { 768 /* osm->pmat is created by MatCreateSubMatricesMPI(), cannot use MatDestroySubMatrices() */ 769 ierr = MatDestroyMatrices(osm->n,&osm->pmat);CHKERRQ(ierr); 770 } else { 771 ierr = MatDestroySubMatrices(osm->n,&osm->pmat);CHKERRQ(ierr); 772 } 773 } 774 } 775 if (osm->x) { 776 for (i=0; i<osm->n; i++) { 777 ierr = VecDestroy(&osm->x[i]);CHKERRQ(ierr); 778 ierr = VecDestroy(&osm->y[i]);CHKERRQ(ierr); 779 } 780 } 781 ierr = VecDestroy(&osm->gx);CHKERRQ(ierr); 782 ierr = VecDestroy(&osm->gy);CHKERRQ(ierr); 783 784 ierr = VecScatterDestroy(&osm->gorestriction);CHKERRQ(ierr); 785 ierr = VecScatterDestroy(&osm->girestriction);CHKERRQ(ierr); 786 if (!osm->user_subdomains) { 787 ierr = PCGASMDestroySubdomains(osm->n,&osm->ois,&osm->iis);CHKERRQ(ierr); 788 osm->N = PETSC_DETERMINE; 789 osm->nmax = PETSC_DETERMINE; 790 } 791 if(osm->pctoouter){ 792 ierr = VecScatterDestroy(&(osm->pctoouter));CHKERRQ(ierr); 793 } 794 if(osm->permutationIS){ 795 ierr = ISDestroy(&(osm->permutationIS));CHKERRQ(ierr); 796 } 797 if(osm->pcx){ 798 ierr = VecDestroy(&(osm->pcx));CHKERRQ(ierr); 799 } 800 if(osm->pcy){ 801 ierr = VecDestroy(&(osm->pcy));CHKERRQ(ierr); 802 } 803 if(osm->permutationP){ 804 ierr = MatDestroy(&(osm->permutationP));CHKERRQ(ierr); 805 } 806 if(osm->pcmat){ 807 ierr = MatDestroy(&osm->pcmat);CHKERRQ(ierr); 808 } 809 PetscFunctionReturn(0); 810 } 811 812 static PetscErrorCode PCDestroy_GASM(PC pc) 813 { 814 PC_GASM *osm = (PC_GASM*)pc->data; 815 PetscErrorCode ierr; 816 PetscInt i; 817 818 PetscFunctionBegin; 819 ierr = PCReset_GASM(pc);CHKERRQ(ierr); 820 /* PCReset will not destroy subdomains, if user_subdomains is true. */ 821 ierr = PCGASMDestroySubdomains(osm->n,&osm->ois,&osm->iis);CHKERRQ(ierr); 822 if (osm->ksp) { 823 for (i=0; i<osm->n; i++) { 824 ierr = KSPDestroy(&osm->ksp[i]);CHKERRQ(ierr); 825 } 826 ierr = PetscFree(osm->ksp);CHKERRQ(ierr); 827 } 828 ierr = PetscFree(osm->x);CHKERRQ(ierr); 829 ierr = PetscFree(osm->y);CHKERRQ(ierr); 830 ierr = PetscFree(pc->data);CHKERRQ(ierr); 831 PetscFunctionReturn(0); 832 } 833 834 static PetscErrorCode PCSetFromOptions_GASM(PetscOptionItems *PetscOptionsObject,PC pc) 835 { 836 PC_GASM *osm = (PC_GASM*)pc->data; 837 PetscErrorCode ierr; 838 PetscInt blocks,ovl; 839 PetscBool flg; 840 PCGASMType gasmtype; 841 842 PetscFunctionBegin; 843 ierr = PetscOptionsHead(PetscOptionsObject,"Generalized additive Schwarz options");CHKERRQ(ierr); 844 ierr = PetscOptionsBool("-pc_gasm_use_dm_subdomains","If subdomains aren't set, use DMCreateDomainDecomposition() to define subdomains.","PCGASMSetUseDMSubdomains",osm->dm_subdomains,&osm->dm_subdomains,&flg);CHKERRQ(ierr); 845 ierr = PetscOptionsInt("-pc_gasm_total_subdomains","Total number of subdomains across communicator","PCGASMSetTotalSubdomains",osm->N,&blocks,&flg);CHKERRQ(ierr); 846 if (flg) { 847 ierr = PCGASMSetTotalSubdomains(pc,blocks);CHKERRQ(ierr); 848 } 849 ierr = PetscOptionsInt("-pc_gasm_overlap","Number of overlapping degrees of freedom","PCGASMSetOverlap",osm->overlap,&ovl,&flg);CHKERRQ(ierr); 850 if (flg) { 851 ierr = PCGASMSetOverlap(pc,ovl);CHKERRQ(ierr); 852 osm->dm_subdomains = PETSC_FALSE; 853 } 854 flg = PETSC_FALSE; 855 ierr = PetscOptionsEnum("-pc_gasm_type","Type of restriction/extension","PCGASMSetType",PCGASMTypes,(PetscEnum)osm->type,(PetscEnum*)&gasmtype,&flg);CHKERRQ(ierr); 856 if (flg) {ierr = PCGASMSetType(pc,gasmtype);CHKERRQ(ierr);} 857 ierr = PetscOptionsBool("-pc_gasm_use_hierachical_partitioning","use hierarchical partitioning",NULL,osm->hierarchicalpartitioning,&osm->hierarchicalpartitioning,&flg);CHKERRQ(ierr); 858 ierr = PetscOptionsTail();CHKERRQ(ierr); 859 PetscFunctionReturn(0); 860 } 861 862 /*------------------------------------------------------------------------------------*/ 863 864 /*@ 865 PCGASMSetTotalSubdomains - sets the total number of subdomains to use across the 866 communicator. 867 Logically collective on pc 868 869 Input Parameters: 870 + pc - the preconditioner 871 - N - total number of subdomains 872 873 874 Level: beginner 875 876 .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap() 877 PCGASMCreateSubdomains2D() 878 @*/ 879 PetscErrorCode PCGASMSetTotalSubdomains(PC pc,PetscInt N) 880 { 881 PC_GASM *osm = (PC_GASM*)pc->data; 882 PetscMPIInt size,rank; 883 PetscErrorCode ierr; 884 885 PetscFunctionBegin; 886 if (N < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Total number of subdomains must be 1 or more, got N = %D",N); 887 if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetTotalSubdomains() should be called before calling PCSetUp()."); 888 889 ierr = PCGASMDestroySubdomains(osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr); 890 osm->ois = osm->iis = NULL; 891 892 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size);CHKERRQ(ierr); 893 ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank);CHKERRQ(ierr); 894 osm->N = N; 895 osm->n = PETSC_DETERMINE; 896 osm->nmax = PETSC_DETERMINE; 897 osm->dm_subdomains = PETSC_FALSE; 898 PetscFunctionReturn(0); 899 } 900 901 902 static PetscErrorCode PCGASMSetSubdomains_GASM(PC pc,PetscInt n,IS iis[],IS ois[]) 903 { 904 PC_GASM *osm = (PC_GASM*)pc->data; 905 PetscErrorCode ierr; 906 PetscInt i; 907 908 PetscFunctionBegin; 909 if (n < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Each process must have 1 or more subdomains, got n = %D",n); 910 if (pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetSubdomains() should be called before calling PCSetUp()."); 911 912 ierr = PCGASMDestroySubdomains(osm->n,&osm->iis,&osm->ois);CHKERRQ(ierr); 913 osm->iis = osm->ois = NULL; 914 osm->n = n; 915 osm->N = PETSC_DETERMINE; 916 osm->nmax = PETSC_DETERMINE; 917 if (ois) { 918 ierr = PetscMalloc1(n,&osm->ois);CHKERRQ(ierr); 919 for (i=0; i<n; i++) { 920 ierr = PetscObjectReference((PetscObject)ois[i]);CHKERRQ(ierr); 921 osm->ois[i] = ois[i]; 922 } 923 /* 924 Since the user set the outer subdomains, even if nontrivial overlap was requested via PCGASMSetOverlap(), 925 it will be ignored. To avoid confusion later on (e.g., when viewing the PC), the overlap size is set to -1. 926 */ 927 osm->overlap = -1; 928 /* inner subdomains must be provided */ 929 if (!iis) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"inner indices have to be provided \n"); 930 }/* end if */ 931 if (iis) { 932 ierr = PetscMalloc1(n,&osm->iis);CHKERRQ(ierr); 933 for (i=0; i<n; i++) { 934 ierr = PetscObjectReference((PetscObject)iis[i]);CHKERRQ(ierr); 935 osm->iis[i] = iis[i]; 936 } 937 if (!ois) { 938 osm->ois = NULL; 939 /* if user does not provide outer indices, we will create the corresponding outer indices using osm->overlap =1 in PCSetUp_GASM */ 940 } 941 } 942 if (PetscDefined(USE_DEBUG)) { 943 PetscInt j,rstart,rend,*covered,lsize; 944 const PetscInt *indices; 945 /* check if the inner indices cover and only cover the local portion of the preconditioning matrix */ 946 ierr = MatGetOwnershipRange(pc->pmat,&rstart,&rend);CHKERRQ(ierr); 947 ierr = PetscCalloc1(rend-rstart,&covered);CHKERRQ(ierr); 948 /* check if the current processor owns indices from others */ 949 for (i=0; i<n; i++) { 950 ierr = ISGetIndices(osm->iis[i],&indices);CHKERRQ(ierr); 951 ierr = ISGetLocalSize(osm->iis[i],&lsize);CHKERRQ(ierr); 952 for (j=0; j<lsize; j++) { 953 if (indices[j]<rstart || indices[j]>=rend) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"inner subdomains can not own an index %d from other processors", indices[j]); 954 else if (covered[indices[j]-rstart]==1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"inner subdomains can not have an overlapping index %d ",indices[j]); 955 else covered[indices[j]-rstart] = 1; 956 } 957 ierr = ISRestoreIndices(osm->iis[i],&indices);CHKERRQ(ierr); 958 } 959 /* check if we miss any indices */ 960 for (i=rstart; i<rend; i++) { 961 if (!covered[i-rstart]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_NULL,"local entity %d was not covered by inner subdomains",i); 962 } 963 ierr = PetscFree(covered);CHKERRQ(ierr); 964 } 965 if (iis) osm->user_subdomains = PETSC_TRUE; 966 PetscFunctionReturn(0); 967 } 968 969 970 static PetscErrorCode PCGASMSetOverlap_GASM(PC pc,PetscInt ovl) 971 { 972 PC_GASM *osm = (PC_GASM*)pc->data; 973 974 PetscFunctionBegin; 975 if (ovl < 0) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_OUTOFRANGE,"Negative overlap value requested"); 976 if (pc->setupcalled && ovl != osm->overlap) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"PCGASMSetOverlap() should be called before PCSetUp()."); 977 if (!pc->setupcalled) osm->overlap = ovl; 978 PetscFunctionReturn(0); 979 } 980 981 static PetscErrorCode PCGASMSetType_GASM(PC pc,PCGASMType type) 982 { 983 PC_GASM *osm = (PC_GASM*)pc->data; 984 985 PetscFunctionBegin; 986 osm->type = type; 987 osm->type_set = PETSC_TRUE; 988 PetscFunctionReturn(0); 989 } 990 991 static PetscErrorCode PCGASMSetSortIndices_GASM(PC pc,PetscBool doSort) 992 { 993 PC_GASM *osm = (PC_GASM*)pc->data; 994 995 PetscFunctionBegin; 996 osm->sort_indices = doSort; 997 PetscFunctionReturn(0); 998 } 999 1000 /* 1001 FIXME: This routine might need to be modified now that multiple ranks per subdomain are allowed. 1002 In particular, it would upset the global subdomain number calculation. 1003 */ 1004 static PetscErrorCode PCGASMGetSubKSP_GASM(PC pc,PetscInt *n,PetscInt *first,KSP **ksp) 1005 { 1006 PC_GASM *osm = (PC_GASM*)pc->data; 1007 PetscErrorCode ierr; 1008 1009 PetscFunctionBegin; 1010 if (osm->n < 1) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ORDER,"Need to call PCSetUp() on PC (or KSPSetUp() on the outer KSP object) before calling here"); 1011 1012 if (n) *n = osm->n; 1013 if (first) { 1014 ierr = MPI_Scan(&osm->n,first,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRQ(ierr); 1015 *first -= osm->n; 1016 } 1017 if (ksp) { 1018 /* Assume that local solves are now different; not necessarily 1019 true, though! This flag is used only for PCView_GASM() */ 1020 *ksp = osm->ksp; 1021 osm->same_subdomain_solvers = PETSC_FALSE; 1022 } 1023 PetscFunctionReturn(0); 1024 } /* PCGASMGetSubKSP_GASM() */ 1025 1026 /*@C 1027 PCGASMSetSubdomains - Sets the subdomains for this processor 1028 for the additive Schwarz preconditioner. 1029 1030 Collective on pc 1031 1032 Input Parameters: 1033 + pc - the preconditioner object 1034 . n - the number of subdomains for this processor 1035 . iis - the index sets that define the inner subdomains (or NULL for PETSc to determine subdomains) 1036 - ois - the index sets that define the outer subdomains (or NULL to use the same as iis, or to construct by expanding iis by the requested overlap) 1037 1038 Notes: 1039 The IS indices use the parallel, global numbering of the vector entries. 1040 Inner subdomains are those where the correction is applied. 1041 Outer subdomains are those where the residual necessary to obtain the 1042 corrections is obtained (see PCGASMType for the use of inner/outer subdomains). 1043 Both inner and outer subdomains can extend over several processors. 1044 This processor's portion of a subdomain is known as a local subdomain. 1045 1046 Inner subdomains can not overlap with each other, do not have any entities from remote processors, 1047 and have to cover the entire local subdomain owned by the current processor. The index sets on each 1048 process should be ordered such that the ith local subdomain is connected to the ith remote subdomain 1049 on another MPI process. 1050 1051 By default the GASM preconditioner uses 1 (local) subdomain per processor. 1052 1053 1054 Level: advanced 1055 1056 .seealso: PCGASMSetNumSubdomains(), PCGASMSetOverlap(), PCGASMGetSubKSP(), 1057 PCGASMCreateSubdomains2D(), PCGASMGetSubdomains() 1058 @*/ 1059 PetscErrorCode PCGASMSetSubdomains(PC pc,PetscInt n,IS iis[],IS ois[]) 1060 { 1061 PC_GASM *osm = (PC_GASM*)pc->data; 1062 PetscErrorCode ierr; 1063 1064 PetscFunctionBegin; 1065 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1066 ierr = PetscTryMethod(pc,"PCGASMSetSubdomains_C",(PC,PetscInt,IS[],IS[]),(pc,n,iis,ois));CHKERRQ(ierr); 1067 osm->dm_subdomains = PETSC_FALSE; 1068 PetscFunctionReturn(0); 1069 } 1070 1071 1072 /*@ 1073 PCGASMSetOverlap - Sets the overlap between a pair of subdomains for the 1074 additive Schwarz preconditioner. Either all or no processors in the 1075 pc communicator must call this routine. 1076 1077 Logically Collective on pc 1078 1079 Input Parameters: 1080 + pc - the preconditioner context 1081 - ovl - the amount of overlap between subdomains (ovl >= 0, default value = 0) 1082 1083 Options Database Key: 1084 . -pc_gasm_overlap <overlap> - Sets overlap 1085 1086 Notes: 1087 By default the GASM preconditioner uses 1 subdomain per processor. To use 1088 multiple subdomain per perocessor or "straddling" subdomains that intersect 1089 multiple processors use PCGASMSetSubdomains() (or option -pc_gasm_total_subdomains <n>). 1090 1091 The overlap defaults to 0, so if one desires that no additional 1092 overlap be computed beyond what may have been set with a call to 1093 PCGASMSetSubdomains(), then ovl must be set to be 0. In particular, if one does 1094 not explicitly set the subdomains in application code, then all overlap would be computed 1095 internally by PETSc, and using an overlap of 0 would result in an GASM 1096 variant that is equivalent to the block Jacobi preconditioner. 1097 1098 Note that one can define initial index sets with any overlap via 1099 PCGASMSetSubdomains(); the routine PCGASMSetOverlap() merely allows 1100 PETSc to extend that overlap further, if desired. 1101 1102 Level: intermediate 1103 1104 .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(), 1105 PCGASMCreateSubdomains2D(), PCGASMGetSubdomains() 1106 @*/ 1107 PetscErrorCode PCGASMSetOverlap(PC pc,PetscInt ovl) 1108 { 1109 PetscErrorCode ierr; 1110 PC_GASM *osm = (PC_GASM*)pc->data; 1111 1112 PetscFunctionBegin; 1113 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1114 PetscValidLogicalCollectiveInt(pc,ovl,2); 1115 ierr = PetscTryMethod(pc,"PCGASMSetOverlap_C",(PC,PetscInt),(pc,ovl));CHKERRQ(ierr); 1116 osm->dm_subdomains = PETSC_FALSE; 1117 PetscFunctionReturn(0); 1118 } 1119 1120 /*@ 1121 PCGASMSetType - Sets the type of restriction and interpolation used 1122 for local problems in the additive Schwarz method. 1123 1124 Logically Collective on PC 1125 1126 Input Parameters: 1127 + pc - the preconditioner context 1128 - type - variant of GASM, one of 1129 .vb 1130 PC_GASM_BASIC - full interpolation and restriction 1131 PC_GASM_RESTRICT - full restriction, local processor interpolation 1132 PC_GASM_INTERPOLATE - full interpolation, local processor restriction 1133 PC_GASM_NONE - local processor restriction and interpolation 1134 .ve 1135 1136 Options Database Key: 1137 . -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type 1138 1139 Level: intermediate 1140 1141 .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(), 1142 PCGASMCreateSubdomains2D() 1143 @*/ 1144 PetscErrorCode PCGASMSetType(PC pc,PCGASMType type) 1145 { 1146 PetscErrorCode ierr; 1147 1148 PetscFunctionBegin; 1149 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1150 PetscValidLogicalCollectiveEnum(pc,type,2); 1151 ierr = PetscTryMethod(pc,"PCGASMSetType_C",(PC,PCGASMType),(pc,type));CHKERRQ(ierr); 1152 PetscFunctionReturn(0); 1153 } 1154 1155 /*@ 1156 PCGASMSetSortIndices - Determines whether subdomain indices are sorted. 1157 1158 Logically Collective on PC 1159 1160 Input Parameters: 1161 + pc - the preconditioner context 1162 - doSort - sort the subdomain indices 1163 1164 Level: intermediate 1165 1166 .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(), 1167 PCGASMCreateSubdomains2D() 1168 @*/ 1169 PetscErrorCode PCGASMSetSortIndices(PC pc,PetscBool doSort) 1170 { 1171 PetscErrorCode ierr; 1172 1173 PetscFunctionBegin; 1174 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1175 PetscValidLogicalCollectiveBool(pc,doSort,2); 1176 ierr = PetscTryMethod(pc,"PCGASMSetSortIndices_C",(PC,PetscBool),(pc,doSort));CHKERRQ(ierr); 1177 PetscFunctionReturn(0); 1178 } 1179 1180 /*@C 1181 PCGASMGetSubKSP - Gets the local KSP contexts for all blocks on 1182 this processor. 1183 1184 Collective on PC iff first_local is requested 1185 1186 Input Parameter: 1187 . pc - the preconditioner context 1188 1189 Output Parameters: 1190 + n_local - the number of blocks on this processor or NULL 1191 . first_local - the global number of the first block on this processor or NULL, 1192 all processors must request or all must pass NULL 1193 - ksp - the array of KSP contexts 1194 1195 Note: 1196 After PCGASMGetSubKSP() the array of KSPes is not to be freed 1197 1198 Currently for some matrix implementations only 1 block per processor 1199 is supported. 1200 1201 You must call KSPSetUp() before calling PCGASMGetSubKSP(). 1202 1203 Level: advanced 1204 1205 .seealso: PCGASMSetSubdomains(), PCGASMSetOverlap(), 1206 PCGASMCreateSubdomains2D(), 1207 @*/ 1208 PetscErrorCode PCGASMGetSubKSP(PC pc,PetscInt *n_local,PetscInt *first_local,KSP *ksp[]) 1209 { 1210 PetscErrorCode ierr; 1211 1212 PetscFunctionBegin; 1213 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1214 ierr = PetscUseMethod(pc,"PCGASMGetSubKSP_C",(PC,PetscInt*,PetscInt*,KSP **),(pc,n_local,first_local,ksp));CHKERRQ(ierr); 1215 PetscFunctionReturn(0); 1216 } 1217 1218 /* -------------------------------------------------------------------------------------*/ 1219 /*MC 1220 PCGASM - Use the (restricted) additive Schwarz method, each block is (approximately) solved with 1221 its own KSP object. 1222 1223 Options Database Keys: 1224 + -pc_gasm_total_subdomains <n> - Sets total number of local subdomains to be distributed among processors 1225 . -pc_gasm_view_subdomains - activates the printing of subdomain indices in PCView(), -ksp_view or -snes_view 1226 . -pc_gasm_print_subdomains - activates the printing of subdomain indices in PCSetUp() 1227 . -pc_gasm_overlap <ovl> - Sets overlap by which to (automatically) extend local subdomains 1228 - -pc_gasm_type [basic,restrict,interpolate,none] - Sets GASM type 1229 1230 IMPORTANT: If you run with, for example, 3 blocks on 1 processor or 3 blocks on 3 processors you 1231 will get a different convergence rate due to the default option of -pc_gasm_type restrict. Use 1232 -pc_gasm_type basic to use the standard GASM. 1233 1234 Notes: 1235 Blocks can be shared by multiple processes. 1236 1237 To set options on the solvers for each block append -sub_ to all the KSP, and PC 1238 options database keys. For example, -sub_pc_type ilu -sub_pc_factor_levels 1 -sub_ksp_type preonly 1239 1240 To set the options on the solvers separate for each block call PCGASMGetSubKSP() 1241 and set the options directly on the resulting KSP object (you can access its PC 1242 with KSPGetPC()) 1243 1244 1245 Level: beginner 1246 1247 References: 1248 + 1. - M Dryja, OB Widlund, An additive variant of the Schwarz alternating method for the case of many subregions 1249 Courant Institute, New York University Technical report 1250 - 2. - Barry Smith, Petter Bjorstad, and William Gropp, Domain Decompositions: Parallel Multilevel Methods for Elliptic Partial Differential Equations, 1251 Cambridge University Press. 1252 1253 .seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, 1254 PCBJACOBI, PCGASMGetSubKSP(), PCGASMSetSubdomains(), 1255 PCSetModifySubMatrices(), PCGASMSetOverlap(), PCGASMSetType() 1256 1257 M*/ 1258 1259 PETSC_EXTERN PetscErrorCode PCCreate_GASM(PC pc) 1260 { 1261 PetscErrorCode ierr; 1262 PC_GASM *osm; 1263 1264 PetscFunctionBegin; 1265 ierr = PetscNewLog(pc,&osm);CHKERRQ(ierr); 1266 1267 osm->N = PETSC_DETERMINE; 1268 osm->n = PETSC_DECIDE; 1269 osm->nmax = PETSC_DETERMINE; 1270 osm->overlap = 0; 1271 osm->ksp = NULL; 1272 osm->gorestriction = NULL; 1273 osm->girestriction = NULL; 1274 osm->pctoouter = NULL; 1275 osm->gx = NULL; 1276 osm->gy = NULL; 1277 osm->x = NULL; 1278 osm->y = NULL; 1279 osm->pcx = NULL; 1280 osm->pcy = NULL; 1281 osm->permutationIS = NULL; 1282 osm->permutationP = NULL; 1283 osm->pcmat = NULL; 1284 osm->ois = NULL; 1285 osm->iis = NULL; 1286 osm->pmat = NULL; 1287 osm->type = PC_GASM_RESTRICT; 1288 osm->same_subdomain_solvers = PETSC_TRUE; 1289 osm->sort_indices = PETSC_TRUE; 1290 osm->dm_subdomains = PETSC_FALSE; 1291 osm->hierarchicalpartitioning = PETSC_FALSE; 1292 1293 pc->data = (void*)osm; 1294 pc->ops->apply = PCApply_GASM; 1295 pc->ops->applytranspose = PCApplyTranspose_GASM; 1296 pc->ops->setup = PCSetUp_GASM; 1297 pc->ops->reset = PCReset_GASM; 1298 pc->ops->destroy = PCDestroy_GASM; 1299 pc->ops->setfromoptions = PCSetFromOptions_GASM; 1300 pc->ops->setuponblocks = PCSetUpOnBlocks_GASM; 1301 pc->ops->view = PCView_GASM; 1302 pc->ops->applyrichardson = NULL; 1303 1304 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSubdomains_C",PCGASMSetSubdomains_GASM);CHKERRQ(ierr); 1305 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetOverlap_C",PCGASMSetOverlap_GASM);CHKERRQ(ierr); 1306 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetType_C",PCGASMSetType_GASM);CHKERRQ(ierr); 1307 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMSetSortIndices_C",PCGASMSetSortIndices_GASM);CHKERRQ(ierr); 1308 ierr = PetscObjectComposeFunction((PetscObject)pc,"PCGASMGetSubKSP_C",PCGASMGetSubKSP_GASM);CHKERRQ(ierr); 1309 PetscFunctionReturn(0); 1310 } 1311 1312 1313 PetscErrorCode PCGASMCreateLocalSubdomains(Mat A, PetscInt nloc, IS *iis[]) 1314 { 1315 MatPartitioning mpart; 1316 const char *prefix; 1317 PetscInt i,j,rstart,rend,bs; 1318 PetscBool hasop, isbaij = PETSC_FALSE,foundpart = PETSC_FALSE; 1319 Mat Ad = NULL, adj; 1320 IS ispart,isnumb,*is; 1321 PetscErrorCode ierr; 1322 1323 PetscFunctionBegin; 1324 if (nloc < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"number of local subdomains must > 0, got nloc = %D",nloc); 1325 1326 /* Get prefix, row distribution, and block size */ 1327 ierr = MatGetOptionsPrefix(A,&prefix);CHKERRQ(ierr); 1328 ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 1329 ierr = MatGetBlockSize(A,&bs);CHKERRQ(ierr); 1330 if (rstart/bs*bs != rstart || rend/bs*bs != rend) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"bad row distribution [%D,%D) for matrix block size %D",rstart,rend,bs); 1331 1332 /* Get diagonal block from matrix if possible */ 1333 ierr = MatHasOperation(A,MATOP_GET_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1334 if (hasop) { 1335 ierr = MatGetDiagonalBlock(A,&Ad);CHKERRQ(ierr); 1336 } 1337 if (Ad) { 1338 ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQBAIJ,&isbaij);CHKERRQ(ierr); 1339 if (!isbaij) {ierr = PetscObjectBaseTypeCompare((PetscObject)Ad,MATSEQSBAIJ,&isbaij);CHKERRQ(ierr);} 1340 } 1341 if (Ad && nloc > 1) { 1342 PetscBool match,done; 1343 /* Try to setup a good matrix partitioning if available */ 1344 ierr = MatPartitioningCreate(PETSC_COMM_SELF,&mpart);CHKERRQ(ierr); 1345 ierr = PetscObjectSetOptionsPrefix((PetscObject)mpart,prefix);CHKERRQ(ierr); 1346 ierr = MatPartitioningSetFromOptions(mpart);CHKERRQ(ierr); 1347 ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGCURRENT,&match);CHKERRQ(ierr); 1348 if (!match) { 1349 ierr = PetscObjectTypeCompare((PetscObject)mpart,MATPARTITIONINGSQUARE,&match);CHKERRQ(ierr); 1350 } 1351 if (!match) { /* assume a "good" partitioner is available */ 1352 PetscInt na; 1353 const PetscInt *ia,*ja; 1354 ierr = MatGetRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr); 1355 if (done) { 1356 /* Build adjacency matrix by hand. Unfortunately a call to 1357 MatConvert(Ad,MATMPIADJ,MAT_INITIAL_MATRIX,&adj) will 1358 remove the block-aij structure and we cannot expect 1359 MatPartitioning to split vertices as we need */ 1360 PetscInt i,j,len,nnz,cnt,*iia=NULL,*jja=NULL; 1361 const PetscInt *row; 1362 nnz = 0; 1363 for (i=0; i<na; i++) { /* count number of nonzeros */ 1364 len = ia[i+1] - ia[i]; 1365 row = ja + ia[i]; 1366 for (j=0; j<len; j++) { 1367 if (row[j] == i) { /* don't count diagonal */ 1368 len--; break; 1369 } 1370 } 1371 nnz += len; 1372 } 1373 ierr = PetscMalloc1(na+1,&iia);CHKERRQ(ierr); 1374 ierr = PetscMalloc1(nnz,&jja);CHKERRQ(ierr); 1375 nnz = 0; 1376 iia[0] = 0; 1377 for (i=0; i<na; i++) { /* fill adjacency */ 1378 cnt = 0; 1379 len = ia[i+1] - ia[i]; 1380 row = ja + ia[i]; 1381 for (j=0; j<len; j++) { 1382 if (row[j] != i) jja[nnz+cnt++] = row[j]; /* if not diagonal */ 1383 } 1384 nnz += cnt; 1385 iia[i+1] = nnz; 1386 } 1387 /* Partitioning of the adjacency matrix */ 1388 ierr = MatCreateMPIAdj(PETSC_COMM_SELF,na,na,iia,jja,NULL,&adj);CHKERRQ(ierr); 1389 ierr = MatPartitioningSetAdjacency(mpart,adj);CHKERRQ(ierr); 1390 ierr = MatPartitioningSetNParts(mpart,nloc);CHKERRQ(ierr); 1391 ierr = MatPartitioningApply(mpart,&ispart);CHKERRQ(ierr); 1392 ierr = ISPartitioningToNumbering(ispart,&isnumb);CHKERRQ(ierr); 1393 ierr = MatDestroy(&adj);CHKERRQ(ierr); 1394 foundpart = PETSC_TRUE; 1395 } 1396 ierr = MatRestoreRowIJ(Ad,0,PETSC_TRUE,isbaij,&na,&ia,&ja,&done);CHKERRQ(ierr); 1397 } 1398 ierr = MatPartitioningDestroy(&mpart);CHKERRQ(ierr); 1399 } 1400 ierr = PetscMalloc1(nloc,&is);CHKERRQ(ierr); 1401 if (!foundpart) { 1402 1403 /* Partitioning by contiguous chunks of rows */ 1404 1405 PetscInt mbs = (rend-rstart)/bs; 1406 PetscInt start = rstart; 1407 for (i=0; i<nloc; i++) { 1408 PetscInt count = (mbs/nloc + ((mbs % nloc) > i)) * bs; 1409 ierr = ISCreateStride(PETSC_COMM_SELF,count,start,1,&is[i]);CHKERRQ(ierr); 1410 start += count; 1411 } 1412 1413 } else { 1414 1415 /* Partitioning by adjacency of diagonal block */ 1416 1417 const PetscInt *numbering; 1418 PetscInt *count,nidx,*indices,*newidx,start=0; 1419 /* Get node count in each partition */ 1420 ierr = PetscMalloc1(nloc,&count);CHKERRQ(ierr); 1421 ierr = ISPartitioningCount(ispart,nloc,count);CHKERRQ(ierr); 1422 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1423 for (i=0; i<nloc; i++) count[i] *= bs; 1424 } 1425 /* Build indices from node numbering */ 1426 ierr = ISGetLocalSize(isnumb,&nidx);CHKERRQ(ierr); 1427 ierr = PetscMalloc1(nidx,&indices);CHKERRQ(ierr); 1428 for (i=0; i<nidx; i++) indices[i] = i; /* needs to be initialized */ 1429 ierr = ISGetIndices(isnumb,&numbering);CHKERRQ(ierr); 1430 ierr = PetscSortIntWithPermutation(nidx,numbering,indices);CHKERRQ(ierr); 1431 ierr = ISRestoreIndices(isnumb,&numbering);CHKERRQ(ierr); 1432 if (isbaij && bs > 1) { /* adjust for the block-aij case */ 1433 ierr = PetscMalloc1(nidx*bs,&newidx);CHKERRQ(ierr); 1434 for (i=0; i<nidx; i++) { 1435 for (j=0; j<bs; j++) newidx[i*bs+j] = indices[i]*bs + j; 1436 } 1437 ierr = PetscFree(indices);CHKERRQ(ierr); 1438 nidx *= bs; 1439 indices = newidx; 1440 } 1441 /* Shift to get global indices */ 1442 for (i=0; i<nidx; i++) indices[i] += rstart; 1443 1444 /* Build the index sets for each block */ 1445 for (i=0; i<nloc; i++) { 1446 ierr = ISCreateGeneral(PETSC_COMM_SELF,count[i],&indices[start],PETSC_COPY_VALUES,&is[i]);CHKERRQ(ierr); 1447 ierr = ISSort(is[i]);CHKERRQ(ierr); 1448 start += count[i]; 1449 } 1450 1451 ierr = PetscFree(count);CHKERRQ(ierr); 1452 ierr = PetscFree(indices);CHKERRQ(ierr); 1453 ierr = ISDestroy(&isnumb);CHKERRQ(ierr); 1454 ierr = ISDestroy(&ispart);CHKERRQ(ierr); 1455 } 1456 *iis = is; 1457 PetscFunctionReturn(0); 1458 } 1459 1460 PETSC_INTERN PetscErrorCode PCGASMCreateStraddlingSubdomains(Mat A,PetscInt N,PetscInt *n,IS *iis[]) 1461 { 1462 PetscErrorCode ierr; 1463 1464 PetscFunctionBegin; 1465 ierr = MatSubdomainsCreateCoalesce(A,N,n,iis);CHKERRQ(ierr); 1466 PetscFunctionReturn(0); 1467 } 1468 1469 1470 1471 /*@C 1472 PCGASMCreateSubdomains - Creates n index sets defining n nonoverlapping subdomains for the additive 1473 Schwarz preconditioner for a any problem based on its matrix. 1474 1475 Collective 1476 1477 Input Parameters: 1478 + A - The global matrix operator 1479 - N - the number of global subdomains requested 1480 1481 Output Parameters: 1482 + n - the number of subdomains created on this processor 1483 - iis - the array of index sets defining the local inner subdomains (on which the correction is applied) 1484 1485 Level: advanced 1486 1487 Note: When N >= A's communicator size, each subdomain is local -- contained within a single processor. 1488 When N < size, the subdomains are 'straddling' (processor boundaries) and are no longer local. 1489 The resulting subdomains can be use in PCGASMSetSubdomains(pc,n,iss,NULL). The overlapping 1490 outer subdomains will be automatically generated from these according to the requested amount of 1491 overlap; this is currently supported only with local subdomains. 1492 1493 1494 .seealso: PCGASMSetSubdomains(), PCGASMDestroySubdomains() 1495 @*/ 1496 PetscErrorCode PCGASMCreateSubdomains(Mat A,PetscInt N,PetscInt *n,IS *iis[]) 1497 { 1498 PetscMPIInt size; 1499 PetscErrorCode ierr; 1500 1501 PetscFunctionBegin; 1502 PetscValidHeaderSpecific(A,MAT_CLASSID,1); 1503 PetscValidPointer(iis,4); 1504 1505 if (N < 1) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Number of subdomains must be > 0, N = %D",N); 1506 ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr); 1507 if (N >= size) { 1508 *n = N/size + (N%size); 1509 ierr = PCGASMCreateLocalSubdomains(A,*n,iis);CHKERRQ(ierr); 1510 } else { 1511 ierr = PCGASMCreateStraddlingSubdomains(A,N,n,iis);CHKERRQ(ierr); 1512 } 1513 PetscFunctionReturn(0); 1514 } 1515 1516 /*@C 1517 PCGASMDestroySubdomains - Destroys the index sets created with 1518 PCGASMCreateSubdomains() or PCGASMCreateSubdomains2D. Should be 1519 called after setting subdomains with PCGASMSetSubdomains(). 1520 1521 Collective 1522 1523 Input Parameters: 1524 + n - the number of index sets 1525 . iis - the array of inner subdomains, 1526 - ois - the array of outer subdomains, can be NULL 1527 1528 Level: intermediate 1529 1530 Notes: 1531 this is merely a convenience subroutine that walks each list, 1532 destroys each IS on the list, and then frees the list. At the end the 1533 list pointers are set to NULL. 1534 1535 .seealso: PCGASMCreateSubdomains(), PCGASMSetSubdomains() 1536 @*/ 1537 PetscErrorCode PCGASMDestroySubdomains(PetscInt n,IS **iis,IS **ois) 1538 { 1539 PetscInt i; 1540 PetscErrorCode ierr; 1541 1542 PetscFunctionBegin; 1543 if (n <= 0) PetscFunctionReturn(0); 1544 if (ois) { 1545 PetscValidPointer(ois,3); 1546 if (*ois) { 1547 PetscValidPointer(*ois,3); 1548 for (i=0; i<n; i++) { 1549 ierr = ISDestroy(&(*ois)[i]);CHKERRQ(ierr); 1550 } 1551 ierr = PetscFree((*ois));CHKERRQ(ierr); 1552 } 1553 } 1554 if (iis) { 1555 PetscValidPointer(iis,2); 1556 if (*iis) { 1557 PetscValidPointer(*iis,2); 1558 for (i=0; i<n; i++) { 1559 ierr = ISDestroy(&(*iis)[i]);CHKERRQ(ierr); 1560 } 1561 ierr = PetscFree((*iis));CHKERRQ(ierr); 1562 } 1563 } 1564 PetscFunctionReturn(0); 1565 } 1566 1567 1568 #define PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,xleft_loc,ylow_loc,xright_loc,yhigh_loc,n) \ 1569 { \ 1570 PetscInt first_row = first/M, last_row = last/M+1; \ 1571 /* \ 1572 Compute ylow_loc and yhigh_loc so that (ylow_loc,xleft) and (yhigh_loc,xright) are the corners \ 1573 of the bounding box of the intersection of the subdomain with the local ownership range (local \ 1574 subdomain). \ 1575 Also compute xleft_loc and xright_loc as the lower and upper bounds on the first and last rows \ 1576 of the intersection. \ 1577 */ \ 1578 /* ylow_loc is the grid row containing the first element of the local sumbdomain */ \ 1579 *ylow_loc = PetscMax(first_row,ylow); \ 1580 /* xleft_loc is the offset of first element of the local subdomain within its grid row (might actually be outside the local subdomain) */ \ 1581 *xleft_loc = *ylow_loc==first_row ? PetscMax(first%M,xleft) : xleft; \ 1582 /* yhigh_loc is the grid row above the last local subdomain element */ \ 1583 *yhigh_loc = PetscMin(last_row,yhigh); \ 1584 /* xright is the offset of the end of the local subdomain within its grid row (might actually be outside the local subdomain) */ \ 1585 *xright_loc = *yhigh_loc==last_row ? PetscMin(xright,last%M) : xright; \ 1586 /* Now compute the size of the local subdomain n. */ \ 1587 *n = 0; \ 1588 if (*ylow_loc < *yhigh_loc) { \ 1589 PetscInt width = xright-xleft; \ 1590 *n += width*(*yhigh_loc-*ylow_loc-1); \ 1591 *n += PetscMin(PetscMax(*xright_loc-xleft,0),width); \ 1592 *n -= PetscMin(PetscMax(*xleft_loc-xleft,0), width); \ 1593 } \ 1594 } 1595 1596 1597 1598 /*@ 1599 PCGASMCreateSubdomains2D - Creates the index sets for the overlapping Schwarz 1600 preconditioner for a two-dimensional problem on a regular grid. 1601 1602 Collective 1603 1604 Input Parameters: 1605 + M, N - the global number of grid points in the x and y directions 1606 . Mdomains, Ndomains - the global number of subdomains in the x and y directions 1607 . dof - degrees of freedom per node 1608 - overlap - overlap in mesh lines 1609 1610 Output Parameters: 1611 + Nsub - the number of local subdomains created 1612 . iis - array of index sets defining inner (nonoverlapping) subdomains 1613 - ois - array of index sets defining outer (overlapping, if overlap > 0) subdomains 1614 1615 1616 Level: advanced 1617 1618 .seealso: PCGASMSetSubdomains(), PCGASMGetSubKSP(), PCGASMSetOverlap() 1619 @*/ 1620 PetscErrorCode PCGASMCreateSubdomains2D(PC pc,PetscInt M,PetscInt N,PetscInt Mdomains,PetscInt Ndomains,PetscInt dof,PetscInt overlap,PetscInt *nsub,IS **iis,IS **ois) 1621 { 1622 PetscErrorCode ierr; 1623 PetscMPIInt size, rank; 1624 PetscInt i, j; 1625 PetscInt maxheight, maxwidth; 1626 PetscInt xstart, xleft, xright, xleft_loc, xright_loc; 1627 PetscInt ystart, ylow, yhigh, ylow_loc, yhigh_loc; 1628 PetscInt x[2][2], y[2][2], n[2]; 1629 PetscInt first, last; 1630 PetscInt nidx, *idx; 1631 PetscInt ii,jj,s,q,d; 1632 PetscInt k,kk; 1633 PetscMPIInt color; 1634 MPI_Comm comm, subcomm; 1635 IS **xis = NULL, **is = ois, **is_local = iis; 1636 1637 PetscFunctionBegin; 1638 ierr = PetscObjectGetComm((PetscObject)pc, &comm);CHKERRQ(ierr); 1639 ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr); 1640 ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr); 1641 ierr = MatGetOwnershipRange(pc->pmat, &first, &last);CHKERRQ(ierr); 1642 if (first%dof || last%dof) SETERRQ3(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Matrix row partitioning unsuitable for domain decomposition: local row range (%D,%D) " 1643 "does not respect the number of degrees of freedom per grid point %D", first, last, dof); 1644 1645 /* Determine the number of domains with nonzero intersections with the local ownership range. */ 1646 s = 0; 1647 ystart = 0; 1648 for (j=0; j<Ndomains; ++j) { 1649 maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */ 1650 if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N); 1651 /* Vertical domain limits with an overlap. */ 1652 ylow = PetscMax(ystart - overlap,0); 1653 yhigh = PetscMin(ystart + maxheight + overlap,N); 1654 xstart = 0; 1655 for (i=0; i<Mdomains; ++i) { 1656 maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */ 1657 if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M); 1658 /* Horizontal domain limits with an overlap. */ 1659 xleft = PetscMax(xstart - overlap,0); 1660 xright = PetscMin(xstart + maxwidth + overlap,M); 1661 /* 1662 Determine whether this subdomain intersects this processor's ownership range of pc->pmat. 1663 */ 1664 PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx)); 1665 if (nidx) ++s; 1666 xstart += maxwidth; 1667 } /* for (i = 0; i < Mdomains; ++i) */ 1668 ystart += maxheight; 1669 } /* for (j = 0; j < Ndomains; ++j) */ 1670 1671 /* Now we can allocate the necessary number of ISs. */ 1672 *nsub = s; 1673 ierr = PetscMalloc1(*nsub,is);CHKERRQ(ierr); 1674 ierr = PetscMalloc1(*nsub,is_local);CHKERRQ(ierr); 1675 s = 0; 1676 ystart = 0; 1677 for (j=0; j<Ndomains; ++j) { 1678 maxheight = N/Ndomains + ((N % Ndomains) > j); /* Maximal height of subdomain */ 1679 if (maxheight < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the vertical directon for mesh height %D", Ndomains, N); 1680 /* Vertical domain limits with an overlap. */ 1681 y[0][0] = PetscMax(ystart - overlap,0); 1682 y[0][1] = PetscMin(ystart + maxheight + overlap,N); 1683 /* Vertical domain limits without an overlap. */ 1684 y[1][0] = ystart; 1685 y[1][1] = PetscMin(ystart + maxheight,N); 1686 xstart = 0; 1687 for (i=0; i<Mdomains; ++i) { 1688 maxwidth = M/Mdomains + ((M % Mdomains) > i); /* Maximal width of subdomain */ 1689 if (maxwidth < 2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Too many %D subdomains in the horizontal direction for mesh width %D", Mdomains, M); 1690 /* Horizontal domain limits with an overlap. */ 1691 x[0][0] = PetscMax(xstart - overlap,0); 1692 x[0][1] = PetscMin(xstart + maxwidth + overlap,M); 1693 /* Horizontal domain limits without an overlap. */ 1694 x[1][0] = xstart; 1695 x[1][1] = PetscMin(xstart+maxwidth,M); 1696 /* 1697 Determine whether this domain intersects this processor's ownership range of pc->pmat. 1698 Do this twice: first for the domains with overlaps, and once without. 1699 During the first pass create the subcommunicators, and use them on the second pass as well. 1700 */ 1701 for (q = 0; q < 2; ++q) { 1702 PetscBool split = PETSC_FALSE; 1703 /* 1704 domain limits, (xleft, xright) and (ylow, yheigh) are adjusted 1705 according to whether the domain with an overlap or without is considered. 1706 */ 1707 xleft = x[q][0]; xright = x[q][1]; 1708 ylow = y[q][0]; yhigh = y[q][1]; 1709 PCGASMLocalSubdomainBounds2D(M,N,xleft,ylow,xright,yhigh,first,last,(&xleft_loc),(&ylow_loc),(&xright_loc),(&yhigh_loc),(&nidx)); 1710 nidx *= dof; 1711 n[q] = nidx; 1712 /* 1713 Based on the counted number of indices in the local domain *with an overlap*, 1714 construct a subcommunicator of all the processors supporting this domain. 1715 Observe that a domain with an overlap might have nontrivial local support, 1716 while the domain without an overlap might not. Hence, the decision to participate 1717 in the subcommunicator must be based on the domain with an overlap. 1718 */ 1719 if (q == 0) { 1720 if (nidx) color = 1; 1721 else color = MPI_UNDEFINED; 1722 ierr = MPI_Comm_split(comm, color, rank, &subcomm);CHKERRQ(ierr); 1723 split = PETSC_TRUE; 1724 } 1725 /* 1726 Proceed only if the number of local indices *with an overlap* is nonzero. 1727 */ 1728 if (n[0]) { 1729 if (q == 0) xis = is; 1730 if (q == 1) { 1731 /* 1732 The IS for the no-overlap subdomain shares a communicator with the overlapping domain. 1733 Moreover, if the overlap is zero, the two ISs are identical. 1734 */ 1735 if (overlap == 0) { 1736 (*is_local)[s] = (*is)[s]; 1737 ierr = PetscObjectReference((PetscObject)(*is)[s]);CHKERRQ(ierr); 1738 continue; 1739 } else { 1740 xis = is_local; 1741 subcomm = ((PetscObject)(*is)[s])->comm; 1742 } 1743 } /* if (q == 1) */ 1744 idx = NULL; 1745 ierr = PetscMalloc1(nidx,&idx);CHKERRQ(ierr); 1746 if (nidx) { 1747 k = 0; 1748 for (jj=ylow_loc; jj<yhigh_loc; ++jj) { 1749 PetscInt x0 = (jj==ylow_loc) ? xleft_loc : xleft; 1750 PetscInt x1 = (jj==yhigh_loc-1) ? xright_loc : xright; 1751 kk = dof*(M*jj + x0); 1752 for (ii=x0; ii<x1; ++ii) { 1753 for (d = 0; d < dof; ++d) { 1754 idx[k++] = kk++; 1755 } 1756 } 1757 } 1758 } 1759 ierr = ISCreateGeneral(subcomm,nidx,idx,PETSC_OWN_POINTER,(*xis)+s);CHKERRQ(ierr); 1760 if (split) { 1761 ierr = MPI_Comm_free(&subcomm);CHKERRQ(ierr); 1762 } 1763 }/* if (n[0]) */ 1764 }/* for (q = 0; q < 2; ++q) */ 1765 if (n[0]) ++s; 1766 xstart += maxwidth; 1767 } /* for (i = 0; i < Mdomains; ++i) */ 1768 ystart += maxheight; 1769 } /* for (j = 0; j < Ndomains; ++j) */ 1770 PetscFunctionReturn(0); 1771 } 1772 1773 /*@C 1774 PCGASMGetSubdomains - Gets the subdomains supported on this processor 1775 for the additive Schwarz preconditioner. 1776 1777 Not Collective 1778 1779 Input Parameter: 1780 . pc - the preconditioner context 1781 1782 Output Parameters: 1783 + n - the number of subdomains for this processor (default value = 1) 1784 . iis - the index sets that define the inner subdomains (without overlap) supported on this processor (can be NULL) 1785 - ois - the index sets that define the outer subdomains (with overlap) supported on this processor (can be NULL) 1786 1787 1788 Notes: 1789 The user is responsible for destroying the ISs and freeing the returned arrays. 1790 The IS numbering is in the parallel, global numbering of the vector. 1791 1792 Level: advanced 1793 1794 .seealso: PCGASMSetOverlap(), PCGASMGetSubKSP(), PCGASMCreateSubdomains2D(), 1795 PCGASMSetSubdomains(), PCGASMGetSubmatrices() 1796 @*/ 1797 PetscErrorCode PCGASMGetSubdomains(PC pc,PetscInt *n,IS *iis[],IS *ois[]) 1798 { 1799 PC_GASM *osm; 1800 PetscErrorCode ierr; 1801 PetscBool match; 1802 PetscInt i; 1803 1804 PetscFunctionBegin; 1805 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1806 ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr); 1807 if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Incorrect object type: expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name); 1808 osm = (PC_GASM*)pc->data; 1809 if (n) *n = osm->n; 1810 if (iis) { 1811 ierr = PetscMalloc1(osm->n, iis);CHKERRQ(ierr); 1812 } 1813 if (ois) { 1814 ierr = PetscMalloc1(osm->n, ois);CHKERRQ(ierr); 1815 } 1816 if (iis || ois) { 1817 for (i = 0; i < osm->n; ++i) { 1818 if (iis) (*iis)[i] = osm->iis[i]; 1819 if (ois) (*ois)[i] = osm->ois[i]; 1820 } 1821 } 1822 PetscFunctionReturn(0); 1823 } 1824 1825 /*@C 1826 PCGASMGetSubmatrices - Gets the local submatrices (for this processor 1827 only) for the additive Schwarz preconditioner. 1828 1829 Not Collective 1830 1831 Input Parameter: 1832 . pc - the preconditioner context 1833 1834 Output Parameters: 1835 + n - the number of matrices for this processor (default value = 1) 1836 - mat - the matrices 1837 1838 Notes: 1839 matrices returned by this routine have the same communicators as the index sets (IS) 1840 used to define subdomains in PCGASMSetSubdomains() 1841 Level: advanced 1842 1843 .seealso: PCGASMSetOverlap(), PCGASMGetSubKSP(), 1844 PCGASMCreateSubdomains2D(), PCGASMSetSubdomains(), PCGASMGetSubdomains() 1845 @*/ 1846 PetscErrorCode PCGASMGetSubmatrices(PC pc,PetscInt *n,Mat *mat[]) 1847 { 1848 PC_GASM *osm; 1849 PetscErrorCode ierr; 1850 PetscBool match; 1851 1852 PetscFunctionBegin; 1853 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1854 PetscValidIntPointer(n,2); 1855 if (mat) PetscValidPointer(mat,3); 1856 if (!pc->setupcalled) SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONGSTATE,"Must call after KSPSetUp() or PCSetUp()."); 1857 ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr); 1858 if (!match) SETERRQ2(PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Expected %s, got %s instead", PCGASM, ((PetscObject)pc)->type_name); 1859 osm = (PC_GASM*)pc->data; 1860 if (n) *n = osm->n; 1861 if (mat) *mat = osm->pmat; 1862 PetscFunctionReturn(0); 1863 } 1864 1865 /*@ 1866 PCGASMSetUseDMSubdomains - Indicates whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible. 1867 Logically Collective 1868 1869 Input Parameter: 1870 + pc - the preconditioner 1871 - flg - boolean indicating whether to use subdomains defined by the DM 1872 1873 Options Database Key: 1874 . -pc_gasm_dm_subdomains -pc_gasm_overlap -pc_gasm_total_subdomains 1875 1876 Level: intermediate 1877 1878 Notes: 1879 PCGASMSetSubdomains(), PCGASMSetTotalSubdomains() or PCGASMSetOverlap() take precedence over PCGASMSetUseDMSubdomains(), 1880 so setting PCGASMSetSubdomains() with nontrivial subdomain ISs or any of PCGASMSetTotalSubdomains() and PCGASMSetOverlap() 1881 automatically turns the latter off. 1882 1883 .seealso: PCGASMGetUseDMSubdomains(), PCGASMSetSubdomains(), PCGASMSetOverlap() 1884 PCGASMCreateSubdomains2D() 1885 @*/ 1886 PetscErrorCode PCGASMSetUseDMSubdomains(PC pc,PetscBool flg) 1887 { 1888 PC_GASM *osm = (PC_GASM*)pc->data; 1889 PetscErrorCode ierr; 1890 PetscBool match; 1891 1892 PetscFunctionBegin; 1893 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1894 PetscValidLogicalCollectiveBool(pc,flg,2); 1895 if (pc->setupcalled) SETERRQ(((PetscObject)pc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Not for a setup PC."); 1896 ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr); 1897 if (match) { 1898 if (!osm->user_subdomains && osm->N == PETSC_DETERMINE && osm->overlap < 0) { 1899 osm->dm_subdomains = flg; 1900 } 1901 } 1902 PetscFunctionReturn(0); 1903 } 1904 1905 /*@ 1906 PCGASMGetUseDMSubdomains - Returns flag indicating whether to use DMCreateDomainDecomposition() to define the subdomains, whenever possible. 1907 Not Collective 1908 1909 Input Parameter: 1910 . pc - the preconditioner 1911 1912 Output Parameter: 1913 . flg - boolean indicating whether to use subdomains defined by the DM 1914 1915 Level: intermediate 1916 1917 .seealso: PCGASMSetUseDMSubdomains(), PCGASMSetOverlap() 1918 PCGASMCreateSubdomains2D() 1919 @*/ 1920 PetscErrorCode PCGASMGetUseDMSubdomains(PC pc,PetscBool* flg) 1921 { 1922 PC_GASM *osm = (PC_GASM*)pc->data; 1923 PetscErrorCode ierr; 1924 PetscBool match; 1925 1926 PetscFunctionBegin; 1927 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1928 PetscValidBoolPointer(flg,2); 1929 ierr = PetscObjectTypeCompare((PetscObject)pc,PCGASM,&match);CHKERRQ(ierr); 1930 if (match) { 1931 if (flg) *flg = osm->dm_subdomains; 1932 } 1933 PetscFunctionReturn(0); 1934 } 1935