xref: /petsc/src/dm/impls/da/dadd.c (revision 26f7fa13d894e02c8192cf9a0d4955dba2e1288c)
1 #include <petsc-private/daimpl.h>
2 
#undef __FUNCT__
#define __FUNCT__ "DMDACreatePatchIS"

/*
  DMDACreatePatchIS - Builds an index set of global PETSc (vector) indices
  covering the rectangular patch of grid points [lower, upper) on this DMDA,
  with periodic wraparound of patch coordinates that fall outside the domain.

  Input Parameters:
+ da    - the DMDA
. lower - stencil coordinates (i,j,k) of the lower-left corner of the patch (inclusive)
- upper - stencil coordinates (i,j,k) of the upper-right corner of the patch (exclusive)

  Output Parameter:
. is - IS of global indices for the patch, in x-fastest (natural) traversal order

  Notes: works by translating between the "natural" grid ordering and the
  PETSc (per-process contiguous) ordering with two PetscSFs over a layout of
  the global vector.
 */
PetscErrorCode DMDACreatePatchIS(DM da,MatStencil *lower,MatStencil *upper,IS *is)
{
  PetscErrorCode ierr;
  PetscInt       i,j,k,idx;
  PetscInt       ii,jj,kk;
  Vec            v;
  PetscInt       n,pn,bs;
  PetscMPIInt    rank;
  PetscSF        sf,psf;
  PetscLayout    map;
  MPI_Comm       comm;
  PetscInt       *natidx,*globidx,*leafidx;
  PetscInt       *pnatidx,*pleafidx;
  PetscInt       base;
  PetscInt       ox,oy,oz;
  DM_DA          *dd;

  PetscFunctionBegin;
  comm = ((PetscObject)da)->comm;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  dd = (DM_DA *)da->data;

  /* construct the natural mapping: probe a global vector for this process's
     local size, ownership base, and block size, then give it right back */
  ierr = DMGetGlobalVector(da,&v);CHKERRQ(ierr);
  ierr = VecGetLocalSize(v,&n);CHKERRQ(ierr);
  ierr = VecGetOwnershipRange(v,&base,PETSC_NULL);CHKERRQ(ierr);
  ierr = VecGetBlockSize(v,&bs);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&v);CHKERRQ(ierr);

  /* construct the layout describing the PETSc-ordered global vector */
  ierr = PetscLayoutCreate(comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,n);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);

  /* construct the list of natural indices on this process when PETSc ordering is considered */
  ierr = DMDAGetOffset(da,&ox,&oy,&oz);CHKERRQ(ierr);
  ierr = PetscMalloc(sizeof(PetscInt)*n,&natidx);CHKERRQ(ierr);
  ierr = PetscMalloc(sizeof(PetscInt)*n,&globidx);CHKERRQ(ierr);
  ierr = PetscMalloc(sizeof(PetscInt)*n,&leafidx);CHKERRQ(ierr);
  idx = 0;
  for (k=dd->zs;k<dd->ze;k++) {
    for (j=dd->ys;j<dd->ye;j++) {
      for (i=dd->xs;i<dd->xe;i++) {
        /* natural (grid-lexicographic) index of this dof; i already runs over
           dof-expanded x indices (dd->xs..dd->xe are scaled by dd->w) */
        natidx[idx] = i + dd->w*(j*dd->M + k*dd->M*dd->N);
        globidx[idx] = base + idx;   /* corresponding global PETSc index */
        leafidx[idx] = 0;
        idx++;
      }
    }
  }

  if (idx != n) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE, "for some reason the count is wrong.");

  /* construct the SF going from the natural indices to the local set of PETSc indices */
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(sf,map,n,PETSC_NULL,PETSC_OWN_POINTER,natidx);CHKERRQ(ierr);

  /* broadcast the global indices over to the corresponding natural indices */
  /* NOTE(review): these are Gather calls (leaf->root) under a comment that says
     "broadcast"; the intent appears to be depositing each PETSc global index at
     its natural-ordering root slot -- confirm against PetscSFGather semantics */
  ierr = PetscSFGatherBegin(sf,MPIU_INT,globidx,leafidx);CHKERRQ(ierr);
  ierr = PetscSFGatherEnd(sf,MPIU_INT,globidx,leafidx);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);


  /* number of dofs in the requested patch (dof-scaled in x via dd->w) */
  pn = dd->w*(upper->k - lower->k)*(upper->j - lower->j)*(upper->i - lower->i);
  ierr = PetscMalloc(sizeof(PetscInt)*pn,&pnatidx);CHKERRQ(ierr);
  ierr = PetscMalloc(sizeof(PetscInt)*pn,&pleafidx);CHKERRQ(ierr);
  idx = 0;
  for (k=lower->k-oz;k<upper->k-oz;k++) {
    for (j=lower->j-oy;j<upper->j-oy;j++) {
      for (i=dd->w*(lower->i-ox);i<dd->w*(upper->i-ox);i++) {
        /* wrap patch coordinates periodically into the global domain;
           C's % can be negative, hence the sign fixups below */
        ii = i % (dd->w*dd->M);
        jj = j % dd->N;
        kk = k % dd->P;
        if (ii < 0) ii = dd->w*dd->M + ii;
        if (jj < 0) jj = dd->N + jj;
        if (kk < 0) kk = dd->P + kk;
        pnatidx[idx] = ii + dd->w*(jj*dd->M + kk*dd->M*dd->N);
        idx++;
      }
    }
  }

  if (idx != pn) SETERRQ(comm,PETSC_ERR_ARG_WRONGSTATE, "for some reason the count is wrong");

  /* SF whose leaves are the patch points and whose roots are the natural layout */
  ierr = PetscSFCreate(comm,&psf);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(psf);CHKERRQ(ierr);
  ierr = PetscSFSetGraphLayout(psf,map,pn,PETSC_NULL,PETSC_OWN_POINTER,pnatidx);CHKERRQ(ierr);

  /* broadcast the global indices through to the patch */
  ierr = PetscSFBcastBegin(psf,MPIU_INT,leafidx,pleafidx);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(psf,MPIU_INT,leafidx,pleafidx);CHKERRQ(ierr);

  /* create the IS; it takes ownership of pleafidx */
  ierr = ISCreateGeneral(comm,pn,pleafidx,PETSC_OWN_POINTER,is);CHKERRQ(ierr);

  ierr = PetscSFDestroy(&psf);CHKERRQ(ierr);

  ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);

  ierr = PetscFree(globidx);CHKERRQ(ierr);
  ierr = PetscFree(leafidx);CHKERRQ(ierr);
  ierr = PetscFree(natidx);CHKERRQ(ierr);
  ierr = PetscFree(pnatidx);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
114 
#undef __FUNCT__
#define __FUNCT__ "DMDASubDomainDA_Private"
/*
  DMDASubDomainDA_Private - Creates a sequential (PETSC_COMM_SELF) DMDA
  covering this process's local portion of dm, extended by dd->overlap
  grid points in each direction where an overlap region exists (interior
  boundaries, or any boundary when that direction is periodic).

  Input Parameter:
. dm - the parallel DMDA being decomposed

  Output Parameter:
. dddm - the new serial subdomain DMDA (caller owns it)
 */
PetscErrorCode DMDASubDomainDA_Private(DM dm, DM *dddm)
{
  DM               da;
  DM_DA            *dd;
  PetscErrorCode   ierr;
  DMDALocalInfo    info;
  PetscReal        lmin[3],lmax[3];
  PetscInt         xsize,ysize,zsize;
  PetscInt         xo,yo,zo;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(dm,&info);CHKERRQ(ierr);
  ierr = DMDACreate(PETSC_COMM_SELF,&da);CHKERRQ(ierr);
  ierr = DMSetOptionsPrefix(da,"sub_");CHKERRQ(ierr);

  /* mirror the parent DA's dimension/dof/stencil configuration */
  ierr = DMDASetDim(da, info.dim);CHKERRQ(ierr);
  ierr = DMDASetDof(da, info.dof);CHKERRQ(ierr);

  ierr = DMDASetStencilType(da,info.st);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(da,info.sw);CHKERRQ(ierr);

  dd = (DM_DA *)dm->data;

  /* local with overlap: start from the owned box, then grow it on each side
     that has neighboring points (always, in a periodic direction) */
  xsize = info.xm;
  ysize = info.ym;
  zsize = info.zm;
  xo    = info.xs;
  yo    = info.ys;
  zo    = info.zs;
  /* extend the low side; the offset moves back so the box still covers xs..  */
  if (info.bx == DMDA_BOUNDARY_PERIODIC || (info.xs != 0)) {
    xsize += dd->overlap;
    xo -= dd->overlap;
  }
  if (info.by == DMDA_BOUNDARY_PERIODIC || (info.ys != 0)) {
    ysize += dd->overlap;
    yo    -= dd->overlap;
  }
  if (info.bz == DMDA_BOUNDARY_PERIODIC || (info.zs != 0)) {
    zsize += dd->overlap;
    zo    -= dd->overlap;
  }

  /* extend the high side; offset is unchanged */
  if (info.bx == DMDA_BOUNDARY_PERIODIC || (info.xs+info.xm != info.mx)) {
    xsize += dd->overlap;
  }
  if (info.by == DMDA_BOUNDARY_PERIODIC || (info.ys+info.ym != info.my)) {
    ysize += dd->overlap;
  }
  if (info.bz == DMDA_BOUNDARY_PERIODIC || (info.zs+info.zm != info.mz)) {
    zsize += dd->overlap;
  }

  ierr = DMDASetSizes(da, xsize, ysize, zsize);CHKERRQ(ierr);
  ierr = DMDASetNumProcs(da, 1, 1, 1);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(da, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_GHOSTED);CHKERRQ(ierr);

  /* set up as a block instead */
  ierr = DMSetUp(da);CHKERRQ(ierr);
  /* offset places the subdomain's (0,0,0) at its position in the parent grid */
  ierr = DMDASetOffset(da,xo,yo,zo);CHKERRQ(ierr);


  /* todo - nonuniform coordinates */
  ierr = DMDAGetLocalBoundingBox(dm,lmin,lmax);CHKERRQ(ierr);
  ierr = DMDASetUniformCoordinates(da,lmin[0],lmax[0],lmin[1],lmax[1],lmin[2],lmax[2]);CHKERRQ(ierr);

  /* record the parent's global sizes on the subdomain (dd now aliases the NEW da) */
  dd = (DM_DA *)da->data;
  dd->Mo = info.mx;
  dd->No = info.my;
  dd->Po = info.mz;

  *dddm = da;

  PetscFunctionReturn(0);
}
192 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecompositionScatters_DA"
/*
 Fills the local vector problem on the subdomain from the global problem.

 Right now this assumes one subdomain per processor.

 Creates up to three scatters (each output array is allocated with a single
 entry, since nsubdms must be 1):
   iscat - global vector -> subdomain global vector over the inner (owned) region
   oscat - global vector -> subdomain global vector over the outer (overlapped) region
   lscat - global vector -> subdomain LOCAL (ghosted) vector over the ghosted region
 Any of iscat/oscat/lscat may be PETSC_NULL to skip that scatter.
 */
PetscErrorCode DMCreateDomainDecompositionScatters_DA(DM dm,PetscInt nsubdms,DM *subdms,VecScatter **iscat,VecScatter **oscat, VecScatter **lscat)
{
  PetscErrorCode   ierr;
  DMDALocalInfo    info,subinfo;
  DM               subdm;
  MatStencil       upper,lower;
  IS               idis,isis,odis,osis,gdis;
  Vec              svec,dvec,slvec;

  PetscFunctionBegin;
  if (nsubdms != 1) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Cannot have more than one subdomain per processor (yet)");

  /* allocate the arrays of scatters (length 1 -- one subdomain) */
  if (iscat) {ierr = PetscMalloc(sizeof(VecScatter *),iscat);CHKERRQ(ierr);}
  if (oscat) {ierr = PetscMalloc(sizeof(VecScatter *),oscat);CHKERRQ(ierr);}
  if (lscat) {ierr = PetscMalloc(sizeof(VecScatter *),lscat);CHKERRQ(ierr);}

  ierr = DMDAGetLocalInfo(dm,&info);CHKERRQ(ierr);
  subdm = subdms[0];
  ierr = DMDAGetLocalInfo(subdm,&subinfo);CHKERRQ(ierr);

  /* create the global and subdomain index sets for the inner domain */
  /* TODO - make this actually support multiple subdomains -- subdomain needs to provide where it's nonoverlapping portion belongs */
  lower.i = info.xs;
  lower.j = info.ys;
  lower.k = info.zs;
  upper.i = info.xs+info.xm;
  upper.j = info.ys+info.ym;
  upper.k = info.zs+info.zm;
  ierr = DMDACreatePatchIS(dm,&lower,&upper,&idis);CHKERRQ(ierr);
  ierr = DMDACreatePatchIS(subdm,&lower,&upper,&isis);CHKERRQ(ierr);

  /* create the global and subdomain index sets for the outer subdomain */
  lower.i = subinfo.xs;
  lower.j = subinfo.ys;
  lower.k = subinfo.zs;
  upper.i = subinfo.xs+subinfo.xm;
  upper.j = subinfo.ys+subinfo.ym;
  upper.k = subinfo.zs+subinfo.zm;
  ierr = DMDACreatePatchIS(dm,&lower,&upper,&odis);CHKERRQ(ierr);
  ierr = DMDACreatePatchIS(subdm,&lower,&upper,&osis);CHKERRQ(ierr);

  /* global and subdomain ISes for the local indices of the subdomain */
  /* todo - make this not loop over at nonperiodic boundaries, which will be more involved */
  lower.i = subinfo.gxs;
  lower.j = subinfo.gys;
  lower.k = subinfo.gzs;
  upper.i = subinfo.gxs+subinfo.gxm;
  upper.j = subinfo.gys+subinfo.gym;
  upper.k = subinfo.gzs+subinfo.gzm;

  ierr = DMDACreatePatchIS(dm,&lower,&upper,&gdis);CHKERRQ(ierr);

  /* form the scatter; template vectors are only borrowed to size the scatters */
  ierr = DMGetGlobalVector(dm,&dvec);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(subdm,&svec);CHKERRQ(ierr);
  ierr = DMGetLocalVector(subdm,&slvec);CHKERRQ(ierr);

  if (iscat) {ierr = VecScatterCreate(dvec,idis,svec,isis,&(*iscat)[0]);CHKERRQ(ierr);}
  if (oscat) {ierr = VecScatterCreate(dvec,odis,svec,osis,&(*oscat)[0]);CHKERRQ(ierr);}
  /* PETSC_NULL target IS: scatter into the local vector in natural order */
  if (lscat) {ierr = VecScatterCreate(dvec,gdis,slvec,PETSC_NULL,&(*lscat)[0]);CHKERRQ(ierr);}

  ierr = DMRestoreGlobalVector(dm,&dvec);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(subdm,&svec);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(subdm,&slvec);CHKERRQ(ierr);

  ierr = ISDestroy(&idis);CHKERRQ(ierr);
  ierr = ISDestroy(&isis);CHKERRQ(ierr);

  ierr = ISDestroy(&odis);CHKERRQ(ierr);
  ierr = ISDestroy(&osis);CHKERRQ(ierr);

  ierr = ISDestroy(&gdis);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
276 
277 #undef __FUNCT__
278 #define __FUNCT__ "DMDASubDomainIS_Private"
279 /* We have that the interior regions are going to be the same, but the ghost regions might not match up
280 
281 ----------
282 ----------
283 --++++++o=
284 --++++++o=
285 --++++++o=
286 --++++++o=
287 --ooooooo=
288 --========
289 
290 Therefore, for each point in the overall, we must check if it's:
291 
292 1. +: In the interior of the global dm; it lines up
293 2. o: In the overlap region -- for now the same as 1; no overlap
294 3. =: In the shared ghost region -- handled by DMCreateDomainDecompositionLocalScatter()
295 4. -: In the nonshared ghost region
296  */
297 
298 PetscErrorCode DMDASubDomainIS_Private(DM dm,DM subdm,IS *iis,IS *ois)
299 {
300   PetscErrorCode   ierr;
301   DMDALocalInfo    info,subinfo;
302   MatStencil       lower,upper;
303 
304   PetscFunctionBegin;
305   ierr = DMDAGetLocalInfo(dm,&info);CHKERRQ(ierr);
306   ierr = DMDAGetLocalInfo(subdm,&subinfo);CHKERRQ(ierr);
307 
308   /* create the inner IS */
309   lower.i = info.xs;
310   lower.j = info.ys;
311   lower.k = info.zs;
312   upper.i = info.xs+info.xm;
313   upper.j = info.ys+info.ym;
314   upper.k = info.zs+info.zm;
315 
316   ierr = DMDACreatePatchIS(dm,&lower,&upper,iis);CHKERRQ(ierr);
317 
318   /* create the outer IS */
319   lower.i = subinfo.xs;
320   lower.j = subinfo.ys;
321   lower.k = subinfo.zs;
322   upper.i = subinfo.xs+subinfo.xm;
323   upper.j = subinfo.ys+subinfo.ym;
324   upper.k = subinfo.zs+subinfo.zm;
325   ierr = DMDACreatePatchIS(dm,&lower,&upper,ois);CHKERRQ(ierr);
326 
327   PetscFunctionReturn(0);
328 }
329 
330 #undef __FUNCT__
331 #define __FUNCT__ "DMCreateDomainDecomposition_DA"
332 PetscErrorCode DMCreateDomainDecomposition_DA(DM dm,PetscInt *len,char ***names,IS **iis,IS **ois,DM **subdm)
333 {
334   PetscErrorCode ierr;
335   IS             iis0,ois0;
336   DM             subdm0;
337   DM_DA          *dd = (DM_DA*)dm;
338 
339   PetscFunctionBegin;
340   /* fix to enable PCASM default behavior as taking overlap from the matrix */
341   if (!dd->decompositiondm) {
342     if (len)*len=0;
343     if (names)*names=0;
344     if (iis)*iis=0;
345     if (ois)*ois=0;
346     if (subdm)*subdm=0;
347     PetscFunctionReturn(0);
348   }
349 
350   if (len)*len = 1;
351 
352   if (iis) {ierr = PetscMalloc(sizeof(IS *),iis);CHKERRQ(ierr);}
353   if (ois) {ierr = PetscMalloc(sizeof(IS *),ois);CHKERRQ(ierr);}
354   if (subdm) {ierr = PetscMalloc(sizeof(DM *),subdm);CHKERRQ(ierr);}
355   if (names) {ierr = PetscMalloc(sizeof(char *),names);CHKERRQ(ierr);}
356   ierr = DMDASubDomainDA_Private(dm,&subdm0);CHKERRQ(ierr);
357   ierr = DMDASubDomainIS_Private(dm,subdm0,&iis0,&ois0);CHKERRQ(ierr);
358   if (iis) {
359     (*iis)[0] = iis0;
360   } else {
361     ierr = ISDestroy(&iis0);CHKERRQ(ierr);
362   }
363   if (ois) {
364     (*ois)[0] = ois0;
365   } else {
366     ierr = ISDestroy(&ois0);CHKERRQ(ierr);
367   }
368   if (subdm) (*subdm)[0] = subdm0;
369   if (names) (*names)[0] = 0;
370   PetscFunctionReturn(0);
371 }
372 
373 #undef __FUNCT__
374 #define __FUNCT__ "DMCreateDomainDecompositionDM_DA"
375 PetscErrorCode DMCreateDomainDecompositionDM_DA(DM dm,const char *name,DM *ddm)
376 {
377   DM_DA          *dd = (DM_DA*)dm;
378   PetscBool      flg;
379   PetscErrorCode ierr;
380 
381   PetscFunctionBegin;
382   ierr = PetscStrcmp(name,"default",&flg);CHKERRQ(ierr);
383   if (flg) {
384     dd->decompositiondm = PETSC_TRUE;
385     *ddm = dm;
386   } else {
387     dd->decompositiondm = PETSC_FALSE;
388   }
389   PetscFunctionReturn(0);
390 }
391