xref: /petsc/src/dm/impls/da/da2.c (revision 8e181257e433d6fa85548ee14cf924180d19bb9b)
1 
2 #define PETSCDM_DLL
3 
4 #include "private/daimpl.h"    /*I   "petscdm.h"   I*/
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMView_DA_2d"
8 PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer)
9 {
10   PetscErrorCode ierr;
11   PetscMPIInt    rank;
12   PetscBool      iascii,isdraw,isbinary;
13   DM_DA          *dd = (DM_DA*)da->data;
14 #if defined(PETSC_HAVE_MATLAB_ENGINE)
15   PetscBool      ismatlab;
16 #endif
17 
18   PetscFunctionBegin;
19   ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
20 
21   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
22   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
23   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
24 #if defined(PETSC_HAVE_MATLAB_ENGINE)
25   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
26 #endif
27   if (iascii) {
28     PetscViewerFormat format;
29 
30     ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
31     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
32       DMDALocalInfo info;
33       ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
34       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);CHKERRQ(ierr);
35       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);CHKERRQ(ierr);
36       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37     }
38   } else if (isdraw) {
39     PetscDraw       draw;
40     double     ymin = -1*dd->s-1,ymax = dd->N+dd->s;
41     double     xmin = -1*dd->s-1,xmax = dd->M+dd->s;
42     double     x,y;
43     PetscInt   base,*idx;
44     char       node[10];
45     PetscBool  isnull;
46 
47     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
48     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
49     if (!dd->coordinates) {
50       ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
51     }
52     ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);
53 
54     /* first processor draw all node lines */
55     if (!rank) {
56       ymin = 0.0; ymax = dd->N - 1;
57       for (xmin=0; xmin<dd->M; xmin++) {
58         ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
59       }
60       xmin = 0.0; xmax = dd->M - 1;
61       for (ymin=0; ymin<dd->N; ymin++) {
62         ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
63       }
64     }
65     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
66     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
67 
68     /* draw my box */
69     ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w;
70     xmax =(dd->xe-1)/dd->w;
71     ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
72     ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
73     ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
74     ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
75 
76     /* put in numbers */
77     base = (dd->base)/dd->w;
78     for (y=ymin; y<=ymax; y++) {
79       for (x=xmin; x<=xmax; x++) {
80         sprintf(node,"%d",(int)base++);
81         ierr = PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);CHKERRQ(ierr);
82       }
83     }
84 
85     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
86     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
87     /* overlay ghost numbers, useful for error checking */
88     /* put in numbers */
89 
90     base = 0; idx = dd->idx;
91     ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe;
92     for (y=ymin; y<ymax; y++) {
93       for (x=xmin; x<xmax; x++) {
94         if ((base % dd->w) == 0) {
95           sprintf(node,"%d",(int)(idx[base]/dd->w));
96           ierr = PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);CHKERRQ(ierr);
97         }
98         base++;
99       }
100     }
101     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
102     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
103   } else if (isbinary){
104     ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
105 #if defined(PETSC_HAVE_MATLAB_ENGINE)
106   } else if (ismatlab) {
107     ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
108 #endif
109   } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name);
110   PetscFunctionReturn(0);
111 }
112 
113 /*
114       M is number of grid points
115       m is number of processors
116 
117 */
118 #undef __FUNCT__
119 #define __FUNCT__ "DMDASplitComm2d"
120 PetscErrorCode PETSCDM_DLLEXPORT DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
121 {
122   PetscErrorCode ierr;
123   PetscInt       m,n = 0,x = 0,y = 0;
124   PetscMPIInt    size,csize,rank;
125 
126   PetscFunctionBegin;
127   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
128   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
129 
130   csize = 4*size;
131   do {
132     if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
133     csize   = csize/4;
134 
135     m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
136     if (!m) m = 1;
137     while (m > 0) {
138       n = csize/m;
139       if (m*n == csize) break;
140       m--;
141     }
142     if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
143 
144     x = M/m + ((M % m) > ((csize-1) % m));
145     y = (N + (csize-1)/m)/n;
146   } while ((x < 4 || y < 4) && csize > 1);
147   if (size != csize) {
148     MPI_Group    entire_group,sub_group;
149     PetscMPIInt  i,*groupies;
150 
151     ierr = MPI_Comm_group(comm,&entire_group);CHKERRQ(ierr);
152     ierr = PetscMalloc(csize*sizeof(PetscInt),&groupies);CHKERRQ(ierr);
153     for (i=0; i<csize; i++) {
154       groupies[i] = (rank/csize)*csize + i;
155     }
156     ierr = MPI_Group_incl(entire_group,csize,groupies,&sub_group);CHKERRQ(ierr);
157     ierr = PetscFree(groupies);CHKERRQ(ierr);
158     ierr = MPI_Comm_create(comm,sub_group,outcomm);CHKERRQ(ierr);
159     ierr = MPI_Group_free(&entire_group);CHKERRQ(ierr);
160     ierr = MPI_Group_free(&sub_group);CHKERRQ(ierr);
161     ierr = PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);CHKERRQ(ierr);
162   } else {
163     *outcomm = comm;
164   }
165   PetscFunctionReturn(0);
166 }
167 
168 #undef __FUNCT__
169 #define __FUNCT__ "DMGetElements_DA_2d_P1"
170 PetscErrorCode DMGetElements_DA_2d_P1(DM da,PetscInt *n,const PetscInt *e[])
171 {
172   PetscErrorCode ierr;
173   DM_DA          *dd = (DM_DA*)da->data;
174   PetscInt       i,j,cnt,xs,xe = dd->xe,ys,ye = dd->ye,Xs = dd->Xs, Xe = dd->Xe, Ys = dd->Ys;
175 
176   PetscFunctionBegin;
177   if (!dd->e) {
178     if (dd->xs == Xs) xs = dd->xs; else xs = dd->xs - 1;
179     if (dd->ys == Ys) ys = dd->ys; else ys = dd->ys - 1;
180     dd->ne = 2*(xe - xs - 1)*(ye - ys - 1);
181     ierr   = PetscMalloc((1 + 3*dd->ne)*sizeof(PetscInt),&dd->e);CHKERRQ(ierr);
182     cnt    = 0;
183     for (j=ys; j<ye-1; j++) {
184       for (i=xs; i<xe-1; i++) {
185         dd->e[cnt]   = i - Xs + (j - Ys)*(Xe - Xs);
186         dd->e[cnt+1] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
187         dd->e[cnt+2] = i - Xs + (j - Ys + 1)*(Xe - Xs);
188 
189         dd->e[cnt+3] = i - Xs + 1 + (j - Ys + 1)*(Xe - Xs);
190         dd->e[cnt+4] = i - Xs + (j - Ys + 1)*(Xe - Xs);
191         dd->e[cnt+5] = i - Xs + 1 + (j - Ys)*(Xe - Xs);
192         cnt += 6;
193       }
194     }
195   }
196   *n = dd->ne;
197   *e = dd->e;
198   PetscFunctionReturn(0);
199 }
200 
201 #undef __FUNCT__
202 #define __FUNCT__ "DMDASetLocalFunction"
203 /*@C
204        DMDASetLocalFunction - Caches in a DM a local function.
205 
206    Logically Collective on DMDA
207 
208    Input Parameter:
209 +  da - initial distributed array
210 -  lf - the local function
211 
212    Level: intermediate
213 
214    Notes: The routine SNESDAFormFunction() uses this the cached function to evaluate the user provided function.
215 
216 .keywords:  distributed array, refine
217 
218 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni()
219 @*/
220 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalFunction(DM da,DMDALocalFunction1 lf)
221 {
222   DM_DA          *dd = (DM_DA*)da->data;
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(da,DM_CLASSID,1);
225   dd->lf    = lf;
226   PetscFunctionReturn(0);
227 }
228 
229 #undef __FUNCT__
230 #define __FUNCT__ "DMDASetLocalFunctioni"
231 /*@C
232        DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component
233 
234    Logically Collective on DMDA
235 
236    Input Parameter:
237 +  da - initial distributed array
238 -  lfi - the local function
239 
240    Level: intermediate
241 
242 .keywords:  distributed array, refine
243 
244 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
245 @*/
246 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
247 {
248   DM_DA          *dd = (DM_DA*)da->data;
249   PetscFunctionBegin;
250   PetscValidHeaderSpecific(da,DM_CLASSID,1);
251   dd->lfi = lfi;
252   PetscFunctionReturn(0);
253 }
254 
255 #undef __FUNCT__
256 #define __FUNCT__ "DMDASetLocalFunctionib"
257 /*@C
258        DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component
259 
260    Logically Collective on DMDA
261 
262    Input Parameter:
263 +  da - initial distributed array
264 -  lfi - the local function
265 
266    Level: intermediate
267 
268 .keywords:  distributed array, refine
269 
270 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
271 @*/
272 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
273 {
274   DM_DA          *dd = (DM_DA*)da->data;
275   PetscFunctionBegin;
276   PetscValidHeaderSpecific(da,DM_CLASSID,1);
277   dd->lfib = lfi;
278   PetscFunctionReturn(0);
279 }
280 
281 #undef __FUNCT__
282 #define __FUNCT__ "DMDASetLocalAdicFunction_Private"
283 PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf)
284 {
285   DM_DA          *dd = (DM_DA*)da->data;
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(da,DM_CLASSID,1);
288   dd->adic_lf = ad_lf;
289   PetscFunctionReturn(0);
290 }
291 
292 /*MC
293        DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR
294 
295    Synopsis:
296    PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)
297 
298    Logically Collective on DMDA
299 
300    Input Parameter:
301 +  da - initial distributed array
302 -  ad_lfi - the local function as computed by ADIC/ADIFOR
303 
304    Level: intermediate
305 
306 .keywords:  distributed array, refine
307 
308 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
309           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
310 M*/
311 
312 #undef __FUNCT__
313 #define __FUNCT__ "DMDASetLocalAdicFunctioni_Private"
314 PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
315 {
316   DM_DA          *dd = (DM_DA*)da->data;
317   PetscFunctionBegin;
318   PetscValidHeaderSpecific(da,DM_CLASSID,1);
319   dd->adic_lfi = ad_lfi;
320   PetscFunctionReturn(0);
321 }
322 
323 /*MC
324        DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR
325 
326    Synopsis:
327    PetscErrorCode  DMDASetLocalAdicFunctioni(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)
328 
329    Logically Collective on DMDA
330 
331    Input Parameter:
332 +  da - initial distributed array
333 -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
334 
335    Level: intermediate
336 
337 .keywords:  distributed array, refine
338 
339 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
340           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
341 M*/
342 
343 #undef __FUNCT__
344 #define __FUNCT__ "DMDASetLocalAdicMFFunctioni_Private"
345 PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
346 {
347   DM_DA          *dd = (DM_DA*)da->data;
348   PetscFunctionBegin;
349   PetscValidHeaderSpecific(da,DM_CLASSID,1);
350   dd->adicmf_lfi = admf_lfi;
351   PetscFunctionReturn(0);
352 }
353 
354 /*MC
355        DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR
356 
357    Synopsis:
358    PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscInt (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)
359 
360    Logically Collective on DMDA
361 
362    Input Parameter:
363 +  da - initial distributed array
364 -  ad_lfi - the local function as computed by ADIC/ADIFOR
365 
366    Level: intermediate
367 
368 .keywords:  distributed array, refine
369 
370 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
371           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
372 M*/
373 
374 #undef __FUNCT__
375 #define __FUNCT__ "DMDASetLocalAdicFunctionib_Private"
376 PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
377 {
378   DM_DA          *dd = (DM_DA*)da->data;
379   PetscFunctionBegin;
380   PetscValidHeaderSpecific(da,DM_CLASSID,1);
381   dd->adic_lfib = ad_lfi;
382   PetscFunctionReturn(0);
383 }
384 
385 /*MC
386        DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR
387 
388    Synopsis:
389    PetscErrorCode  DMDASetLocalAdicFunctionib(DM da,int (ad_lf*)(DMDALocalInfo*,MatStencil*,void*,void*,void*)
390 
391    Logically Collective on DMDA
392 
393    Input Parameter:
394 +  da - initial distributed array
395 -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
396 
397    Level: intermediate
398 
399 .keywords:  distributed array, refine
400 
401 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
402           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
403 M*/
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "DMDASetLocalAdicMFFunctionib_Private"
407 PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
408 {
409   DM_DA          *dd = (DM_DA*)da->data;
410   PetscFunctionBegin;
411   PetscValidHeaderSpecific(da,DM_CLASSID,1);
412   dd->adicmf_lfib = admf_lfi;
413   PetscFunctionReturn(0);
414 }
415 
416 /*MC
417        DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR
418 
419    Synopsis:
420    PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf)
421 
422    Logically Collective on DMDA
423 
424    Input Parameter:
425 +  da - initial distributed array
426 -  ad_lf - the local function as computed by ADIC/ADIFOR
427 
428    Level: intermediate
429 
430 .keywords:  distributed array, refine
431 
432 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
433           DMDASetLocalJacobian()
434 M*/
435 
436 #undef __FUNCT__
437 #define __FUNCT__ "DMDASetLocalAdicMFFunction_Private"
438 PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf)
439 {
440   DM_DA          *dd = (DM_DA*)da->data;
441   PetscFunctionBegin;
442   PetscValidHeaderSpecific(da,DM_CLASSID,1);
443   dd->adicmf_lf = ad_lf;
444   PetscFunctionReturn(0);
445 }
446 
447 /*@C
448        DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function
449 
450    Logically Collective on DMDA
451 
452 
453    Input Parameter:
454 +  da - initial distributed array
455 -  lj - the local Jacobian
456 
457    Level: intermediate
458 
459    Notes: The routine SNESDAFormFunction() uses this the cached function to evaluate the user provided function.
460 
461 .keywords:  distributed array, refine
462 
463 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
464 @*/
465 #undef __FUNCT__
466 #define __FUNCT__ "DMDASetLocalJacobian"
467 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj)
468 {
469   DM_DA          *dd = (DM_DA*)da->data;
470   PetscFunctionBegin;
471   PetscValidHeaderSpecific(da,DM_CLASSID,1);
472   dd->lj    = lj;
473   PetscFunctionReturn(0);
474 }
475 
476 #undef __FUNCT__
477 #define __FUNCT__ "DMDAGetLocalFunction"
478 /*@C
479        DMDAGetLocalFunction - Gets from a DM a local function and its ADIC/ADIFOR Jacobian
480 
481    Note Collective
482 
483    Input Parameter:
484 .  da - initial distributed array
485 
486    Output Parameter:
487 .  lf - the local function
488 
489    Level: intermediate
490 
491 .keywords:  distributed array, refine
492 
493 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction()
494 @*/
495 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf)
496 {
497   DM_DA          *dd = (DM_DA*)da->data;
498   PetscFunctionBegin;
499   PetscValidHeaderSpecific(da,DM_CLASSID,1);
500   if (lf)       *lf = dd->lf;
501   PetscFunctionReturn(0);
502 }
503 
504 #undef __FUNCT__
505 #define __FUNCT__ "DMDAGetLocalJacobian"
506 /*@C
507        DMDAGetLocalJacobian - Gets from a DM a local jacobian
508 
509    Not Collective
510 
511    Input Parameter:
512 .  da - initial distributed array
513 
514    Output Parameter:
515 .  lj - the local jacobian
516 
517    Level: intermediate
518 
519 .keywords:  distributed array, refine
520 
521 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian()
522 @*/
523 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj)
524 {
525   DM_DA          *dd = (DM_DA*)da->data;
526   PetscFunctionBegin;
527   PetscValidHeaderSpecific(da,DM_CLASSID,1);
528   if (lj) *lj = dd->lj;
529   PetscFunctionReturn(0);
530 }
531 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction"
/*@
    DMDAFormFunction - Evaluates a user provided function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector
.    vfu - output vector
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           This should eventually replace DMDAFormFunction1

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo    info;
  /* recast the opaque function pointer to the DMDALocalInfo calling convention */
  PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  /* ierr from the user function is deliberately NOT checked until the
     PETSc-exception case has restored the vector arrays; otherwise an early
     return would leave vu/vfu locked */
  ierr = (*f)(&info,u,fu,w);
  if (PetscExceptionValue(ierr)) {
    /* exception path: restore arrays before CHKERRQ propagates ierr */
    PetscErrorCode pierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
    pierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
  }
 CHKERRQ(ierr);

  /* normal path: restore the arrays obtained above */
  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
576 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocal"
/*@C
   DMDAFormFunctionLocal - This is a universal function evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  F - function vector
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX;
  DMDALocalInfo    info;
  void          *u;
  void          *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,F,&fu);CHKERRQ(ierr);
  /* ierr is checked only after the exception path has restored the arrays,
     so the vectors are not left locked when the user function throws a
     PETSc exception */
  ierr = (*func)(&info,u,fu,ctx);
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
    pierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(pierr);
  }
 CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(ierr);
  /* same pattern for the local work vector: return it on the exception path
     before propagating the error */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMRestoreLocalVector(da,&localX);CHKERRQ(pierr);
  }
 CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
632 
633 #undef __FUNCT__
634 #define __FUNCT__ "DMDAFormFunctionLocalGhost"
635 /*@C
636    DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for
637    a local DM function, but the ghost values of the output are communicated and added.
638 
639    Collective on DMDA
640 
641    Input Parameters:
642 +  da - the DM context
643 .  func - The local function
644 .  X - input vector
645 .  F - function vector
646 -  ctx - A user context
647 
648    Level: intermediate
649 
650 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
651           SNESSetFunction(), SNESSetJacobian()
652 
653 @*/
654 PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
655 {
656   Vec            localX, localF;
657   DMDALocalInfo    info;
658   void          *u;
659   void          *fu;
660   PetscErrorCode ierr;
661 
662   PetscFunctionBegin;
663   ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
664   ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
665   /*
666      Scatter ghost points to local vector, using the 2-step process
667         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
668   */
669   ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
670   ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
671   ierr = VecSet(F, 0.0);CHKERRQ(ierr);
672   ierr = VecSet(localF, 0.0);CHKERRQ(ierr);
673   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
674   ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
675   ierr = DMDAVecGetArray(da,localF,&fu);CHKERRQ(ierr);
676   ierr = (*func)(&info,u,fu,ctx);
677   if (PetscExceptionValue(ierr)) {
678     PetscErrorCode pierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
679     pierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(pierr);
680   }
681   CHKERRQ(ierr);
682   ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
683   ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
684   ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
685   ierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(ierr);
686   if (PetscExceptionValue(ierr)) {
687     PetscErrorCode pierr = DMRestoreLocalVector(da,&localX);CHKERRQ(pierr);
688   ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
689   }
690   CHKERRQ(ierr);
691   ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
692   ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
693   PetscFunctionReturn(0);
694 }
695 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction1"
/*@
    DMDAFormFunction1 - Evaluates a user provided function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector
.    vfu - output vector
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           Uses the local function cached by DMDASetLocalFunction().

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo    info;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  CHKMEMQ;
  /* ierr is checked only after the exception path has restored the arrays,
     so the vectors are not left locked */
  ierr = (*dd->lf)(&info,u,fu,w);
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
    pierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  CHKMEMQ;

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
741 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioniTest1"
/*
   DMDAFormFunctioniTest1 - Debugging aid: checks the cached point-wise
   function (DMDAFormFunctioni1) against the full local function
   (DMDAFormFunction1) on a random input and prints/views the difference.

   Input Parameters:
+  da - the DM that defines the grid
-  w  - user context passed through to the cached functions
*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctioniTest1(DM da,void *w)
{
  Vec            vu,fu,fui;
  PetscErrorCode ierr;
  PetscInt       i,n;
  PetscScalar    *ui;
  PetscRandom    rnd;
  PetscReal      norm;

  PetscFunctionBegin;
  /* random local (ghosted) input vector */
  ierr = DMGetLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&rnd);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
  ierr = VecSetRandom(vu,rnd);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(rnd);CHKERRQ(ierr);

  ierr = DMGetGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(da,&fui);CHKERRQ(ierr);

  /* fu = full function evaluation */
  ierr = DMDAFormFunction1(da,vu,fu,w);CHKERRQ(ierr);

  /* fui = the same result assembled one component at a time */
  ierr = VecGetArray(fui,&ui);CHKERRQ(ierr);
  ierr = VecGetLocalSize(fui,&n);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = DMDAFormFunctioni1(da,i,vu,ui+i,w);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(fui,&ui);CHKERRQ(ierr);

  /* report ||fui - fu||; should be zero if the two functions agree */
  ierr = VecAXPY(fui,-1.0,fu);CHKERRQ(ierr);
  ierr = VecNorm(fui,NORM_2,&norm);CHKERRQ(ierr);
  ierr = PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);CHKERRQ(ierr);
  ierr = VecView(fu,0);CHKERRQ(ierr);
  ierr = VecView(fui,0);CHKERRQ(ierr);

  ierr = DMRestoreLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fui);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
783 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioni1"
/*@
    DMDAFormFunctioni1 - Evaluates a user provided point-wise function

   Input Parameters:
+    da - the DM that defines the grid
.    i - the component of the function we wish to compute (must be local)
.    vu - input vector
.    vfu - output value
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           Uses the point-wise function cached by DMDASetLocalFunctioni().

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo    info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i: decompose the flat local index into
     component c and grid coordinates (i,j,k) assuming x-fastest ordering */
  stencil.c = i % info.dof;
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfi)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
827 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionib1"
/*@
    DMDAFormFunctionib1 - Evaluates a user provided point-block function

   Input Parameters:
+    da - the DM that defines the grid
.    i - the component of the function we wish to compute (must be local)
.    vu - input vector
.    vfu - output value
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           i must address the first component of a dof block; uses the
           point-block function cached by DMDASetLocalFunctionib().

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo    info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i; a nonzero component means i is not
     block-aligned, which is an error for point-block functions */
  stencil.c = i % info.dof;
  if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfib)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
871 
#if defined(new)
/* NOTE(review): this entire section is compiled only when the macro "new" is
   defined, which no visible build configuration sets.  As written it would
   not compile if enabled: `ctx`, `i` and `w` are undeclared, and there is a
   closing brace with no matching `for`/`while` opener.  Kept verbatim as a
   sketch of the intended algorithm. */
#undef __FUNCT__
#define __FUNCT__ "DMDAGetDiagonal_MFFD"
/*
  DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
    function lives on a DMDA

        y ~= (F(u + ha) - F(u))/h,
  where F = nonlinear function, as set by SNESSetFunction()
        u = current iterate
        h = difference interval
*/
PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a)
{
  PetscScalar    h,*aa,*ww,v;
  PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
  PetscErrorCode ierr;
  PetscInt       gI,nI;
  MatStencil     stencil;
  DMDALocalInfo    info;

  PetscFunctionBegin;
  ierr = (*ctx->func)(0,U,a,ctx->funcctx);CHKERRQ(ierr);
  ierr = (*ctx->funcisetbase)(U,ctx->funcctx);CHKERRQ(ierr);

  ierr = VecGetArray(U,&ww);CHKERRQ(ierr);
  ierr = VecGetArray(a,&aa);CHKERRQ(ierr);

  nI = 0;
    /* choose a difference interval h away from zero, preserving sign */
    h  = ww[gI];
    if (h == 0.0) h = 1.0;
#if !defined(PETSC_USE_COMPLEX)
    if (h < umin && h >= 0.0)      h = umin;
    else if (h < 0.0 && h > -umin) h = -umin;
#else
    if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
    else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
#endif
    h     *= epsilon;

    /* forward difference of the point-wise function at entry gI */
    ww[gI] += h;
    ierr          = (*ctx->funci)(i,w,&v,ctx->funcctx);CHKERRQ(ierr);
    aa[nI]  = (v - aa[nI])/h;
    ww[gI] -= h;
    nI++;
  }
  ierr = VecRestoreArray(U,&ww);CHKERRQ(ierr);
  ierr = VecRestoreArray(a,&aa);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
923 
924 #if defined(PETSC_HAVE_ADIC)
925 EXTERN_C_BEGIN
926 #include "adic/ad_utils.h"
927 EXTERN_C_END
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "DMDAComputeJacobian1WithAdic"
931 /*@C
932     DMDAComputeJacobian1WithAdic - Evaluates a adiC provided Jacobian function on each processor that
933         share a DMDA
934 
935    Input Parameters:
936 +    da - the DM that defines the grid
937 .    vu - input vector (ghosted)
938 .    J - output matrix
939 -    w - any user data
940 
941    Level: advanced
942 
943     Notes: Does NOT do ghost updates on vu upon entry
944 
945 .seealso: DMDAFormFunction1()
946 
947 @*/
948 PetscErrorCode PETSCDM_DLLEXPORT DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w)
949 {
950   PetscErrorCode ierr;
951   PetscInt       gtdof,tdof;
952   PetscScalar    *ustart;
953   DMDALocalInfo    info;
954   void           *ad_u,*ad_f,*ad_ustart,*ad_fstart;
955   ISColoring     iscoloring;
956 
957   PetscFunctionBegin;
958   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
959 
960   PetscADResetIndep();
961 
962   /* get space for derivative objects.  */
963   ierr = DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);CHKERRQ(ierr);
964   ierr = DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
965   ierr = VecGetArray(vu,&ustart);CHKERRQ(ierr);
966   ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
967 
968   PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);
969 
970   ierr = VecRestoreArray(vu,&ustart);CHKERRQ(ierr);
971   ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);
972   ierr = PetscADIncrementTotalGradSize(iscoloring->n);CHKERRQ(ierr);
973   PetscADSetIndepDone();
974 
975   ierr = PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr);
976   ierr = (*dd->adic_lf)(&info,ad_u,ad_f,w);CHKERRQ(ierr);
977   ierr = PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr);
978 
979   /* stick the values into the matrix */
980   ierr = MatSetValuesAdic(J,(PetscScalar**)ad_fstart);CHKERRQ(ierr);
981 
982   /* return space for derivative objects.  */
983   ierr = DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);CHKERRQ(ierr);
984   ierr = DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
985   PetscFunctionReturn(0);
986 }
987 
988 #undef __FUNCT__
989 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdic"
990 /*@C
991     DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on
992     each processor that shares a DMDA.
993 
994     Input Parameters:
995 +   da - the DM that defines the grid
996 .   vu - Jacobian is computed at this point (ghosted)
997 .   v - product is done on this vector (ghosted)
998 .   fu - output vector = J(vu)*v (not ghosted)
999 -   w - any user data
1000 
1001     Notes:
1002     This routine does NOT do ghost updates on vu upon entry.
1003 
1004    Level: advanced
1005 
1006 .seealso: DMDAFormFunction1()
1007 
1008 @*/
1009 PetscErrorCode PETSCDM_DLLEXPORT DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w)
1010 {
1011   PetscErrorCode ierr;
1012   PetscInt       i,gtdof,tdof;
1013   PetscScalar    *avu,*av,*af,*ad_vustart,*ad_fstart;
1014   DMDALocalInfo    info;
1015   void           *ad_vu,*ad_f;
1016 
1017   PetscFunctionBegin;
1018   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
1019 
1020   /* get space for derivative objects.  */
1021   ierr = DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);CHKERRQ(ierr);
1022   ierr = DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
1023 
1024   /* copy input vector into derivative object */
1025   ierr = VecGetArray(vu,&avu);CHKERRQ(ierr);
1026   ierr = VecGetArray(v,&av);CHKERRQ(ierr);
1027   for (i=0; i<gtdof; i++) {
1028     ad_vustart[2*i]   = avu[i];
1029     ad_vustart[2*i+1] = av[i];
1030   }
1031   ierr = VecRestoreArray(vu,&avu);CHKERRQ(ierr);
1032   ierr = VecRestoreArray(v,&av);CHKERRQ(ierr);
1033 
1034   PetscADResetIndep();
1035   ierr = PetscADIncrementTotalGradSize(1);CHKERRQ(ierr);
1036   PetscADSetIndepDone();
1037 
1038   ierr = (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);CHKERRQ(ierr);
1039 
1040   /* stick the values into the vector */
1041   ierr = VecGetArray(f,&af);CHKERRQ(ierr);
1042   for (i=0; i<tdof; i++) {
1043     af[i] = ad_fstart[2*i+1];
1044   }
1045   ierr = VecRestoreArray(f,&af);CHKERRQ(ierr);
1046 
1047   /* return space for derivative objects.  */
1048   ierr = DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);CHKERRQ(ierr);
1049   ierr = DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
1050   PetscFunctionReturn(0);
1051 }
1052 #endif
1053 
1054 #undef __FUNCT__
1055 #define __FUNCT__ "DMDAComputeJacobian1"
1056 /*@
1057     DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that
1058         share a DMDA
1059 
1060    Input Parameters:
1061 +    da - the DM that defines the grid
1062 .    vu - input vector (ghosted)
1063 .    J - output matrix
1064 -    w - any user data
1065 
1066     Notes: Does NOT do ghost updates on vu upon entry
1067 
1068     Level: advanced
1069 
1070 .seealso: DMDAFormFunction1()
1071 
1072 @*/
1073 PetscErrorCode PETSCDM_DLLEXPORT DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w)
1074 {
1075   PetscErrorCode ierr;
1076   void           *u;
1077   DMDALocalInfo    info;
1078   DM_DA          *dd = (DM_DA*)da->data;
1079 
1080   PetscFunctionBegin;
1081   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
1082   ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
1083   ierr = (*dd->lj)(&info,u,J,w);CHKERRQ(ierr);
1084   ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
1085   PetscFunctionReturn(0);
1086 }
1087 
1088 
#undef __FUNCT__
#define __FUNCT__ "DMDAComputeJacobian1WithAdifor"
/*
    DMDAComputeJacobian1WithAdifor - Evaluates a ADIFOR provided Jacobian local function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector (ghosted)
.    J - output matrix
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

.seealso: DMDAFormFunction1()

*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w)
{
  PetscErrorCode  ierr;
  PetscInt        i,Nc,N;
  ISColoringValue *color;
  DMDALocalInfo     info;
  PetscScalar     *u,*g_u,*g_f,*f = 0,*p_u;
  ISColoring      iscoloring;
  DM_DA          *dd = (DM_DA*)da->data;
  /* ADIFOR-generated derivative routine; reports errors through its final argument */
  void            (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
                  (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf;

  PetscFunctionBegin;
  ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
  Nc   = iscoloring->n;                 /* number of colors; captured before the coloring is destroyed */
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  N    = info.gxm*info.gym*info.gzm*info.dof;  /* total ghosted local scalar entries */

  /* get space for derivative objects.  */
  ierr  = PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);CHKERRQ(ierr);
  ierr  = PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));CHKERRQ(ierr);
  /* Seed the input array g_u with coloring information: for each entry,
     set a 1.0 in the slot of that entry's color (one Nc-wide row per entry) */
  p_u   = g_u;
  color = iscoloring->colors;
  for (i=0; i<N; i++) {
    p_u[*color++] = 1.0;
    p_u          += Nc;
  }
  ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);CHKERRQ(ierr);

  /* Run the differentiated local function; g_f receives Nc directional
     derivatives per owned entry, f receives the (unused) function value */

  ierr = VecGetArray(vu,&u);CHKERRQ(ierr);
  (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);CHKERRQ(ierr);
  ierr = VecRestoreArray(vu,&u);CHKERRQ(ierr);

  /* stick the values into the matrix */
  /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
  ierr = MatSetValuesAdifor(J,Nc,g_f);CHKERRQ(ierr);

  /* return space for derivative objects.  */
  ierr = PetscFree(g_u);CHKERRQ(ierr);
  ierr = PetscFree2(g_f,f);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1151 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormJacobianLocal"
/*@C
   DMDAFormJacobianLocal - This is a universal Jacobian evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  J - Jacobian matrix
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx)
{
  Vec            localX;
  DMDALocalInfo    info;
  void          *u;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  /* Call the user routine; do NOT CHKERRQ yet — on a recoverable exception
     the array must be restored before the error is propagated below */
  ierr = (*func)(&info,u,J,ctx);
  if (PetscExceptionValue(ierr)) {
    /* exception path: restore the array, then fall through to re-raise ierr */
    PetscErrorCode pierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  if (PetscExceptionValue(ierr)) {
    /* exception path: return the work vector, then fall through to re-raise ierr */
    PetscErrorCode pierr = DMRestoreLocalVector(da,&localX);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAD"
1206 /*@C
1207     DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
1208     to a vector on each processor that shares a DMDA.
1209 
1210    Input Parameters:
1211 +    da - the DM that defines the grid
1212 .    vu - Jacobian is computed at this point (ghosted)
1213 .    v - product is done on this vector (ghosted)
1214 .    fu - output vector = J(vu)*v (not ghosted)
1215 -    w - any user data
1216 
1217     Notes:
1218     This routine does NOT do ghost updates on vu and v upon entry.
1219 
1220     Automatically calls DMDAMultiplyByJacobian1WithAdifor() or DMDAMultiplyByJacobian1WithAdic()
1221     depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called.
1222 
1223    Level: advanced
1224 
1225 .seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic()
1226 
1227 @*/
1228 PetscErrorCode PETSCDM_DLLEXPORT DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w)
1229 {
1230   PetscErrorCode ierr;
1231   DM_DA          *dd = (DM_DA*)da->data;
1232 
1233   PetscFunctionBegin;
1234   if (dd->adicmf_lf) {
1235 #if defined(PETSC_HAVE_ADIC)
1236     ierr = DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);CHKERRQ(ierr);
1237 #else
1238     SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
1239 #endif
1240   } else if (dd->adiformf_lf) {
1241     ierr = DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);CHKERRQ(ierr);
1242   } else {
1243     SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using");
1244   }
1245   PetscFunctionReturn(0);
1246 }
1247 
1248 
1249 #undef __FUNCT__
1250 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdifor"
1251 /*@C
1252     DMDAMultiplyByJacobian1WithAdifor - Applies a ADIFOR provided Jacobian function on each processor that
1253         share a DM to a vector
1254 
1255    Input Parameters:
1256 +    da - the DM that defines the grid
1257 .    vu - Jacobian is computed at this point (ghosted)
1258 .    v - product is done on this vector (ghosted)
1259 .    fu - output vector = J(vu)*v (not ghosted)
1260 -    w - any user data
1261 
1262     Notes: Does NOT do ghost updates on vu and v upon entry
1263 
1264    Level: advanced
1265 
1266 .seealso: DMDAFormFunction1()
1267 
1268 @*/
1269 PetscErrorCode PETSCDM_DLLEXPORT DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w)
1270 {
1271   PetscErrorCode ierr;
1272   PetscScalar    *au,*av,*af,*awork;
1273   Vec            work;
1274   DMDALocalInfo    info;
1275   DM_DA          *dd = (DM_DA*)da->data;
1276   void           (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
1277                  (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf;
1278 
1279   PetscFunctionBegin;
1280   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
1281 
1282   ierr = DMGetGlobalVector(da,&work);CHKERRQ(ierr);
1283   ierr = VecGetArray(u,&au);CHKERRQ(ierr);
1284   ierr = VecGetArray(v,&av);CHKERRQ(ierr);
1285   ierr = VecGetArray(f,&af);CHKERRQ(ierr);
1286   ierr = VecGetArray(work,&awork);CHKERRQ(ierr);
1287   (lf)(&info,au,av,awork,af,w,&ierr);CHKERRQ(ierr);
1288   ierr = VecRestoreArray(u,&au);CHKERRQ(ierr);
1289   ierr = VecRestoreArray(v,&av);CHKERRQ(ierr);
1290   ierr = VecRestoreArray(f,&af);CHKERRQ(ierr);
1291   ierr = VecRestoreArray(work,&awork);CHKERRQ(ierr);
1292   ierr = DMRestoreGlobalVector(da,&work);CHKERRQ(ierr);
1293 
1294   PetscFunctionReturn(0);
1295 }
1296 
1297 #undef __FUNCT__
1298 #define __FUNCT__ "DMSetUp_DA_2D"
1299 PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_DA_2D(DM da)
1300 {
1301   DM_DA               *dd = (DM_DA*)da->data;
1302   const PetscInt       M            = dd->M;
1303   const PetscInt       N            = dd->N;
1304   PetscInt             m            = dd->m;
1305   PetscInt             n            = dd->n;
1306   const PetscInt       dof          = dd->w;
1307   const PetscInt       s            = dd->s;
1308   const DMDAPeriodicType wrap         = dd->wrap;
1309   const DMDAStencilType  stencil_type = dd->stencil_type;
1310   PetscInt            *lx           = dd->lx;
1311   PetscInt            *ly           = dd->ly;
1312   MPI_Comm             comm;
1313   PetscMPIInt          rank,size;
1314   PetscInt             xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end;
1315   PetscInt             up,down,left,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn;
1316   PetscInt             xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
1317   PetscInt             s_x,s_y; /* s proportionalized to w */
1318   PetscInt             sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0;
1319   Vec                  local,global;
1320   VecScatter           ltog,gtol;
1321   IS                   to,from;
1322   PetscErrorCode       ierr;
1323 
1324   PetscFunctionBegin;
1325   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
1326 
1327   if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
1328   if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);
1329 
1330   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1331   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1332 
1333   da->ops->getelements = DMGetElements_DA_2d_P1;
1334 
1335   dd->dim         = 2;
1336   dd->elementtype = DMDA_ELEMENT_P1;
1337   ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr);
1338   ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr);
1339 
1340   if (m != PETSC_DECIDE) {
1341     if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);
1342     else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);
1343   }
1344   if (n != PETSC_DECIDE) {
1345     if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);
1346     else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);
1347   }
1348 
1349   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
1350     if (n != PETSC_DECIDE) {
1351       m = size/n;
1352     } else if (m != PETSC_DECIDE) {
1353       n = size/m;
1354     } else {
1355       /* try for squarish distribution */
1356       m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
1357       if (!m) m = 1;
1358       while (m > 0) {
1359 	n = size/m;
1360 	if (m*n == size) break;
1361 	m--;
1362       }
1363       if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
1364     }
1365     if (m*n != size) SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
1366   } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition");
1367 
1368   if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
1369   if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);
1370 
1371   /*
1372      Determine locally owned region
1373      xs is the first local node number, x is the number of local nodes
1374   */
1375   if (!lx) {
1376     ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr);
1377     lx = dd->lx;
1378     for (i=0; i<m; i++) {
1379       lx[i] = M/m + ((M % m) > i);
1380     }
1381   }
1382   x  = lx[rank % m];
1383   xs = 0;
1384   for (i=0; i<(rank % m); i++) {
1385     xs += lx[i];
1386   }
1387 #if defined(PETSC_USE_DEBUG)
1388   left = xs;
1389   for (i=(rank % m); i<m; i++) {
1390     left += lx[i];
1391   }
1392   if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
1393 #endif
1394 
1395   /*
1396      Determine locally owned region
1397      ys is the first local node number, y is the number of local nodes
1398   */
1399   if (!ly) {
1400     ierr = PetscMalloc(n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr);
1401     ly = dd->ly;
1402     for (i=0; i<n; i++) {
1403       ly[i] = N/n + ((N % n) > i);
1404     }
1405   }
1406   y  = ly[rank/m];
1407   ys = 0;
1408   for (i=0; i<(rank/m); i++) {
1409     ys += ly[i];
1410   }
1411 #if defined(PETSC_USE_DEBUG)
1412   left = ys;
1413   for (i=(rank/m); i<n; i++) {
1414     left += ly[i];
1415   }
1416   if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
1417 #endif
1418 
1419   if (x < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
1420   if (y < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
1421   xe = xs + x;
1422   ye = ys + y;
1423 
1424   /* determine ghost region */
1425   /* Assume No Periodicity */
1426   if (xs-s > 0) Xs = xs - s; else Xs = 0;
1427   if (ys-s > 0) Ys = ys - s; else Ys = 0;
1428   if (xe+s <= M) Xe = xe + s; else Xe = M;
1429   if (ye+s <= N) Ye = ye + s; else Ye = N;
1430 
1431   /* X Periodic */
1432   if (DMDAXPeriodic(wrap)){
1433     Xs = xs - s;
1434     Xe = xe + s;
1435   }
1436 
1437   /* Y Periodic */
1438   if (DMDAYPeriodic(wrap)){
1439     Ys = ys - s;
1440     Ye = ye + s;
1441   }
1442 
1443   /* Resize all X parameters to reflect w */
1444   x   *= dof;
1445   xs  *= dof;
1446   xe  *= dof;
1447   Xs  *= dof;
1448   Xe  *= dof;
1449   s_x = s*dof;
1450   s_y = s;
1451 
1452   /* determine starting point of each processor */
1453   nn    = x*y;
1454   ierr  = PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);CHKERRQ(ierr);
1455   ierr  = MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);CHKERRQ(ierr);
1456   bases[0] = 0;
1457   for (i=1; i<=size; i++) {
1458     bases[i] = ldims[i-1];
1459   }
1460   for (i=1; i<=size; i++) {
1461     bases[i] += bases[i-1];
1462   }
1463 
1464   /* allocate the base parallel and sequential vectors */
1465   dd->Nlocal = x*y;
1466   ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
1467   ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
1468   dd->nlocal = (Xe-Xs)*(Ye-Ys);
1469   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr);
1470   ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);
1471 
1472 
1473   /* generate appropriate vector scatters */
1474   /* local to global inserts non-ghost point region into global */
1475   ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
1476   ierr = ISCreateStride(comm,x*y,start,1,&to);CHKERRQ(ierr);
1477 
1478   left  = xs - Xs; down  = ys - Ys; up    = down + y;
1479   ierr = PetscMalloc(x*(up - down)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1480   count = 0;
1481   for (i=down; i<up; i++) {
1482     for (j=0; j<x/dof; j++) {
1483       idx[count++] = (left + i*(Xe-Xs) + j*dof)/dof;
1484     }
1485   }
1486   ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr);
1487 
1488   ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
1489   ierr = PetscLogObjectParent(dd,ltog);CHKERRQ(ierr);
1490   ierr = ISDestroy(from);CHKERRQ(ierr);
1491   ierr = ISDestroy(to);CHKERRQ(ierr);
1492 
1493   /* global to local must include ghost points */
1494   if (stencil_type == DMDA_STENCIL_BOX) {
1495     ierr = ISCreateStride(comm,(Xe-Xs)*(Ye-Ys),0,1,&to);CHKERRQ(ierr);
1496   } else {
1497     /* must drop into cross shape region */
1498     /*       ---------|
1499             |  top    |
1500          |---         ---|
1501          |   middle      |
1502          |               |
1503          ----         ----
1504             | bottom  |
1505             -----------
1506         Xs xs        xe  Xe */
1507     /* bottom */
1508     left  = xs - Xs; down = ys - Ys; up    = down + y;
1509     count = down*(xe-xs) + (up-down)*(Xe-Xs) + (Ye-Ys-up)*(xe-xs);
1510     ierr  = PetscMalloc(count*sizeof(PetscInt)/dof,&idx);CHKERRQ(ierr);
1511     count = 0;
1512     for (i=0; i<down; i++) {
1513       for (j=0; j<xe-xs; j += dof) {
1514         idx[count++] = left + i*(Xe-Xs) + j;
1515       }
1516     }
1517     /* middle */
1518     for (i=down; i<up; i++) {
1519       for (j=0; j<Xe-Xs; j += dof) {
1520         idx[count++] = i*(Xe-Xs) + j;
1521       }
1522     }
1523     /* top */
1524     for (i=up; i<Ye-Ys; i++) {
1525       for (j=0; j<xe-xs; j += dof) {
1526         idx[count++] = (left + i*(Xe-Xs) + j);
1527       }
1528     }
1529     for (i=0; i<count; i++) idx[i] = idx[i]/dof;
1530     ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr);
1531   }
1532 
1533 
1534   /* determine who lies on each side of us stored in    n6 n7 n8
1535                                                         n3    n5
1536                                                         n0 n1 n2
1537   */
1538 
1539   /* Assume the Non-Periodic Case */
1540   n1 = rank - m;
1541   if (rank % m) {
1542     n0 = n1 - 1;
1543   } else {
1544     n0 = -1;
1545   }
1546   if ((rank+1) % m) {
1547     n2 = n1 + 1;
1548     n5 = rank + 1;
1549     n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
1550   } else {
1551     n2 = -1; n5 = -1; n8 = -1;
1552   }
1553   if (rank % m) {
1554     n3 = rank - 1;
1555     n6 = n3 + m; if (n6 >= m*n) n6 = -1;
1556   } else {
1557     n3 = -1; n6 = -1;
1558   }
1559   n7 = rank + m; if (n7 >= m*n) n7 = -1;
1560 
1561 
1562   /* Modify for Periodic Cases */
1563   if (wrap == DMDA_YPERIODIC) {  /* Handle Top and Bottom Sides */
1564     if (n1 < 0) n1 = rank + m * (n-1);
1565     if (n7 < 0) n7 = rank - m * (n-1);
1566     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
1567     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
1568     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
1569     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
1570   } else if (wrap == DMDA_XPERIODIC) { /* Handle Left and Right Sides */
1571     if (n3 < 0) n3 = rank + (m-1);
1572     if (n5 < 0) n5 = rank - (m-1);
1573     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
1574     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
1575     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
1576     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
1577   } else if (wrap == DMDA_XYPERIODIC) {
1578 
1579     /* Handle all four corners */
1580     if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
1581     if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
1582     if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
1583     if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;
1584 
1585     /* Handle Top and Bottom Sides */
1586     if (n1 < 0) n1 = rank + m * (n-1);
1587     if (n7 < 0) n7 = rank - m * (n-1);
1588     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
1589     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
1590     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
1591     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
1592 
1593     /* Handle Left and Right Sides */
1594     if (n3 < 0) n3 = rank + (m-1);
1595     if (n5 < 0) n5 = rank - (m-1);
1596     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
1597     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
1598     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
1599     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
1600   }
1601   ierr = PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);CHKERRQ(ierr);
1602   dd->neighbors[0] = n0;
1603   dd->neighbors[1] = n1;
1604   dd->neighbors[2] = n2;
1605   dd->neighbors[3] = n3;
1606   dd->neighbors[4] = rank;
1607   dd->neighbors[5] = n5;
1608   dd->neighbors[6] = n6;
1609   dd->neighbors[7] = n7;
1610   dd->neighbors[8] = n8;
1611 
1612   if (stencil_type == DMDA_STENCIL_STAR) {
1613     /* save corner processor numbers */
1614     sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
1615     n0 = n2 = n6 = n8 = -1;
1616   }
1617 
1618   ierr = PetscMalloc((x+2*s_x)*(y+2*s_y)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1619   ierr = PetscLogObjectMemory(da,(x+2*s_x)*(y+2*s_y)*sizeof(PetscInt));CHKERRQ(ierr);
1620   nn = 0;
1621 
1622   xbase = bases[rank];
1623   for (i=1; i<=s_y; i++) {
1624     if (n0 >= 0) { /* left below */
1625       x_t = lx[n0 % m]*dof;
1626       y_t = ly[(n0/m)];
1627       s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
1628       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1629     }
1630     if (n1 >= 0) { /* directly below */
1631       x_t = x;
1632       y_t = ly[(n1/m)];
1633       s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
1634       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1635     }
1636     if (n2 >= 0) { /* right below */
1637       x_t = lx[n2 % m]*dof;
1638       y_t = ly[(n2/m)];
1639       s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
1640       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1641     }
1642   }
1643 
1644   for (i=0; i<y; i++) {
1645     if (n3 >= 0) { /* directly left */
1646       x_t = lx[n3 % m]*dof;
1647       /* y_t = y; */
1648       s_t = bases[n3] + (i+1)*x_t - s_x;
1649       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1650     }
1651 
1652     for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */
1653 
1654     if (n5 >= 0) { /* directly right */
1655       x_t = lx[n5 % m]*dof;
1656       /* y_t = y; */
1657       s_t = bases[n5] + (i)*x_t;
1658       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1659     }
1660   }
1661 
1662   for (i=1; i<=s_y; i++) {
1663     if (n6 >= 0) { /* left above */
1664       x_t = lx[n6 % m]*dof;
1665       /* y_t = ly[(n6/m)]; */
1666       s_t = bases[n6] + (i)*x_t - s_x;
1667       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1668     }
1669     if (n7 >= 0) { /* directly above */
1670       x_t = x;
1671       /* y_t = ly[(n7/m)]; */
1672       s_t = bases[n7] + (i-1)*x_t;
1673       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1674     }
1675     if (n8 >= 0) { /* right above */
1676       x_t = lx[n8 % m]*dof;
1677       /* y_t = ly[(n8/m)]; */
1678       s_t = bases[n8] + (i-1)*x_t;
1679       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1680     }
1681   }
1682 
1683   base = bases[rank];
1684   {
1685     PetscInt nnn = nn/dof,*iidx;
1686     ierr = PetscMalloc(nnn*sizeof(PetscInt),&iidx);CHKERRQ(ierr);
1687     for (i=0; i<nnn; i++) {
1688       iidx[i] = idx[dof*i]/dof;
1689     }
1690     ierr = ISCreateBlock(comm,dof,nnn,iidx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr);
1691   }
1692   ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
1693   ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
1694   ierr = ISDestroy(to);CHKERRQ(ierr);
1695   ierr = ISDestroy(from);CHKERRQ(ierr);
1696 
1697   if (stencil_type == DMDA_STENCIL_STAR) {
1698     /*
1699         Recompute the local to global mappings, this time keeping the
1700       information about the cross corner processor numbers.
1701     */
1702     n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
1703     nn = 0;
1704     xbase = bases[rank];
1705     for (i=1; i<=s_y; i++) {
1706       if (n0 >= 0) { /* left below */
1707         x_t = lx[n0 % m]*dof;
1708         y_t = ly[(n0/m)];
1709         s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
1710         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1711       }
1712       if (n1 >= 0) { /* directly below */
1713         x_t = x;
1714         y_t = ly[(n1/m)];
1715         s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
1716         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1717       }
1718       if (n2 >= 0) { /* right below */
1719         x_t = lx[n2 % m]*dof;
1720         y_t = ly[(n2/m)];
1721         s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
1722         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1723       }
1724     }
1725 
1726     for (i=0; i<y; i++) {
1727       if (n3 >= 0) { /* directly left */
1728         x_t = lx[n3 % m]*dof;
1729         /* y_t = y; */
1730         s_t = bases[n3] + (i+1)*x_t - s_x;
1731         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1732       }
1733 
1734       for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */
1735 
1736       if (n5 >= 0) { /* directly right */
1737         x_t = lx[n5 % m]*dof;
1738         /* y_t = y; */
1739         s_t = bases[n5] + (i)*x_t;
1740         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1741       }
1742     }
1743 
1744     for (i=1; i<=s_y; i++) {
1745       if (n6 >= 0) { /* left above */
1746         x_t = lx[n6 % m]*dof;
1747         /* y_t = ly[(n6/m)]; */
1748         s_t = bases[n6] + (i)*x_t - s_x;
1749         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1750       }
1751       if (n7 >= 0) { /* directly above */
1752         x_t = x;
1753         /* y_t = ly[(n7/m)]; */
1754         s_t = bases[n7] + (i-1)*x_t;
1755         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1756       }
1757       if (n8 >= 0) { /* right above */
1758         x_t = lx[n8 % m]*dof;
1759         /* y_t = ly[(n8/m)]; */
1760         s_t = bases[n8] + (i-1)*x_t;
1761         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1762       }
1763     }
1764   }
1765   ierr = PetscFree2(bases,ldims);CHKERRQ(ierr);
1766 
1767   dd->m  = m;  dd->n  = n;
1768   dd->xs = xs; dd->xe = xe; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1;
1769   dd->Xs = Xs; dd->Xe = Xe; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1;
1770 
1771   ierr = VecDestroy(local);CHKERRQ(ierr);
1772   ierr = VecDestroy(global);CHKERRQ(ierr);
1773 
1774   dd->gtol      = gtol;
1775   dd->ltog      = ltog;
1776   dd->idx       = idx;
1777   dd->Nl        = nn;
1778   dd->base      = base;
1779   da->ops->view = DMView_DA_2d;
1780 
1781   /*
1782      Set the local to global ordering in the global vector, this allows use
1783      of VecSetValuesLocal().
1784   */
1785   ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_OWN_POINTER,&dd->ltogmap);CHKERRQ(ierr);
1786   ierr = ISLocalToGlobalMappingBlock(dd->ltogmap,dd->w,&dd->ltogmapb);CHKERRQ(ierr);
1787   ierr = PetscLogObjectParent(da,dd->ltogmap);CHKERRQ(ierr);
1788 
1789   dd->ltol = PETSC_NULL;
1790   dd->ao   = PETSC_NULL;
1791 
1792   PetscFunctionReturn(0);
1793 }
1794 
1795 #undef __FUNCT__
1796 #define __FUNCT__ "DMDACreate2d"
1797 /*@C
1798    DMDACreate2d -  Creates an object that will manage the communication of  two-dimensional
1799    regular array data that is distributed across some processors.
1800 
1801    Collective on MPI_Comm
1802 
1803    Input Parameters:
1804 +  comm - MPI communicator
.  wrap - type of periodicity the array should have.
         Use one of DMDA_NONPERIODIC, DMDA_XPERIODIC, DMDA_YPERIODIC, or DMDA_XYPERIODIC.
1807 .  stencil_type - stencil type.  Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
1808 .  M,N - global dimension in each direction of the array (use -M and or -N to indicate that it may be set to a different value
1809             from the command line with -da_grid_x <M> -da_grid_y <N>)
1810 .  m,n - corresponding number of processors in each dimension
1811          (or PETSC_DECIDE to have calculated)
1812 .  dof - number of degrees of freedom per node
1813 .  s - stencil width
-  lx, ly - arrays containing the number of nodes in each cell along
           the x and y coordinates, or PETSC_NULL. If non-null, these
           must be of length m and n respectively, and the corresponding
           m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
           must be M, and the sum of the ly[] entries must be N.
1819 
1820    Output Parameter:
1821 .  da - the resulting distributed array object
1822 
1823    Options Database Key:
1824 +  -da_view - Calls DMView() at the conclusion of DMDACreate2d()
1825 .  -da_grid_x <nx> - number of grid points in x direction, if M < 0
1826 .  -da_grid_y <ny> - number of grid points in y direction, if N < 0
1827 .  -da_processors_x <nx> - number of processors in x direction
1828 .  -da_processors_y <ny> - number of processors in y direction
1829 .  -da_refine_x - refinement ratio in x direction
1830 -  -da_refine_y - refinement ratio in y direction
1831 
1832    Level: beginner
1833 
1834    Notes:
1835    The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the
1836    standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes
1837    the standard 9-pt stencil.
1838 
1839    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
1840    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
1841    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
1842 
1843 .keywords: distributed array, create, two-dimensional
1844 
1845 .seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(),
1846           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(),
1847           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges()
1848 
1849 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMDACreate2d(MPI_Comm comm,DMDAPeriodicType wrap,DMDAStencilType stencil_type,
                          PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create an empty DMDA, then configure it with the 2d parameters.  All of
     the setter calls below must precede DMSetUp(), which performs the actual
     parallel decomposition; their relative order follows the standard PETSc
     builder sequence and should not be changed casually. */
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 2);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, N, 1);CHKERRQ(ierr);                 /* z dimension is fixed at 1 for a 2d DMDA */
  ierr = DMDASetNumProcs(*da, m, n, PETSC_DECIDE);CHKERRQ(ierr);   /* process counts; PETSC_DECIDE lets PETSc choose */
  ierr = DMDASetPeriodicity(*da, wrap);CHKERRQ(ierr);
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilType(*da, stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);CHKERRQ(ierr);  /* per-process node counts; may be PETSC_NULL */
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1870