xref: /petsc/src/dm/impls/da/da2.c (revision 7b6bb2c608b6fc6714ef38fda02c2dbb91c82665)
1 
2 #include <private/daimpl.h>    /*I   "petscdmda.h"   I*/
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "DMView_DA_2d"
6 PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer)
7 {
8   PetscErrorCode ierr;
9   PetscMPIInt    rank;
10   PetscBool      iascii,isdraw,isbinary;
11   DM_DA          *dd = (DM_DA*)da->data;
12 #if defined(PETSC_HAVE_MATLAB_ENGINE)
13   PetscBool      ismatlab;
14 #endif
15 
16   PetscFunctionBegin;
17   ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
18 
19   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
20   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
21   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
22 #if defined(PETSC_HAVE_MATLAB_ENGINE)
23   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
24 #endif
25   if (iascii) {
26     PetscViewerFormat format;
27 
28     ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
29     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
30       DMDALocalInfo info;
31       ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
32       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
33       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);CHKERRQ(ierr);
34       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);CHKERRQ(ierr);
35       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36       ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37     } else {
38       ierr = DMView_DA_VTK(da,viewer);CHKERRQ(ierr);
39     }
40   } else if (isdraw) {
41     PetscDraw  draw;
42     double     ymin = -1*dd->s-1,ymax = dd->N+dd->s;
43     double     xmin = -1*dd->s-1,xmax = dd->M+dd->s;
44     double     x,y;
45     PetscInt   base,*idx;
46     char       node[10];
47     PetscBool  isnull;
48 
49     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
50     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
51     if (!dd->coordinates) {
52       ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
53     }
54     ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);
55 
56     /* first processor draw all node lines */
57     if (!rank) {
58       ymin = 0.0; ymax = dd->N - 1;
59       for (xmin=0; xmin<dd->M; xmin++) {
60         ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
61       }
62       xmin = 0.0; xmax = dd->M - 1;
63       for (ymin=0; ymin<dd->N; ymin++) {
64         ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
65       }
66     }
67     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
68     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
69 
70     /* draw my box */
71     ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w;
72     xmax =(dd->xe-1)/dd->w;
73     ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
74     ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
75     ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
76     ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
77 
78     /* put in numbers */
79     base = (dd->base)/dd->w;
80     for (y=ymin; y<=ymax; y++) {
81       for (x=xmin; x<=xmax; x++) {
82         sprintf(node,"%d",(int)base++);
83         ierr = PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);CHKERRQ(ierr);
84       }
85     }
86 
87     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
88     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
89     /* overlay ghost numbers, useful for error checking */
90     /* put in numbers */
91 
92     base = 0; idx = dd->idx;
93     ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe;
94     for (y=ymin; y<ymax; y++) {
95       for (x=xmin; x<xmax; x++) {
96         if ((base % dd->w) == 0) {
97           sprintf(node,"%d",(int)(idx[base]/dd->w));
98           ierr = PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);CHKERRQ(ierr);
99         }
100         base++;
101       }
102     }
103     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
104     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
105   } else if (isbinary){
106     ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
107 #if defined(PETSC_HAVE_MATLAB_ENGINE)
108   } else if (ismatlab) {
109     ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
110 #endif
111   } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name);
112   PetscFunctionReturn(0);
113 }
114 
115 /*
116       M is number of grid points
117       m is number of processors
118 
119 */
120 #undef __FUNCT__
121 #define __FUNCT__ "DMDASplitComm2d"
122 PetscErrorCode  DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
123 {
124   PetscErrorCode ierr;
125   PetscInt       m,n = 0,x = 0,y = 0;
126   PetscMPIInt    size,csize,rank;
127 
128   PetscFunctionBegin;
129   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
130   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
131 
132   csize = 4*size;
133   do {
134     if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
135     csize   = csize/4;
136 
137     m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
138     if (!m) m = 1;
139     while (m > 0) {
140       n = csize/m;
141       if (m*n == csize) break;
142       m--;
143     }
144     if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
145 
146     x = M/m + ((M % m) > ((csize-1) % m));
147     y = (N + (csize-1)/m)/n;
148   } while ((x < 4 || y < 4) && csize > 1);
149   if (size != csize) {
150     MPI_Group    entire_group,sub_group;
151     PetscMPIInt  i,*groupies;
152 
153     ierr = MPI_Comm_group(comm,&entire_group);CHKERRQ(ierr);
154     ierr = PetscMalloc(csize*sizeof(PetscInt),&groupies);CHKERRQ(ierr);
155     for (i=0; i<csize; i++) {
156       groupies[i] = (rank/csize)*csize + i;
157     }
158     ierr = MPI_Group_incl(entire_group,csize,groupies,&sub_group);CHKERRQ(ierr);
159     ierr = PetscFree(groupies);CHKERRQ(ierr);
160     ierr = MPI_Comm_create(comm,sub_group,outcomm);CHKERRQ(ierr);
161     ierr = MPI_Group_free(&entire_group);CHKERRQ(ierr);
162     ierr = MPI_Group_free(&sub_group);CHKERRQ(ierr);
163     ierr = PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);CHKERRQ(ierr);
164   } else {
165     *outcomm = comm;
166   }
167   PetscFunctionReturn(0);
168 }
169 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunction"
/*@C
       DMDASetLocalFunction - Caches in a DM a local function.

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  lf - the local function

   Level: intermediate

   Notes: The routine SNESDAFormFunction() uses the cached function to evaluate the user provided function.

.keywords:  distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni()
@*/
PetscErrorCode  DMDASetLocalFunction(DM da,DMDALocalFunction1 lf)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lf    = lf;  /* retrieved by DMDAGetLocalFunction() and invoked by DMDAFormFunction1() */
  PetscFunctionReturn(0);
}
197 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunctioni"
/*@C
       DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  lfi - the local function

   Level: intermediate

.keywords:  distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
PetscErrorCode  DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lfi = lfi;  /* invoked by DMDAFormFunctioni1() for a single stencil point */
  PetscFunctionReturn(0);
}
223 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalFunctionib"
/*@C
       DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  lfi - the local function

   Level: intermediate

.keywords:  distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
PetscErrorCode  DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lfib = lfi;  /* invoked by DMDAFormFunctionib1() for a whole point block */
  PetscFunctionReturn(0);
}
249 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunction_Private"
/* Caches the ADIC/ADIFOR-differentiated local function; invoked by DMDAComputeJacobian1WithAdic(). */
PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lf = ad_lf;
  PetscFunctionReturn(0);
}
260 
261 /*MC
262        DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR
263 
264    Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscErrorCode (*ad_lf)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
266 
267    Logically Collective on DMDA
268 
269    Input Parameter:
270 +  da - initial distributed array
271 -  ad_lfi - the local function as computed by ADIC/ADIFOR
272 
273    Level: intermediate
274 
275 .keywords:  distributed array, refine
276 
277 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
278           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
279 M*/
280 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunctioni_Private"
/* Caches the ADIC/ADIFOR-differentiated single-component local function. */
PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lfi = ad_lfi;
  PetscFunctionReturn(0);
}
291 
292 /*MC
293        DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR
294 
295    Synopsis:
   PetscErrorCode  DMDASetLocalAdicMFFunctioni(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
297 
298    Logically Collective on DMDA
299 
300    Input Parameter:
301 +  da - initial distributed array
302 -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
303 
304    Level: intermediate
305 
306 .keywords:  distributed array, refine
307 
308 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
309           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
310 M*/
311 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunctioni_Private"
/* Caches the ADIC/ADIFOR matrix-free single-component local function. */
PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lfi = admf_lfi;
  PetscFunctionReturn(0);
}
322 
323 /*MC
324        DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR
325 
326    Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscErrorCode (*ad_lf)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
328 
329    Logically Collective on DMDA
330 
331    Input Parameter:
332 +  da - initial distributed array
333 -  ad_lfi - the local function as computed by ADIC/ADIFOR
334 
335    Level: intermediate
336 
337 .keywords:  distributed array, refine
338 
339 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
340           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
341 M*/
342 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicFunctionib_Private"
/* Caches the ADIC/ADIFOR-differentiated point-block local function. */
PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adic_lfib = ad_lfi;
  PetscFunctionReturn(0);
}
353 
354 /*MC
355        DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR
356 
357    Synopsis:
   PetscErrorCode  DMDASetLocalAdicMFFunctionib(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
359 
360    Logically Collective on DMDA
361 
362    Input Parameter:
363 +  da - initial distributed array
364 -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
365 
366    Level: intermediate
367 
368 .keywords:  distributed array, refine
369 
370 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
371           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
372 M*/
373 
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalAdicMFFunctionib_Private"
/* Caches the ADIC/ADIFOR matrix-free point-block local function. */
PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->adicmf_lfib = admf_lfi;
  PetscFunctionReturn(0);
}
384 
385 /*MC
386        DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR
387 
388    Synopsis:
389    PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf)
390 
391    Logically Collective on DMDA
392 
393    Input Parameter:
394 +  da - initial distributed array
395 -  ad_lf - the local function as computed by ADIC/ADIFOR
396 
397    Level: intermediate
398 
399 .keywords:  distributed array, refine
400 
401 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
402           DMDASetLocalJacobian()
403 M*/
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "DMDASetLocalAdicMFFunction_Private"
407 PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf)
408 {
409   DM_DA          *dd = (DM_DA*)da->data;
410   PetscFunctionBegin;
411   PetscValidHeaderSpecific(da,DM_CLASSID,1);
412   dd->adicmf_lf = ad_lf;
413   PetscFunctionReturn(0);
414 }
415 
/*@C
       DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function

   Logically Collective on DMDA


   Input Parameters:
+  da - initial distributed array
-  lj - the local Jacobian

   Level: intermediate

   Notes: The cached function is retrieved with DMDAGetLocalJacobian().
          (The original text referred to SNESDAFormFunction(); presumably the
          SNES Jacobian routine is meant — verify against the SNES interface.)

.keywords:  distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
@*/
#undef __FUNCT__
#define __FUNCT__ "DMDASetLocalJacobian"
PetscErrorCode  DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj)
{
  DM_DA          *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  dd->lj    = lj;
  PetscFunctionReturn(0);
}
444 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalFunction"
/*@C
       DMDAGetLocalFunction - Gets from a DM the local function cached by DMDASetLocalFunction()

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  lf - the local function (may be PETSC_NULL to skip the query)

   Level: intermediate

.keywords:  distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction()
@*/
PetscErrorCode  DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lf) *lf = dd->lf;
  PetscFunctionReturn(0);
}
472 
#undef __FUNCT__
#define __FUNCT__ "DMDAGetLocalJacobian"
/*@C
       DMDAGetLocalJacobian - Gets from a DM the local Jacobian cached by DMDASetLocalJacobian()

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  lj - the local jacobian (may be PETSC_NULL to skip the query)

   Level: intermediate

.keywords:  distributed array, refine

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian()
@*/
PetscErrorCode  DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj)
{
  DM_DA *dd = (DM_DA*)da->data;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  if (lj) *lj = dd->lj;
  PetscFunctionReturn(0);
}
500 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction"
/*@
    DMDAFormFunction - Evaluates a user provided function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    lf - the user function, invoked as lf(&info,u,fu,w)
.    vu - input vector
.    vfu - output vector
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           This should eventually replace DMDAFormFunction1

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode  DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  /* lf is passed as a generic function pointer; cast it to the actual local-function signature */
  PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  /* expose the vectors as multidimensional arrays indexed by global grid indices */
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  ierr = (*f)(&info,u,fu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
540 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocal"
/*@C
   DMDAFormFunctionLocal - This is a universal function evaluation routine for
   a local DM function.  Unlike DMDAFormFunction(), it performs the ghost
   update on X before calling func.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector (global; ghost values are scattered internally)
.  F - function vector (global, not ghosted)
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode  DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,F,&fu);CHKERRQ(ierr);
  ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
587 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionLocalGhost"
/*@C
   DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for
   a local DM function, but the ghost values of the output are communicated and added.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function (writes into a ghosted local output vector)
.  X - input vector
.  F - function vector; zeroed on entry, then assembled by summing ghost contributions
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode  DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX, localF;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  /* both F and localF must start from zero so the ADD_VALUES assembly below is a pure sum */
  ierr = VecSet(F, 0.0);CHKERRQ(ierr);
  ierr = VecSet(localF, 0.0);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localF,&fu);CHKERRQ(ierr);
  ierr = (*func)(&info,u,fu,ctx);CHKERRQ(ierr);
  /* accumulate ghost-region contributions into the global vector */
  ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
640 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunction1"
/*@
    DMDAFormFunction1 - Evaluates a user provided function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector
.    vfu - output vector
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           The function called is the one cached by DMDASetLocalFunction().

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode  DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  /* CHKMEMQ brackets the user callback to catch memory corruption it causes */
  CHKMEMQ;
  ierr = (*dd->lf)(&info,u,fu,w);CHKERRQ(ierr);
  CHKMEMQ;

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
680 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioniTest1"
/*
   DMDAFormFunctioniTest1 - Debugging aid: checks that the pointwise function
   (DMDAFormFunctioni1(), cached via DMDASetLocalFunctioni()) agrees with the
   full local function (DMDAFormFunction1(), cached via DMDASetLocalFunction())
   at a random input, printing the norm of the difference and both vectors.
*/
PetscErrorCode  DMDAFormFunctioniTest1(DM da,void *w)
{
  Vec            vu,fu,fui;
  PetscErrorCode ierr;
  PetscInt       i,n;
  PetscScalar    *ui;
  PetscRandom    rnd;
  PetscReal      norm;

  PetscFunctionBegin;
  /* random ghosted input vector */
  ierr = DMGetLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&rnd);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
  ierr = VecSetRandom(vu,rnd);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(&rnd);CHKERRQ(ierr);

  ierr = DMGetGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(da,&fui);CHKERRQ(ierr);

  /* evaluate the whole function at once ... */
  ierr = DMDAFormFunction1(da,vu,fu,w);CHKERRQ(ierr);

  /* ... and entry by entry via the pointwise function */
  ierr = VecGetArray(fui,&ui);CHKERRQ(ierr);
  ierr = VecGetLocalSize(fui,&n);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = DMDAFormFunctioni1(da,i,vu,ui+i,w);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(fui,&ui);CHKERRQ(ierr);

  /* fui <- fui - fu; its norm should be zero if the two agree */
  ierr = VecAXPY(fui,-1.0,fu);CHKERRQ(ierr);
  ierr = VecNorm(fui,NORM_2,&norm);CHKERRQ(ierr);
  ierr = PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);CHKERRQ(ierr);
  ierr = VecView(fu,0);CHKERRQ(ierr);
  ierr = VecView(fui,0);CHKERRQ(ierr);

  ierr = DMRestoreLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fui);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
722 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctioni1"
/*@
    DMDAFormFunctioni1 - Evaluates a user provided point-wise function

   Input Parameters:
+    da - the DM that defines the grid
.    i - the component of the function we wish to compute (must be local)
.    vu - input vector
.    vfu - output value
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           Calls the function cached by DMDASetLocalFunctioni().

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode  DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i: decompose the flat local index into
     component (c) and grid coordinates (i,j,k), fastest-varying first */
  stencil.c = i % info.dof;
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfi)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
766 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormFunctionib1"
/*@
    DMDAFormFunctionib1 - Evaluates a user provided point-block function

   Input Parameters:
+    da - the DM that defines the grid
.    i - the component of the function we wish to compute (must be local,
         and must be the first component of a block)
.    vu - input vector
.    vfu - output value
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

           Calls the function cached by DMDASetLocalFunctionib().

    Level: advanced

.seealso: DMDAComputeJacobian1WithAdic()

@*/
PetscErrorCode  DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i; i must point at the start of a dof block */
  stencil.c = i % info.dof;
  if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  ierr = (*dd->lfib)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
810 
#if defined(new)
#undef __FUNCT__
#define __FUNCT__ "DMDAGetDiagonal_MFFD"
/*
  DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
    function lives on a DMDA

        y ~= (F(u + ha) - F(u))/h,
  where F = nonlinear function, as set by SNESSetFunction()
        u = current iterate
        h = difference interval

  NOTE(review): this routine is compiled out ("new" is never defined by the
  build) and is incomplete as written: it references ctx, i and w which are
  never declared, gI is used uninitialized, and the loop over nI has lost its
  opening "for" line — the stray closing brace below the nI++ does not match
  anything.  It must be finished before the #if guard is enabled.
*/
PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a)
{
  PetscScalar    h,*aa,*ww,v;
  PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
  PetscErrorCode ierr;
  PetscInt       gI,nI;
  MatStencil     stencil;
  DMDALocalInfo  info;

  PetscFunctionBegin;
  ierr = (*ctx->func)(0,U,a,ctx->funcctx);CHKERRQ(ierr);
  ierr = (*ctx->funcisetbase)(U,ctx->funcctx);CHKERRQ(ierr);

  ierr = VecGetArray(U,&ww);CHKERRQ(ierr);
  ierr = VecGetArray(a,&aa);CHKERRQ(ierr);

  nI = 0;
    /* choose a difference interval h based on the magnitude of ww[gI] */
    h  = ww[gI];
    if (h == 0.0) h = 1.0;
#if !defined(PETSC_USE_COMPLEX)
    if (h < umin && h >= 0.0)      h = umin;
    else if (h < 0.0 && h > -umin) h = -umin;
#else
    if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
    else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
#endif
    h     *= epsilon;

    /* forward difference of the pointwise function */
    ww[gI] += h;
    ierr          = (*ctx->funci)(i,w,&v,ctx->funcctx);CHKERRQ(ierr);
    aa[nI]  = (v - aa[nI])/h;
    ww[gI] -= h;
    nI++;
  }
  ierr = VecRestoreArray(U,&ww);CHKERRQ(ierr);
  ierr = VecRestoreArray(a,&aa);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
862 
863 #if defined(PETSC_HAVE_ADIC)
864 EXTERN_C_BEGIN
865 #include <adic/ad_utils.h>
866 EXTERN_C_END
867 
868 #undef __FUNCT__
869 #define __FUNCT__ "DMDAComputeJacobian1WithAdic"
870 /*@C
871     DMDAComputeJacobian1WithAdic - Evaluates a adiC provided Jacobian function on each processor that
872         share a DMDA
873 
874    Input Parameters:
875 +    da - the DM that defines the grid
876 .    vu - input vector (ghosted)
877 .    J - output matrix
878 -    w - any user data
879 
880    Level: advanced
881 
882     Notes: Does NOT do ghost updates on vu upon entry
883 
884 .seealso: DMDAFormFunction1()
885 
886 @*/
887 PetscErrorCode  DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w)
888 {
889   PetscErrorCode ierr;
890   PetscInt       gtdof,tdof;
891   PetscScalar    *ustart;
892   DMDALocalInfo  info;
893   void           *ad_u,*ad_f,*ad_ustart,*ad_fstart;
894   ISColoring     iscoloring;
895 
896   PetscFunctionBegin;
897   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
898 
899   PetscADResetIndep();
900 
901   /* get space for derivative objects.  */
902   ierr = DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);CHKERRQ(ierr);
903   ierr = DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
904   ierr = VecGetArray(vu,&ustart);CHKERRQ(ierr);
905   ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
906 
907   PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);
908 
909   ierr = VecRestoreArray(vu,&ustart);CHKERRQ(ierr);
910   ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);
911   ierr = PetscADIncrementTotalGradSize(iscoloring->n);CHKERRQ(ierr);
912   PetscADSetIndepDone();
913 
914   ierr = PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr);
915   ierr = (*dd->adic_lf)(&info,ad_u,ad_f,w);CHKERRQ(ierr);
916   ierr = PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr);
917 
918   /* stick the values into the matrix */
919   ierr = MatSetValuesAdic(J,(PetscScalar**)ad_fstart);CHKERRQ(ierr);
920 
921   /* return space for derivative objects.  */
922   ierr = DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);CHKERRQ(ierr);
923   ierr = DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
924   PetscFunctionReturn(0);
925 }
926 
927 #undef __FUNCT__
928 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdic"
929 /*@C
930     DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on
931     each processor that shares a DMDA.
932 
933     Input Parameters:
934 +   da - the DM that defines the grid
935 .   vu - Jacobian is computed at this point (ghosted)
936 .   v - product is done on this vector (ghosted)
937 .   fu - output vector = J(vu)*v (not ghosted)
938 -   w - any user data
939 
940     Notes:
941     This routine does NOT do ghost updates on vu upon entry.
942 
943    Level: advanced
944 
945 .seealso: DMDAFormFunction1()
946 
947 @*/
948 PetscErrorCode  DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w)
949 {
950   PetscErrorCode ierr;
951   PetscInt       i,gtdof,tdof;
952   PetscScalar    *avu,*av,*af,*ad_vustart,*ad_fstart;
953   DMDALocalInfo  info;
954   void           *ad_vu,*ad_f;
955 
956   PetscFunctionBegin;
957   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
958 
959   /* get space for derivative objects.  */
960   ierr = DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);CHKERRQ(ierr);
961   ierr = DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
962 
963   /* copy input vector into derivative object */
964   ierr = VecGetArray(vu,&avu);CHKERRQ(ierr);
965   ierr = VecGetArray(v,&av);CHKERRQ(ierr);
966   for (i=0; i<gtdof; i++) {
967     ad_vustart[2*i]   = avu[i];
968     ad_vustart[2*i+1] = av[i];
969   }
970   ierr = VecRestoreArray(vu,&avu);CHKERRQ(ierr);
971   ierr = VecRestoreArray(v,&av);CHKERRQ(ierr);
972 
973   PetscADResetIndep();
974   ierr = PetscADIncrementTotalGradSize(1);CHKERRQ(ierr);
975   PetscADSetIndepDone();
976 
977   ierr = (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);CHKERRQ(ierr);
978 
979   /* stick the values into the vector */
980   ierr = VecGetArray(f,&af);CHKERRQ(ierr);
981   for (i=0; i<tdof; i++) {
982     af[i] = ad_fstart[2*i+1];
983   }
984   ierr = VecRestoreArray(f,&af);CHKERRQ(ierr);
985 
986   /* return space for derivative objects.  */
987   ierr = DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);CHKERRQ(ierr);
988   ierr = DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
989   PetscFunctionReturn(0);
990 }
991 #endif
992 
993 #undef __FUNCT__
994 #define __FUNCT__ "DMDAComputeJacobian1"
995 /*@
996     DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that
997         share a DMDA
998 
999    Input Parameters:
1000 +    da - the DM that defines the grid
1001 .    vu - input vector (ghosted)
1002 .    J - output matrix
1003 -    w - any user data
1004 
1005     Notes: Does NOT do ghost updates on vu upon entry
1006 
1007     Level: advanced
1008 
1009 .seealso: DMDAFormFunction1()
1010 
1011 @*/
1012 PetscErrorCode  DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w)
1013 {
1014   PetscErrorCode ierr;
1015   void           *u;
1016   DMDALocalInfo  info;
1017   DM_DA          *dd = (DM_DA*)da->data;
1018 
1019   PetscFunctionBegin;
1020   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
1021   ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
1022   ierr = (*dd->lj)(&info,u,J,w);CHKERRQ(ierr);
1023   ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
1024   PetscFunctionReturn(0);
1025 }
1026 
1027 
#undef __FUNCT__
#define __FUNCT__ "DMDAComputeJacobian1WithAdifor"
/*
    DMDAComputeJacobian1WithAdifor - Evaluates an ADIFOR provided Jacobian local function on each processor that
        shares a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector (ghosted)
.    J - output matrix
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

.seealso: DMDAFormFunction1()

*/
PetscErrorCode  DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w)
{
  PetscErrorCode  ierr;
  PetscInt        i,Nc,N;
  ISColoringValue *color;
  DMDALocalInfo   info;
  PetscScalar     *u,*g_u,*g_f,*f = 0,*p_u;
  ISColoring      iscoloring;
  DM_DA          *dd = (DM_DA*)da->data;
  /* ADIFOR-generated routines use a Fortran-style calling convention: every
     argument by pointer, with the error code returned through the last one */
  void            (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
                  (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf;

  PetscFunctionBegin;
  ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
  Nc   = iscoloring->n;                      /* number of colors = width of the seed matrix */
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  N    = info.gxm*info.gym*info.gzm*info.dof; /* total ghosted dofs on this process */

  /* get space for derivative objects.  */
  ierr  = PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);CHKERRQ(ierr);
  ierr  = PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));CHKERRQ(ierr);
  /* seed g_u: each ghosted dof gets a 1 in the column matching its color */
  p_u   = g_u;
  color = iscoloring->colors;
  for (i=0; i<N; i++) {
    p_u[*color++] = 1.0;
    p_u          += Nc;
  }
  ierr = ISColoringDestroy(&iscoloring);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);CHKERRQ(ierr);

  /* Call the ADIFOR-differentiated local function (g_u was seeded above); it fills f and the derivative strip g_f */

  ierr = VecGetArray(vu,&u);CHKERRQ(ierr);
  (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);CHKERRQ(ierr);
  ierr = VecRestoreArray(vu,&u);CHKERRQ(ierr);

  /* stick the values into the matrix */
  /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
  ierr = MatSetValuesAdifor(J,Nc,g_f);CHKERRQ(ierr);

  /* return space for derivative objects.  */
  ierr = PetscFree(g_u);CHKERRQ(ierr);
  ierr = PetscFree2(g_f,f);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1090 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormJacobianLocal"
/*@C
   DMDAFormJacobianLocal - This is a universal Jacobian evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  J - Jacobian matrix
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode  DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx)
{
  Vec            localX;
  DMDALocalInfo  info;
  void           *u;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  /* expose the ghosted local vector as an array and hand it to the user's local Jacobian routine */
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = (*func)(&info,u,J,ctx);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1134 
#undef __FUNCT__
#define __FUNCT__ "DMDAMultiplyByJacobian1WithAD"
/*@C
    DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
    to a vector on each processor that shares a DMDA.

   Input Parameters:
+    da - the DM that defines the grid
.    u - Jacobian is computed at this point (ghosted)
.    v - product is done on this vector (ghosted)
.    f - output vector = J(u)*v (not ghosted)
-    w - any user data

    Notes:
    This routine does NOT do ghost updates on u and v upon entry.

    Automatically calls DMDAMultiplyByJacobian1WithAdifor() or DMDAMultiplyByJacobian1WithAdic()
    depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called.

   Level: advanced

.seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic()

@*/
PetscErrorCode  DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w)
{
  PetscErrorCode ierr;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  /* dispatch on whichever AD matrix-free function the user registered; ADIC takes priority */
  if (dd->adicmf_lf) {
#if defined(PETSC_HAVE_ADIC)
    ierr = DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);CHKERRQ(ierr);
#else
    SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
#endif
  } else if (dd->adiformf_lf) {
    ierr = DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);CHKERRQ(ierr);
  } else {
    SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using");
  }
  PetscFunctionReturn(0);
}
1178 
1179 
#undef __FUNCT__
#define __FUNCT__ "DMDAMultiplyByJacobian1WithAdifor"
/*@C
    DMDAMultiplyByJacobian1WithAdifor - Applies an ADIFOR provided Jacobian function on each processor that
        shares a DM to a vector

   Input Parameters:
+    da - the DM that defines the grid
.    u - Jacobian is computed at this point (ghosted)
.    v - product is done on this vector (ghosted)
.    f - output vector = J(u)*v (not ghosted)
-    w - any user data

    Notes: Does NOT do ghost updates on u and v upon entry

   Level: advanced

.seealso: DMDAFormFunction1()

@*/
PetscErrorCode  DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w)
{
  PetscErrorCode ierr;
  PetscScalar    *au,*av,*af,*awork;
  Vec            work;
  DMDALocalInfo  info;
  DM_DA          *dd = (DM_DA*)da->data;
  /* ADIFOR-generated routine: Fortran-style calling convention, error code via last argument */
  void           (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
                 (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);

  /* awork is scratch space for the generated code; freed back to the DM pool below */
  ierr = DMGetGlobalVector(da,&work);CHKERRQ(ierr);
  ierr = VecGetArray(u,&au);CHKERRQ(ierr);
  ierr = VecGetArray(v,&av);CHKERRQ(ierr);
  ierr = VecGetArray(f,&af);CHKERRQ(ierr);
  ierr = VecGetArray(work,&awork);CHKERRQ(ierr);
  (lf)(&info,au,av,awork,af,w,&ierr);CHKERRQ(ierr);
  ierr = VecRestoreArray(u,&au);CHKERRQ(ierr);
  ierr = VecRestoreArray(v,&av);CHKERRQ(ierr);
  ierr = VecRestoreArray(f,&af);CHKERRQ(ierr);
  ierr = VecRestoreArray(work,&awork);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&work);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
1227 
1228 #undef __FUNCT__
1229 #define __FUNCT__ "DMSetUp_DA_2D"
1230 PetscErrorCode  DMSetUp_DA_2D(DM da)
1231 {
1232   DM_DA                  *dd = (DM_DA*)da->data;
1233   const PetscInt         M            = dd->M;
1234   const PetscInt         N            = dd->N;
1235   PetscInt               m            = dd->m;
1236   PetscInt               n            = dd->n;
1237   const PetscInt         dof          = dd->w;
1238   const PetscInt         s            = dd->s;
1239   const DMDABoundaryType bx         = dd->bx;
1240   const DMDABoundaryType by         = dd->by;
1241   const DMDAStencilType  stencil_type = dd->stencil_type;
1242   PetscInt               *lx           = dd->lx;
1243   PetscInt               *ly           = dd->ly;
1244   MPI_Comm               comm;
1245   PetscMPIInt            rank,size;
1246   PetscInt               xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end,IXs,IXe,IYs,IYe;
1247   PetscInt               up,down,left,right,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn,*idx_cpy;
1248   const PetscInt         *idx_full;
1249   PetscInt               xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
1250   PetscInt               s_x,s_y; /* s proportionalized to w */
1251   PetscInt               sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0;
1252   Vec                    local,global;
1253   VecScatter             ltog,gtol;
1254   IS                     to,from,ltogis;
1255   PetscErrorCode         ierr;
1256 
1257   PetscFunctionBegin;
1258   ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);
1259 
1260   if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
1261   if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);
1262 
1263   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
1264   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
1265 
1266   dd->dim         = 2;
1267   ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr);
1268   ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr);
1269 
1270   if (m != PETSC_DECIDE) {
1271     if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);
1272     else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);
1273   }
1274   if (n != PETSC_DECIDE) {
1275     if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);
1276     else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);
1277   }
1278 
1279   if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
1280     if (n != PETSC_DECIDE) {
1281       m = size/n;
1282     } else if (m != PETSC_DECIDE) {
1283       n = size/m;
1284     } else {
1285       /* try for squarish distribution */
1286       m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
1287       if (!m) m = 1;
1288       while (m > 0) {
1289 	n = size/m;
1290 	if (m*n == size) break;
1291 	m--;
1292       }
1293       if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
1294     }
1295     if (m*n != size) SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
1296   } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition");
1297 
1298   if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
1299   if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);
1300 
1301   /*
1302      Determine locally owned region
1303      xs is the first local node number, x is the number of local nodes
1304   */
1305   if (!lx) {
1306     ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr);
1307     lx = dd->lx;
1308     for (i=0; i<m; i++) {
1309       lx[i] = M/m + ((M % m) > i);
1310     }
1311   }
1312   x  = lx[rank % m];
1313   xs = 0;
1314   for (i=0; i<(rank % m); i++) {
1315     xs += lx[i];
1316   }
1317 #if defined(PETSC_USE_DEBUG)
1318   left = xs;
1319   for (i=(rank % m); i<m; i++) {
1320     left += lx[i];
1321   }
1322   if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
1323 #endif
1324 
1325   /*
1326      Determine locally owned region
1327      ys is the first local node number, y is the number of local nodes
1328   */
1329   if (!ly) {
1330     ierr = PetscMalloc(n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr);
1331     ly = dd->ly;
1332     for (i=0; i<n; i++) {
1333       ly[i] = N/n + ((N % n) > i);
1334     }
1335   }
1336   y  = ly[rank/m];
1337   ys = 0;
1338   for (i=0; i<(rank/m); i++) {
1339     ys += ly[i];
1340   }
1341 #if defined(PETSC_USE_DEBUG)
1342   left = ys;
1343   for (i=(rank/m); i<n; i++) {
1344     left += ly[i];
1345   }
1346   if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
1347 #endif
1348 
1349   /*
1350    check if the scatter requires more than one process neighbor or wraps around
1351    the domain more than once
1352   */
1353   if ((x < s) && ((m > 1) || (bx == DMDA_BOUNDARY_PERIODIC))) {
1354     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
1355   }
1356   if ((y < s) && ((n > 1) || (by == DMDA_BOUNDARY_PERIODIC))) {
1357     SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
1358   }
1359   xe = xs + x;
1360   ye = ys + y;
1361 
1362   /* determine ghost region (Xs) and region scattered into (IXs)  */
1363   /* Assume No Periodicity */
1364   if (xs-s > 0) { Xs = xs - s; IXs = xs - s; } else { Xs = 0; IXs = 0; }
1365   if (xe+s <= M) { Xe = xe + s; IXe = xe + s; } else { Xe = M; IXe = M; }
1366   if (ys-s > 0) { Ys = ys - s; IYs = ys - s; } else { Ys = 0; IYs = 0; }
1367   if (ye+s <= N) { Ye = ye + s; IYe = ye + s; } else { Ye = N; IYe = N; }
1368 
1369   /* fix for periodicity/ghosted */
1370   if (bx) { Xs = xs - s; Xe = xe + s; }
1371   if (bx == DMDA_BOUNDARY_PERIODIC) { IXs = xs - s; IXe = xe + s; }
1372   if (by) { Ys = ys - s; Ye = ye + s; }
1373   if (by == DMDA_BOUNDARY_PERIODIC) { IYs = ys - s; IYe = ye + s; }
1374 
1375   /* Resize all X parameters to reflect w */
1376   s_x = s;
1377   s_y = s;
1378 
1379   /* determine starting point of each processor */
1380   nn    = x*y;
1381   ierr  = PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);CHKERRQ(ierr);
1382   ierr  = MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);CHKERRQ(ierr);
1383   bases[0] = 0;
1384   for (i=1; i<=size; i++) {
1385     bases[i] = ldims[i-1];
1386   }
1387   for (i=1; i<=size; i++) {
1388     bases[i] += bases[i-1];
1389   }
1390   base = bases[rank]*dof;
1391 
1392   /* allocate the base parallel and sequential vectors */
1393   dd->Nlocal = x*y*dof;
1394   ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
1395   ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
1396   dd->nlocal = (Xe-Xs)*(Ye-Ys)*dof;
1397   ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr);
1398   ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);
1399 
1400   /* generate appropriate vector scatters */
1401   /* local to global inserts non-ghost point region into global */
1402   ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
1403   ierr = ISCreateStride(comm,x*y*dof,start,1,&to);CHKERRQ(ierr);
1404 
1405   count = x*y;
1406   ierr = PetscMalloc(x*y*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1407   left = xs - Xs; right = left + x;
1408   down = ys - Ys; up = down + y;
1409   count = 0;
1410   for (i=down; i<up; i++) {
1411     for (j=left; j<right; j++) {
1412       idx[count++] = i*(Xe-Xs) + j;
1413     }
1414   }
1415 
1416   ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr);
1417   ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
1418   ierr = PetscLogObjectParent(dd,ltog);CHKERRQ(ierr);
1419   ierr = ISDestroy(&from);CHKERRQ(ierr);
1420   ierr = ISDestroy(&to);CHKERRQ(ierr);
1421 
1422   /* global to local must include ghost points within the domain,
1423      but not ghost points outside the domain that aren't periodic */
1424   if (stencil_type == DMDA_STENCIL_BOX) {
1425     count = (IXe-IXs)*(IYe-IYs);
1426     ierr  = PetscMalloc(count*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1427 
1428     left = IXs - Xs; right = left + (IXe-IXs);
1429     down = IYs - Ys; up = down + (IYe-IYs);
1430     count = 0;
1431     for (i=down; i<up; i++) {
1432       for (j=left; j<right; j++) {
1433         idx[count++] = j + i*(Xe-Xs);
1434       }
1435     }
1436     ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr);
1437 
1438   } else {
1439     /* must drop into cross shape region */
1440     /*       ---------|
1441             |  top    |
1442          |---         ---| up
1443          |   middle      |
1444          |               |
1445          ----         ---- down
1446             | bottom  |
1447             -----------
1448          Xs xs        xe Xe */
1449     count = (ys-IYs)*x + y*(IXe-IXs) + (IYe-ye)*x;
1450     ierr  = PetscMalloc(count*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1451 
1452     left = xs - Xs; right = left + x;
1453     down = ys - Ys; up = down + y;
1454     count = 0;
1455     /* bottom */
1456     for (i=(IYs-Ys); i<down; i++) {
1457       for (j=left; j<right; j++) {
1458         idx[count++] = j + i*(Xe-Xs);
1459       }
1460     }
1461     /* middle */
1462     for (i=down; i<up; i++) {
1463       for (j=(IXs-Xs); j<(IXe-Xs); j++) {
1464         idx[count++] = j + i*(Xe-Xs);
1465       }
1466     }
1467     /* top */
1468     for (i=up; i<up+IYe-ye; i++) {
1469       for (j=left; j<right; j++) {
1470         idx[count++] = j + i*(Xe-Xs);
1471       }
1472     }
1473     ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr);
1474   }
1475 
1476 
1477   /* determine who lies on each side of us stored in    n6 n7 n8
1478                                                         n3    n5
1479                                                         n0 n1 n2
1480   */
1481 
1482   /* Assume the Non-Periodic Case */
1483   n1 = rank - m;
1484   if (rank % m) {
1485     n0 = n1 - 1;
1486   } else {
1487     n0 = -1;
1488   }
1489   if ((rank+1) % m) {
1490     n2 = n1 + 1;
1491     n5 = rank + 1;
1492     n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
1493   } else {
1494     n2 = -1; n5 = -1; n8 = -1;
1495   }
1496   if (rank % m) {
1497     n3 = rank - 1;
1498     n6 = n3 + m; if (n6 >= m*n) n6 = -1;
1499   } else {
1500     n3 = -1; n6 = -1;
1501   }
1502   n7 = rank + m; if (n7 >= m*n) n7 = -1;
1503 
1504   if (bx == DMDA_BOUNDARY_PERIODIC && by == DMDA_BOUNDARY_PERIODIC) {
1505   /* Modify for Periodic Cases */
1506     /* Handle all four corners */
1507     if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
1508     if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
1509     if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
1510     if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;
1511 
1512     /* Handle Top and Bottom Sides */
1513     if (n1 < 0) n1 = rank + m * (n-1);
1514     if (n7 < 0) n7 = rank - m * (n-1);
1515     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
1516     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
1517     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
1518     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
1519 
1520     /* Handle Left and Right Sides */
1521     if (n3 < 0) n3 = rank + (m-1);
1522     if (n5 < 0) n5 = rank - (m-1);
1523     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
1524     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
1525     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
1526     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
1527   } else if (by == DMDA_BOUNDARY_PERIODIC) {  /* Handle Top and Bottom Sides */
1528     if (n1 < 0) n1 = rank + m * (n-1);
1529     if (n7 < 0) n7 = rank - m * (n-1);
1530     if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
1531     if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
1532     if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
1533     if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
1534   } else if (bx == DMDA_BOUNDARY_PERIODIC) { /* Handle Left and Right Sides */
1535     if (n3 < 0) n3 = rank + (m-1);
1536     if (n5 < 0) n5 = rank - (m-1);
1537     if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
1538     if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
1539     if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
1540     if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
1541   }
1542 
1543   ierr = PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);CHKERRQ(ierr);
1544   dd->neighbors[0] = n0;
1545   dd->neighbors[1] = n1;
1546   dd->neighbors[2] = n2;
1547   dd->neighbors[3] = n3;
1548   dd->neighbors[4] = rank;
1549   dd->neighbors[5] = n5;
1550   dd->neighbors[6] = n6;
1551   dd->neighbors[7] = n7;
1552   dd->neighbors[8] = n8;
1553 
1554   if (stencil_type == DMDA_STENCIL_STAR) {
1555     /* save corner processor numbers */
1556     sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
1557     n0 = n2 = n6 = n8 = -1;
1558   }
1559 
1560   ierr = PetscMalloc((Xe-Xs)*(Ye-Ys)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1561   ierr = PetscLogObjectMemory(da,(Xe-Xs)*(Ye-Ys)*sizeof(PetscInt));CHKERRQ(ierr);
1562 
1563   nn = 0;
1564   xbase = bases[rank];
1565   for (i=1; i<=s_y; i++) {
1566     if (n0 >= 0) { /* left below */
1567       x_t = lx[n0 % m];
1568       y_t = ly[(n0/m)];
1569       s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
1570       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1571     }
1572     if (n1 >= 0) { /* directly below */
1573       x_t = x;
1574       y_t = ly[(n1/m)];
1575       s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
1576       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1577     }
1578     if (n2 >= 0) { /* right below */
1579       x_t = lx[n2 % m];
1580       y_t = ly[(n2/m)];
1581       s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
1582       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1583     }
1584   }
1585 
1586   for (i=0; i<y; i++) {
1587     if (n3 >= 0) { /* directly left */
1588       x_t = lx[n3 % m];
1589       /* y_t = y; */
1590       s_t = bases[n3] + (i+1)*x_t - s_x;
1591       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1592     }
1593 
1594     for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */
1595 
1596     if (n5 >= 0) { /* directly right */
1597       x_t = lx[n5 % m];
1598       /* y_t = y; */
1599       s_t = bases[n5] + (i)*x_t;
1600       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1601     }
1602   }
1603 
1604   for (i=1; i<=s_y; i++) {
1605     if (n6 >= 0) { /* left above */
1606       x_t = lx[n6 % m];
1607       /* y_t = ly[(n6/m)]; */
1608       s_t = bases[n6] + (i)*x_t - s_x;
1609       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1610     }
1611     if (n7 >= 0) { /* directly above */
1612       x_t = x;
1613       /* y_t = ly[(n7/m)]; */
1614       s_t = bases[n7] + (i-1)*x_t;
1615       for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1616     }
1617     if (n8 >= 0) { /* right above */
1618       x_t = lx[n8 % m];
1619       /* y_t = ly[(n8/m)]; */
1620       s_t = bases[n8] + (i-1)*x_t;
1621       for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1622     }
1623   }
1624 
1625   ierr = ISCreateBlock(comm,dof,nn,idx,PETSC_COPY_VALUES,&from);CHKERRQ(ierr);
1626   ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
1627   ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
1628   ierr = ISDestroy(&to);CHKERRQ(ierr);
1629   ierr = ISDestroy(&from);CHKERRQ(ierr);
1630 
1631   if (stencil_type == DMDA_STENCIL_STAR) {
1632     n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
1633   }
1634 
1635   if ((stencil_type == DMDA_STENCIL_STAR) ||
1636       (bx && bx != DMDA_BOUNDARY_PERIODIC) ||
1637       (by && by != DMDA_BOUNDARY_PERIODIC)) {
1638     /*
1639         Recompute the local to global mappings, this time keeping the
1640       information about the cross corner processor numbers and any ghosted
1641       but not periodic indices.
1642     */
1643     nn = 0;
1644     xbase = bases[rank];
1645     for (i=1; i<=s_y; i++) {
1646       if (n0 >= 0) { /* left below */
1647         x_t = lx[n0 % m];
1648         y_t = ly[(n0/m)];
1649         s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
1650         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1651       } else if (xs-Xs > 0 && ys-Ys > 0) {
1652         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1653       }
1654       if (n1 >= 0) { /* directly below */
1655         x_t = x;
1656         y_t = ly[(n1/m)];
1657         s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
1658         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1659       } else if (ys-Ys > 0) {
1660         for (j=0; j<x; j++) { idx[nn++] = -1;}
1661       }
1662       if (n2 >= 0) { /* right below */
1663         x_t = lx[n2 % m];
1664         y_t = ly[(n2/m)];
1665         s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
1666         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1667       } else if (Xe-xe> 0 && ys-Ys > 0) {
1668         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1669       }
1670     }
1671 
1672     for (i=0; i<y; i++) {
1673       if (n3 >= 0) { /* directly left */
1674         x_t = lx[n3 % m];
1675         /* y_t = y; */
1676         s_t = bases[n3] + (i+1)*x_t - s_x;
1677         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1678       } else if (xs-Xs > 0) {
1679         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1680       }
1681 
1682       for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */
1683 
1684       if (n5 >= 0) { /* directly right */
1685         x_t = lx[n5 % m];
1686         /* y_t = y; */
1687         s_t = bases[n5] + (i)*x_t;
1688         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1689       } else if (Xe-xe > 0) {
1690         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1691       }
1692     }
1693 
1694     for (i=1; i<=s_y; i++) {
1695       if (n6 >= 0) { /* left above */
1696         x_t = lx[n6 % m];
1697         /* y_t = ly[(n6/m)]; */
1698         s_t = bases[n6] + (i)*x_t - s_x;
1699         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1700       } else if (xs-Xs > 0 && Ye-ye > 0) {
1701         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1702       }
1703       if (n7 >= 0) { /* directly above */
1704         x_t = x;
1705         /* y_t = ly[(n7/m)]; */
1706         s_t = bases[n7] + (i-1)*x_t;
1707         for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
1708       } else if (Ye-ye > 0) {
1709         for (j=0; j<x; j++) { idx[nn++] = -1;}
1710       }
1711       if (n8 >= 0) { /* right above */
1712         x_t = lx[n8 % m];
1713         /* y_t = ly[(n8/m)]; */
1714         s_t = bases[n8] + (i-1)*x_t;
1715         for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
1716       } else if (Xe-xe > 0 && Ye-ye > 0) {
1717         for (j=0; j<s_x; j++) { idx[nn++] = -1;}
1718       }
1719     }
1720   }
1721   /*
1722      Set the local to global ordering in the global vector, this allows use
1723      of VecSetValuesLocal().
1724   */
1725   ierr = ISCreateBlock(comm,dof,nn,idx,PETSC_OWN_POINTER,&ltogis);CHKERRQ(ierr);
1726   ierr = PetscMalloc(nn*dof*sizeof(PetscInt),&idx_cpy);CHKERRQ(ierr);
1727   ierr = PetscLogObjectMemory(da,nn*dof*sizeof(PetscInt));CHKERRQ(ierr);
1728   ierr = ISGetIndices(ltogis, &idx_full);
1729   ierr = PetscMemcpy(idx_cpy,idx_full,nn*dof*sizeof(PetscInt));CHKERRQ(ierr);
1730   ierr = ISRestoreIndices(ltogis, &idx_full);
1731   ierr = ISLocalToGlobalMappingCreateIS(ltogis,&da->ltogmap);CHKERRQ(ierr);
1732   ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr);
1733   ierr = ISDestroy(&ltogis);CHKERRQ(ierr);
1734   ierr = ISLocalToGlobalMappingBlock(da->ltogmap,dd->w,&da->ltogmapb);CHKERRQ(ierr);
1735   ierr = PetscLogObjectParent(da,da->ltogmap);CHKERRQ(ierr);
1736 
1737   ierr = PetscFree2(bases,ldims);CHKERRQ(ierr);
1738   dd->m  = m;  dd->n  = n;
1739   /* note petsc expects xs/xe/Xs/Xe to be multiplied by #dofs in many places */
1740   dd->xs = xs*dof; dd->xe = xe*dof; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1;
1741   dd->Xs = Xs*dof; dd->Xe = Xe*dof; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1;
1742 
1743   ierr = VecDestroy(&local);CHKERRQ(ierr);
1744   ierr = VecDestroy(&global);CHKERRQ(ierr);
1745 
1746   dd->gtol      = gtol;
1747   dd->ltog      = ltog;
1748   dd->idx       = idx_cpy;
1749   dd->Nl        = nn*dof;
1750   dd->base      = base;
1751   da->ops->view = DMView_DA_2d;
1752   dd->ltol = PETSC_NULL;
1753   dd->ao   = PETSC_NULL;
1754 
1755   PetscFunctionReturn(0);
1756 }
1757 
1758 #undef __FUNCT__
1759 #define __FUNCT__ "DMDACreate2d"
1760 /*@C
1761    DMDACreate2d -  Creates an object that will manage the communication of  two-dimensional
1762    regular array data that is distributed across some processors.
1763 
1764    Collective on MPI_Comm
1765 
1766    Input Parameters:
1767 +  comm - MPI communicator
1768 .  bx,by - type of ghost nodes the array has.
1769          Use one of DMDA_BOUNDARY_NONE, DMDA_BOUNDARY_GHOSTED, DMDA_BOUNDARY_PERIODIC.
1770 .  stencil_type - stencil type.  Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
1771 .  M,N - global dimension in each direction of the array (use -M and/or -N to indicate that it may be set to a different value
1772             from the command line with -da_grid_x <M> -da_grid_y <N>)
1773 .  m,n - corresponding number of processors in each dimension
1774          (or PETSC_DECIDE to have calculated)
1775 .  dof - number of degrees of freedom per node
1776 .  s - stencil width
1777 -  lx, ly - arrays containing the number of nodes in each cell along
1778            the x and y coordinates, or PETSC_NULL. If non-null, these
1779            must be of the same length as m and n, and the corresponding
1780            m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
1781            must be M, and the sum of the ly[] entries must be N.
1782 
1783    Output Parameter:
1784 .  da - the resulting distributed array object
1785 
1786    Options Database Key:
1787 +  -da_view - Calls DMView() at the conclusion of DMDACreate2d()
1788 .  -da_grid_x <nx> - number of grid points in x direction, if M < 0
1789 .  -da_grid_y <ny> - number of grid points in y direction, if N < 0
1790 .  -da_processors_x <nx> - number of processors in x direction
1791 .  -da_processors_y <ny> - number of processors in y direction
1792 .  -da_refine_x - refinement ratio in x direction
1793 -  -da_refine_y - refinement ratio in y direction
1794 
1795    Level: beginner
1796 
1797    Notes:
1798    The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the
1799    standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes
1800    the standard 9-pt stencil.
1801 
1802    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
1803    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
1804    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
1805 
1806 .keywords: distributed array, create, two-dimensional
1807 
1808 .seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(),
1809           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(),
1810           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges()
1811 
1812 @*/
PetscErrorCode  DMDACreate2d(MPI_Comm comm,DMDABoundaryType bx,DMDABoundaryType by,DMDAStencilType stencil_type,
                          PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Create the bare DMDA object, then configure it with the caller's
     parameters.  The ordering matters: all DMDASet*() calls must precede
     DMSetFromOptions()/DMSetUp(), which finalize the layout. */
  ierr = DMDACreate(comm, da);CHKERRQ(ierr);
  ierr = DMDASetDim(*da, 2);CHKERRQ(ierr);
  ierr = DMDASetSizes(*da, M, N, 1);CHKERRQ(ierr);                      /* z dimension is fixed at 1 for a 2d DMDA */
  ierr = DMDASetNumProcs(*da, m, n, PETSC_DECIDE);CHKERRQ(ierr);
  ierr = DMDASetBoundaryType(*da, bx, by, DMDA_BOUNDARY_NONE);CHKERRQ(ierr); /* no ghosting in the (unused) z direction */
  ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
  ierr = DMDASetStencilType(*da, stencil_type);CHKERRQ(ierr);
  ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
  ierr = DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);CHKERRQ(ierr); /* PETSC_NULL: no explicit z ownership */
  /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
  ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
  ierr = DMSetUp(*da);CHKERRQ(ierr);
  ierr = DMView_DA_Private(*da);CHKERRQ(ierr);                          /* implements -da_view (see Options Database Key above) */
  PetscFunctionReturn(0);
}
1834