xref: /petsc/src/dm/impls/da/da2.c (revision efe48dd8cf8b935accbbb9f4bcb20bc83865fa4d) !
1 
2 #define PETSCDM_DLL
3 
4 #include "private/daimpl.h"    /*I   "petscdm.h"   I*/
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMView_DA_2d"
8 PetscErrorCode DMView_DA_2d(DM da,PetscViewer viewer)
9 {
10   PetscErrorCode ierr;
11   PetscMPIInt    rank;
12   PetscBool      iascii,isdraw,isbinary;
13   DM_DA          *dd = (DM_DA*)da->data;
14 #if defined(PETSC_HAVE_MATLAB_ENGINE)
15   PetscBool      ismatlab;
16 #endif
17 
18   PetscFunctionBegin;
19   ierr = MPI_Comm_rank(((PetscObject)da)->comm,&rank);CHKERRQ(ierr);
20 
21   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
22   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
23   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
24 #if defined(PETSC_HAVE_MATLAB_ENGINE)
25   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERMATLAB,&ismatlab);CHKERRQ(ierr);
26 #endif
27   if (iascii) {
28     PetscViewerFormat format;
29 
30     ierr = PetscViewerGetFormat(viewer, &format);CHKERRQ(ierr);
31     if (format != PETSC_VIEWER_ASCII_VTK && format != PETSC_VIEWER_ASCII_VTK_CELL) {
32       DMDALocalInfo info;
33       ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
34       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"Processor [%d] M %D N %D m %D n %D w %D s %D\n",rank,dd->M,dd->N,dd->m,dd->n,dd->w,dd->s);CHKERRQ(ierr);
35       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"X range of indices: %D %D, Y range of indices: %D %D\n",info.xs,info.xs+info.xm,info.ys,info.ys+info.ym);CHKERRQ(ierr);
36       ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
37     } else {
38       ierr = DMView_DA_VTK(da,viewer);CHKERRQ(ierr);
39     }
40   } else if (isdraw) {
41     PetscDraw  draw;
42     double     ymin = -1*dd->s-1,ymax = dd->N+dd->s;
43     double     xmin = -1*dd->s-1,xmax = dd->M+dd->s;
44     double     x,y;
45     PetscInt   base,*idx;
46     char       node[10];
47     PetscBool  isnull;
48 
49     ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
50     ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
51     if (!dd->coordinates) {
52       ierr = PetscDrawSetCoordinates(draw,xmin,ymin,xmax,ymax);CHKERRQ(ierr);
53     }
54     ierr = PetscDrawSynchronizedClear(draw);CHKERRQ(ierr);
55 
56     /* first processor draw all node lines */
57     if (!rank) {
58       ymin = 0.0; ymax = dd->N - 1;
59       for (xmin=0; xmin<dd->M; xmin++) {
60         ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_BLACK);CHKERRQ(ierr);
61       }
62       xmin = 0.0; xmax = dd->M - 1;
63       for (ymin=0; ymin<dd->N; ymin++) {
64         ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_BLACK);CHKERRQ(ierr);
65       }
66     }
67     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
68     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
69 
70     /* draw my box */
71     ymin = dd->ys; ymax = dd->ye - 1; xmin = dd->xs/dd->w;
72     xmax =(dd->xe-1)/dd->w;
73     ierr = PetscDrawLine(draw,xmin,ymin,xmax,ymin,PETSC_DRAW_RED);CHKERRQ(ierr);
74     ierr = PetscDrawLine(draw,xmin,ymin,xmin,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
75     ierr = PetscDrawLine(draw,xmin,ymax,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
76     ierr = PetscDrawLine(draw,xmax,ymin,xmax,ymax,PETSC_DRAW_RED);CHKERRQ(ierr);
77 
78     /* put in numbers */
79     base = (dd->base)/dd->w;
80     for (y=ymin; y<=ymax; y++) {
81       for (x=xmin; x<=xmax; x++) {
82         sprintf(node,"%d",(int)base++);
83         ierr = PetscDrawString(draw,x,y,PETSC_DRAW_BLACK,node);CHKERRQ(ierr);
84       }
85     }
86 
87     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
88     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
89     /* overlay ghost numbers, useful for error checking */
90     /* put in numbers */
91 
92     base = 0; idx = dd->idx;
93     ymin = dd->Ys; ymax = dd->Ye; xmin = dd->Xs; xmax = dd->Xe;
94     for (y=ymin; y<ymax; y++) {
95       for (x=xmin; x<xmax; x++) {
96         if ((base % dd->w) == 0) {
97           sprintf(node,"%d",(int)(idx[base]/dd->w));
98           ierr = PetscDrawString(draw,x/dd->w,y,PETSC_DRAW_BLUE,node);CHKERRQ(ierr);
99         }
100         base++;
101       }
102     }
103     ierr = PetscDrawSynchronizedFlush(draw);CHKERRQ(ierr);
104     ierr = PetscDrawPause(draw);CHKERRQ(ierr);
105   } else if (isbinary){
106     ierr = DMView_DA_Binary(da,viewer);CHKERRQ(ierr);
107 #if defined(PETSC_HAVE_MATLAB_ENGINE)
108   } else if (ismatlab) {
109     ierr = DMView_DA_Matlab(da,viewer);CHKERRQ(ierr);
110 #endif
111   } else SETERRQ1(((PetscObject)da)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for DMDA 1d",((PetscObject)viewer)->type_name);
112   PetscFunctionReturn(0);
113 }
114 
115 /*
116       M is number of grid points
117       m is number of processors
118 
119 */
120 #undef __FUNCT__
121 #define __FUNCT__ "DMDASplitComm2d"
122 PetscErrorCode PETSCDM_DLLEXPORT DMDASplitComm2d(MPI_Comm comm,PetscInt M,PetscInt N,PetscInt sw,MPI_Comm *outcomm)
123 {
124   PetscErrorCode ierr;
125   PetscInt       m,n = 0,x = 0,y = 0;
126   PetscMPIInt    size,csize,rank;
127 
128   PetscFunctionBegin;
129   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
130   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
131 
132   csize = 4*size;
133   do {
134     if (csize % 4) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot split communicator of size %d tried %d %D %D",size,csize,x,y);
135     csize   = csize/4;
136 
137     m = (PetscInt)(0.5 + sqrt(((double)M)*((double)csize)/((double)N)));
138     if (!m) m = 1;
139     while (m > 0) {
140       n = csize/m;
141       if (m*n == csize) break;
142       m--;
143     }
144     if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
145 
146     x = M/m + ((M % m) > ((csize-1) % m));
147     y = (N + (csize-1)/m)/n;
148   } while ((x < 4 || y < 4) && csize > 1);
149   if (size != csize) {
150     MPI_Group    entire_group,sub_group;
151     PetscMPIInt  i,*groupies;
152 
153     ierr = MPI_Comm_group(comm,&entire_group);CHKERRQ(ierr);
154     ierr = PetscMalloc(csize*sizeof(PetscInt),&groupies);CHKERRQ(ierr);
155     for (i=0; i<csize; i++) {
156       groupies[i] = (rank/csize)*csize + i;
157     }
158     ierr = MPI_Group_incl(entire_group,csize,groupies,&sub_group);CHKERRQ(ierr);
159     ierr = PetscFree(groupies);CHKERRQ(ierr);
160     ierr = MPI_Comm_create(comm,sub_group,outcomm);CHKERRQ(ierr);
161     ierr = MPI_Group_free(&entire_group);CHKERRQ(ierr);
162     ierr = MPI_Group_free(&sub_group);CHKERRQ(ierr);
163     ierr = PetscInfo1(0,"DMDASplitComm2d:Creating redundant coarse problems of size %d\n",csize);CHKERRQ(ierr);
164   } else {
165     *outcomm = comm;
166   }
167   PetscFunctionReturn(0);
168 }
169 
170 #undef __FUNCT__
171 #define __FUNCT__ "DMDASetLocalFunction"
172 /*@C
173        DMDASetLocalFunction - Caches in a DM a local function.
174 
175    Logically Collective on DMDA
176 
177    Input Parameter:
178 +  da - initial distributed array
179 -  lf - the local function
180 
181    Level: intermediate
182 
   Notes: The routine SNESDAFormFunction() uses this cached function to evaluate the user-provided function.
184 
185 .keywords:  distributed array, refine
186 
187 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunctioni()
188 @*/
189 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalFunction(DM da,DMDALocalFunction1 lf)
190 {
191   DM_DA          *dd = (DM_DA*)da->data;
192   PetscFunctionBegin;
193   PetscValidHeaderSpecific(da,DM_CLASSID,1);
194   dd->lf    = lf;
195   PetscFunctionReturn(0);
196 }
197 
198 #undef __FUNCT__
199 #define __FUNCT__ "DMDASetLocalFunctioni"
200 /*@C
201        DMDASetLocalFunctioni - Caches in a DM a local function that evaluates a single component
202 
203    Logically Collective on DMDA
204 
205    Input Parameter:
206 +  da - initial distributed array
207 -  lfi - the local function
208 
209    Level: intermediate
210 
211 .keywords:  distributed array, refine
212 
213 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
214 @*/
215 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalFunctioni(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
216 {
217   DM_DA          *dd = (DM_DA*)da->data;
218   PetscFunctionBegin;
219   PetscValidHeaderSpecific(da,DM_CLASSID,1);
220   dd->lfi = lfi;
221   PetscFunctionReturn(0);
222 }
223 
224 #undef __FUNCT__
225 #define __FUNCT__ "DMDASetLocalFunctionib"
226 /*@C
227        DMDASetLocalFunctionib - Caches in a DM a block local function that evaluates a single component
228 
229    Logically Collective on DMDA
230 
231    Input Parameter:
232 +  da - initial distributed array
233 -  lfi - the local function
234 
235    Level: intermediate
236 
237 .keywords:  distributed array, refine
238 
239 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
240 @*/
241 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalFunctionib(DM da,PetscErrorCode (*lfi)(DMDALocalInfo*,MatStencil*,void*,PetscScalar*,void*))
242 {
243   DM_DA          *dd = (DM_DA*)da->data;
244   PetscFunctionBegin;
245   PetscValidHeaderSpecific(da,DM_CLASSID,1);
246   dd->lfib = lfi;
247   PetscFunctionReturn(0);
248 }
249 
250 #undef __FUNCT__
251 #define __FUNCT__ "DMDASetLocalAdicFunction_Private"
252 PetscErrorCode DMDASetLocalAdicFunction_Private(DM da,DMDALocalFunction1 ad_lf)
253 {
254   DM_DA          *dd = (DM_DA*)da->data;
255   PetscFunctionBegin;
256   PetscValidHeaderSpecific(da,DM_CLASSID,1);
257   dd->adic_lf = ad_lf;
258   PetscFunctionReturn(0);
259 }
260 
261 /*MC
262        DMDASetLocalAdicFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR
263 
264    Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctioni(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
266 
267    Logically Collective on DMDA
268 
269    Input Parameter:
270 +  da - initial distributed array
271 -  ad_lfi - the local function as computed by ADIC/ADIFOR
272 
273    Level: intermediate
274 
275 .keywords:  distributed array, refine
276 
277 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
278           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
279 M*/
280 
281 #undef __FUNCT__
282 #define __FUNCT__ "DMDASetLocalAdicFunctioni_Private"
283 PetscErrorCode DMDASetLocalAdicFunctioni_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
284 {
285   DM_DA          *dd = (DM_DA*)da->data;
286   PetscFunctionBegin;
287   PetscValidHeaderSpecific(da,DM_CLASSID,1);
288   dd->adic_lfi = ad_lfi;
289   PetscFunctionReturn(0);
290 }
291 
292 /*MC
293        DMDASetLocalAdicMFFunctioni - Caches in a DM a local functioni computed by ADIC/ADIFOR
294 
295    Synopsis:
   PetscErrorCode  DMDASetLocalAdicMFFunctioni(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
297 
298    Logically Collective on DMDA
299 
300    Input Parameter:
301 +  da - initial distributed array
302 -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
303 
304    Level: intermediate
305 
306 .keywords:  distributed array, refine
307 
308 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
309           DMDASetLocalJacobian(), DMDASetLocalFunctioni()
310 M*/
311 
312 #undef __FUNCT__
313 #define __FUNCT__ "DMDASetLocalAdicMFFunctioni_Private"
314 PetscErrorCode DMDASetLocalAdicMFFunctioni_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
315 {
316   DM_DA          *dd = (DM_DA*)da->data;
317   PetscFunctionBegin;
318   PetscValidHeaderSpecific(da,DM_CLASSID,1);
319   dd->adicmf_lfi = admf_lfi;
320   PetscFunctionReturn(0);
321 }
322 
323 /*MC
324        DMDASetLocalAdicFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR
325 
326    Synopsis:
   PetscErrorCode DMDASetLocalAdicFunctionib(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
328 
329    Logically Collective on DMDA
330 
331    Input Parameter:
332 +  da - initial distributed array
333 -  ad_lfi - the local function as computed by ADIC/ADIFOR
334 
335    Level: intermediate
336 
337 .keywords:  distributed array, refine
338 
339 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
340           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
341 M*/
342 
343 #undef __FUNCT__
344 #define __FUNCT__ "DMDASetLocalAdicFunctionib_Private"
345 PetscErrorCode DMDASetLocalAdicFunctionib_Private(DM da,PetscErrorCode (*ad_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
346 {
347   DM_DA          *dd = (DM_DA*)da->data;
348   PetscFunctionBegin;
349   PetscValidHeaderSpecific(da,DM_CLASSID,1);
350   dd->adic_lfib = ad_lfi;
351   PetscFunctionReturn(0);
352 }
353 
354 /*MC
355        DMDASetLocalAdicMFFunctionib - Caches in a DM a block local functioni computed by ADIC/ADIFOR
356 
357    Synopsis:
   PetscErrorCode  DMDASetLocalAdicMFFunctionib(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
359 
360    Logically Collective on DMDA
361 
362    Input Parameter:
363 +  da - initial distributed array
364 -  admf_lfi - the local matrix-free function as computed by ADIC/ADIFOR
365 
366    Level: intermediate
367 
368 .keywords:  distributed array, refine
369 
370 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
371           DMDASetLocalJacobian(), DMDASetLocalFunctionib()
372 M*/
373 
374 #undef __FUNCT__
375 #define __FUNCT__ "DMDASetLocalAdicMFFunctionib_Private"
376 PetscErrorCode DMDASetLocalAdicMFFunctionib_Private(DM da,PetscErrorCode (*admf_lfi)(DMDALocalInfo*,MatStencil*,void*,void*,void*))
377 {
378   DM_DA          *dd = (DM_DA*)da->data;
379   PetscFunctionBegin;
380   PetscValidHeaderSpecific(da,DM_CLASSID,1);
381   dd->adicmf_lfib = admf_lfi;
382   PetscFunctionReturn(0);
383 }
384 
385 /*MC
386        DMDASetLocalAdicMFFunction - Caches in a DM a local function computed by ADIC/ADIFOR
387 
388    Synopsis:
389    PetscErrorCode DMDASetLocalAdicMFFunction(DM da,DMDALocalFunction1 ad_lf)
390 
391    Logically Collective on DMDA
392 
393    Input Parameter:
394 +  da - initial distributed array
395 -  ad_lf - the local function as computed by ADIC/ADIFOR
396 
397    Level: intermediate
398 
399 .keywords:  distributed array, refine
400 
401 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction(),
402           DMDASetLocalJacobian()
403 M*/
404 
405 #undef __FUNCT__
406 #define __FUNCT__ "DMDASetLocalAdicMFFunction_Private"
407 PetscErrorCode DMDASetLocalAdicMFFunction_Private(DM da,DMDALocalFunction1 ad_lf)
408 {
409   DM_DA          *dd = (DM_DA*)da->data;
410   PetscFunctionBegin;
411   PetscValidHeaderSpecific(da,DM_CLASSID,1);
412   dd->adicmf_lf = ad_lf;
413   PetscFunctionReturn(0);
414 }
415 
416 /*@C
417        DMDASetLocalJacobian - Caches in a DM a local Jacobian computation function
418 
419    Logically Collective on DMDA
420 
421 
422    Input Parameter:
423 +  da - initial distributed array
424 -  lj - the local Jacobian
425 
426    Level: intermediate
427 
   Notes: The routine SNESDAFormFunction() uses this cached Jacobian to evaluate the user-provided Jacobian.
429 
430 .keywords:  distributed array, refine
431 
432 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalFunction()
433 @*/
434 #undef __FUNCT__
435 #define __FUNCT__ "DMDASetLocalJacobian"
436 PetscErrorCode PETSCDM_DLLEXPORT DMDASetLocalJacobian(DM da,DMDALocalFunction1 lj)
437 {
438   DM_DA          *dd = (DM_DA*)da->data;
439   PetscFunctionBegin;
440   PetscValidHeaderSpecific(da,DM_CLASSID,1);
441   dd->lj    = lj;
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "DMDAGetLocalFunction"
447 /*@C
448        DMDAGetLocalFunction - Gets from a DM a local function and its ADIC/ADIFOR Jacobian
449 
   Not Collective
451 
452    Input Parameter:
453 .  da - initial distributed array
454 
455    Output Parameter:
456 .  lf - the local function
457 
458    Level: intermediate
459 
460 .keywords:  distributed array, refine
461 
462 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalJacobian(), DMDASetLocalFunction()
463 @*/
464 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetLocalFunction(DM da,DMDALocalFunction1 *lf)
465 {
466   DM_DA *dd = (DM_DA*)da->data;
467   PetscFunctionBegin;
468   PetscValidHeaderSpecific(da,DM_CLASSID,1);
469   if (lf) *lf = dd->lf;
470   PetscFunctionReturn(0);
471 }
472 
473 #undef __FUNCT__
474 #define __FUNCT__ "DMDAGetLocalJacobian"
475 /*@C
476        DMDAGetLocalJacobian - Gets from a DM a local jacobian
477 
478    Not Collective
479 
480    Input Parameter:
481 .  da - initial distributed array
482 
483    Output Parameter:
484 .  lj - the local jacobian
485 
486    Level: intermediate
487 
488 .keywords:  distributed array, refine
489 
490 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDAGetLocalFunction(), DMDASetLocalJacobian()
491 @*/
492 PetscErrorCode PETSCDM_DLLEXPORT DMDAGetLocalJacobian(DM da,DMDALocalFunction1 *lj)
493 {
494   DM_DA *dd = (DM_DA*)da->data;
495   PetscFunctionBegin;
496   PetscValidHeaderSpecific(da,DM_CLASSID,1);
497   if (lj) *lj = dd->lj;
498   PetscFunctionReturn(0);
499 }
500 
501 #undef __FUNCT__
502 #define __FUNCT__ "DMDAFormFunction"
503 /*@
504     DMDAFormFunction - Evaluates a user provided function on each processor that
505         share a DMDA
506 
507    Input Parameters:
508 +    da - the DM that defines the grid
509 .    vu - input vector
510 .    vfu - output vector
511 -    w - any user data
512 
513     Notes: Does NOT do ghost updates on vu upon entry
514 
515            This should eventually replace DMDAFormFunction1
516 
517     Level: advanced
518 
519 .seealso: DMDAComputeJacobian1WithAdic()
520 
521 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunction(DM da,PetscErrorCode (*lf)(void),Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  /* the local function is passed as a generic pointer; cast it to the
     (info,in,out,ctx) calling sequence actually used */
  PetscErrorCode (*f)(DMDALocalInfo*,void*,void*,void*) = (PetscErrorCode (*)(DMDALocalInfo*,void*,void*,void*))lf;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  ierr = (*f)(&info,u,fu,w);
  /* if the user function raised an exception, restore the arrays before
     propagating; pierr is used so the user's error code in ierr survives */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
    pierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
  }
 CHKERRQ(ierr);

  /* normal path: restore both arrays */
  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
545 
546 #undef __FUNCT__
547 #define __FUNCT__ "DMDAFormFunctionLocal"
548 /*@C
549    DMDAFormFunctionLocal - This is a universal function evaluation routine for
550    a local DM function.
551 
552    Collective on DMDA
553 
554    Input Parameters:
555 +  da - the DM context
556 .  func - The local function
557 .  X - input vector
558 .  F - function vector
559 -  ctx - A user context
560 
561    Level: intermediate
562 
563 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
564           SNESSetFunction(), SNESSetJacobian()
565 
566 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctionLocal(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
{
  Vec            localX;
  DMDALocalInfo  info;
  void           *u;
  void           *fu;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,F,&fu);CHKERRQ(ierr);
  /* evaluate the user's local function: reads ghosted input u, writes global output fu */
  ierr = (*func)(&info,u,fu,ctx);
  /* exception path: restore the arrays before propagating; pierr preserves
     the user function's error code held in ierr */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
    pierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,F,&fu);CHKERRQ(ierr);
  /* same exception pattern for the restore calls themselves; CHKERRQ above
     returns on hard error, so localX is not restored twice */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMRestoreLocalVector(da,&localX);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
601 
602 #undef __FUNCT__
603 #define __FUNCT__ "DMDAFormFunctionLocalGhost"
604 /*@C
605    DMDAFormFunctionLocalGhost - This is a universal function evaluation routine for
606    a local DM function, but the ghost values of the output are communicated and added.
607 
608    Collective on DMDA
609 
610    Input Parameters:
611 +  da - the DM context
612 .  func - The local function
613 .  X - input vector
614 .  F - function vector
615 -  ctx - A user context
616 
617    Level: intermediate
618 
619 .seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
620           SNESSetFunction(), SNESSetJacobian()
621 
622 @*/
623 PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctionLocalGhost(DM da, DMDALocalFunction1 func, Vec X, Vec F, void *ctx)
624 {
625   Vec            localX, localF;
626   DMDALocalInfo  info;
627   void           *u;
628   void           *fu;
629   PetscErrorCode ierr;
630 
631   PetscFunctionBegin;
632   ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
633   ierr = DMGetLocalVector(da,&localF);CHKERRQ(ierr);
634   /*
635      Scatter ghost points to local vector, using the 2-step process
636         DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
637   */
638   ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
639   ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
640   ierr = VecSet(F, 0.0);CHKERRQ(ierr);
641   ierr = VecSet(localF, 0.0);CHKERRQ(ierr);
642   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
643   ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
644   ierr = DMDAVecGetArray(da,localF,&fu);CHKERRQ(ierr);
645   ierr = (*func)(&info,u,fu,ctx);
646   if (PetscExceptionValue(ierr)) {
647     PetscErrorCode pierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
648     pierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(pierr);
649   }
650   CHKERRQ(ierr);
651   ierr = DMLocalToGlobalBegin(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
652   ierr = DMLocalToGlobalEnd(da,localF,ADD_VALUES,F);CHKERRQ(ierr);
653   ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
654   ierr = DMDAVecRestoreArray(da,localF,&fu);CHKERRQ(ierr);
655   if (PetscExceptionValue(ierr)) {
656     PetscErrorCode pierr = DMRestoreLocalVector(da,&localX);CHKERRQ(pierr);
657   ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
658   }
659   CHKERRQ(ierr);
660   ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
661   ierr = DMRestoreLocalVector(da,&localF);CHKERRQ(ierr);
662   PetscFunctionReturn(0);
663 }
664 
665 #undef __FUNCT__
666 #define __FUNCT__ "DMDAFormFunction1"
667 /*@
668     DMDAFormFunction1 - Evaluates a user provided function on each processor that
669         share a DMDA
670 
671    Input Parameters:
672 +    da - the DM that defines the grid
673 .    vu - input vector
674 .    vfu - output vector
675 -    w - any user data
676 
677     Notes: Does NOT do ghost updates on vu upon entry
678 
679     Level: advanced
680 
681 .seealso: DMDAComputeJacobian1WithAdic()
682 
683 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunction1(DM da,Vec vu,Vec vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u,*fu;
  DMDALocalInfo  info;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vfu,&fu);CHKERRQ(ierr);

  /* invoke the local function cached by DMDASetLocalFunction();
     CHKMEMQ brackets the user callback to catch memory corruption */
  CHKMEMQ;
  ierr = (*dd->lf)(&info,u,fu,w);
  /* exception path: restore arrays before propagating; pierr preserves the
     user function's error code held in ierr */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(pierr);
    pierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  CHKMEMQ;

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,vfu,&fu);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
710 
711 #undef __FUNCT__
712 #define __FUNCT__ "DMDAFormFunctioniTest1"
/*
   DMDAFormFunctioniTest1 - Consistency test: evaluates the cached full local
   function (via DMDAFormFunction1) and the cached single-component function
   (via DMDAFormFunctioni1, once per local entry) on the same random input,
   then prints the 2-norm of the difference and both vectors.  A nonzero norm
   indicates the two cached functions disagree.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctioniTest1(DM da,void *w)
{
  Vec            vu,fu,fui;
  PetscErrorCode ierr;
  PetscInt       i,n;
  PetscScalar    *ui;
  PetscRandom    rnd;
  PetscReal      norm;

  PetscFunctionBegin;
  /* random (ghosted) input vector */
  ierr = DMGetLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = PetscRandomCreate(PETSC_COMM_SELF,&rnd);CHKERRQ(ierr);
  ierr = PetscRandomSetFromOptions(rnd);CHKERRQ(ierr);
  ierr = VecSetRandom(vu,rnd);CHKERRQ(ierr);
  ierr = PetscRandomDestroy(rnd);CHKERRQ(ierr);

  ierr = DMGetGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMGetGlobalVector(da,&fui);CHKERRQ(ierr);

  /* fu = full local function applied once */
  ierr = DMDAFormFunction1(da,vu,fu,w);CHKERRQ(ierr);

  /* fui = component function applied entry-by-entry */
  ierr = VecGetArray(fui,&ui);CHKERRQ(ierr);
  ierr = VecGetLocalSize(fui,&n);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = DMDAFormFunctioni1(da,i,vu,ui+i,w);CHKERRQ(ierr);
  }
  ierr = VecRestoreArray(fui,&ui);CHKERRQ(ierr);

  /* compare: fui <- fui - fu, report ||fui|| */
  ierr = VecAXPY(fui,-1.0,fu);CHKERRQ(ierr);
  ierr = VecNorm(fui,NORM_2,&norm);CHKERRQ(ierr);
  ierr = PetscPrintf(((PetscObject)da)->comm,"Norm of difference in vectors %G\n",norm);CHKERRQ(ierr);
  ierr = VecView(fu,0);CHKERRQ(ierr);
  ierr = VecView(fui,0);CHKERRQ(ierr);

  ierr = DMRestoreLocalVector(da,&vu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fu);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(da,&fui);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
752 
753 #undef __FUNCT__
754 #define __FUNCT__ "DMDAFormFunctioni1"
755 /*@
756     DMDAFormFunctioni1 - Evaluates a user provided point-wise function
757 
758    Input Parameters:
759 +    da - the DM that defines the grid
760 .    i - the component of the function we wish to compute (must be local)
761 .    vu - input vector
762 .    vfu - output value
763 -    w - any user data
764 
765     Notes: Does NOT do ghost updates on vu upon entry
766 
767     Level: advanced
768 
769 .seealso: DMDAComputeJacobian1WithAdic()
770 
771 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctioni1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;

  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i: decompose the flat local index into
     (component c, grid point i,j,k) using the local sizes xm,ym and dof */
  stencil.c = i % info.dof;
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  /* call the single-component function cached by DMDASetLocalFunctioni() */
  ierr = (*dd->lfi)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
796 
797 #undef __FUNCT__
798 #define __FUNCT__ "DMDAFormFunctionib1"
799 /*@
800     DMDAFormFunctionib1 - Evaluates a user provided point-block function
801 
802    Input Parameters:
803 +    da - the DM that defines the grid
804 .    i - the component of the function we wish to compute (must be local)
805 .    vu - input vector
806 .    vfu - output value
807 -    w - any user data
808 
809     Notes: Does NOT do ghost updates on vu upon entry
810 
811     Level: advanced
812 
813 .seealso: DMDAComputeJacobian1WithAdic()
814 
815 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormFunctionib1(DM da,PetscInt i,Vec vu,PetscScalar *vfu,void *w)
{
  PetscErrorCode ierr;
  void           *u;
  DMDALocalInfo  info;
  MatStencil     stencil;
  DM_DA          *dd = (DM_DA*)da->data;

  PetscFunctionBegin;
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);

  /* figure out stencil value from i: decompose the flat local index into
     (component c, grid point i,j,k); i must be the first entry of a block */
  stencil.c = i % info.dof;
  if (stencil.c) SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ARG_WRONG,"Point-block functions can only be called for the entire block");
  stencil.i = (i % (info.xm*info.dof))/info.dof;
  stencil.j = (i % (info.xm*info.ym*info.dof))/(info.xm*info.dof);
  stencil.k = i/(info.xm*info.ym*info.dof);

  /* call the point-block function cached by DMDASetLocalFunctionib() */
  ierr = (*dd->lfib)(&info,&stencil,u,vfu,w);CHKERRQ(ierr);

  ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
840 
#if defined(new)
#undef __FUNCT__
#define __FUNCT__ "DMDAGetDiagonal_MFFD"
/*
  DMDAGetDiagonal_MFFD - Gets the diagonal for a matrix free matrix where local
    function lives on a DMDA

        y ~= (F(u + ha) - F(u))/h,
  where F = nonlinear function, as set by SNESSetFunction()
        u = current iterate
        h = difference interval

  NOTE(review): this routine is compiled out (the macro "new" is never defined
  here) and is incomplete as written: "ctx", "i" and "w" are undeclared, "gI"
  is read without ever being initialized or advanced, and the loop header that
  should match the closing brace after "nI++;" is missing.  It must be
  completed before the guard is ever enabled.
*/
PetscErrorCode DMDAGetDiagonal_MFFD(DM da,Vec U,Vec a)
{
  PetscScalar    h,*aa,*ww,v;
  PetscReal      epsilon = PETSC_SQRT_MACHINE_EPSILON,umin = 100.0*PETSC_SQRT_MACHINE_EPSILON;
  PetscErrorCode ierr;
  PetscInt       gI,nI;
  MatStencil     stencil;
  DMDALocalInfo  info;

  PetscFunctionBegin;
  ierr = (*ctx->func)(0,U,a,ctx->funcctx);CHKERRQ(ierr);
  ierr = (*ctx->funcisetbase)(U,ctx->funcctx);CHKERRQ(ierr);

  ierr = VecGetArray(U,&ww);CHKERRQ(ierr);
  ierr = VecGetArray(a,&aa);CHKERRQ(ierr);

  nI = 0;
    /* choose a difference interval h of sign matching ww[gI], bounded away from zero */
    h  = ww[gI];
    if (h == 0.0) h = 1.0;
#if !defined(PETSC_USE_COMPLEX)
    if (h < umin && h >= 0.0)      h = umin;
    else if (h < 0.0 && h > -umin) h = -umin;
#else
    if (PetscAbsScalar(h) < umin && PetscRealPart(h) >= 0.0)     h = umin;
    else if (PetscRealPart(h) < 0.0 && PetscAbsScalar(h) < umin) h = -umin;
#endif
    h     *= epsilon;

    /* forward difference of the i-th component function */
    ww[gI] += h;
    ierr          = (*ctx->funci)(i,w,&v,ctx->funcctx);CHKERRQ(ierr);
    aa[nI]  = (v - aa[nI])/h;
    ww[gI] -= h;
    nI++;
  }
  ierr = VecRestoreArray(U,&ww);CHKERRQ(ierr);
  ierr = VecRestoreArray(a,&aa);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif
892 
893 #if defined(PETSC_HAVE_ADIC)
894 EXTERN_C_BEGIN
895 #include "adic/ad_utils.h"
896 EXTERN_C_END
897 
898 #undef __FUNCT__
899 #define __FUNCT__ "DMDAComputeJacobian1WithAdic"
900 /*@C
901     DMDAComputeJacobian1WithAdic - Evaluates a adiC provided Jacobian function on each processor that
902         share a DMDA
903 
904    Input Parameters:
905 +    da - the DM that defines the grid
906 .    vu - input vector (ghosted)
907 .    J - output matrix
908 -    w - any user data
909 
910    Level: advanced
911 
912     Notes: Does NOT do ghost updates on vu upon entry
913 
914 .seealso: DMDAFormFunction1()
915 
916 @*/
917 PetscErrorCode PETSCDM_DLLEXPORT DMDAComputeJacobian1WithAdic(DM da,Vec vu,Mat J,void *w)
918 {
919   PetscErrorCode ierr;
920   PetscInt       gtdof,tdof;
921   PetscScalar    *ustart;
922   DMDALocalInfo  info;
923   void           *ad_u,*ad_f,*ad_ustart,*ad_fstart;
924   ISColoring     iscoloring;
925 
926   PetscFunctionBegin;
927   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
928 
929   PetscADResetIndep();
930 
931   /* get space for derivative objects.  */
932   ierr = DMDAGetAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);CHKERRQ(ierr);
933   ierr = DMDAGetAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
934   ierr = VecGetArray(vu,&ustart);CHKERRQ(ierr);
935   ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
936 
937   PetscADSetValueAndColor(ad_ustart,gtdof,iscoloring->colors,ustart);
938 
939   ierr = VecRestoreArray(vu,&ustart);CHKERRQ(ierr);
940   ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);
941   ierr = PetscADIncrementTotalGradSize(iscoloring->n);CHKERRQ(ierr);
942   PetscADSetIndepDone();
943 
944   ierr = PetscLogEventBegin(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr);
945   ierr = (*dd->adic_lf)(&info,ad_u,ad_f,w);CHKERRQ(ierr);
946   ierr = PetscLogEventEnd(DMDA_LocalADFunction,0,0,0,0);CHKERRQ(ierr);
947 
948   /* stick the values into the matrix */
949   ierr = MatSetValuesAdic(J,(PetscScalar**)ad_fstart);CHKERRQ(ierr);
950 
951   /* return space for derivative objects.  */
952   ierr = DMDARestoreAdicArray(da,PETSC_TRUE,&ad_u,&ad_ustart,&gtdof);CHKERRQ(ierr);
953   ierr = DMDARestoreAdicArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
954   PetscFunctionReturn(0);
955 }
956 
957 #undef __FUNCT__
958 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdic"
959 /*@C
960     DMDAMultiplyByJacobian1WithAdic - Applies an ADIC-provided Jacobian function to a vector on
961     each processor that shares a DMDA.
962 
963     Input Parameters:
964 +   da - the DM that defines the grid
965 .   vu - Jacobian is computed at this point (ghosted)
966 .   v - product is done on this vector (ghosted)
967 .   fu - output vector = J(vu)*v (not ghosted)
968 -   w - any user data
969 
970     Notes:
971     This routine does NOT do ghost updates on vu upon entry.
972 
973    Level: advanced
974 
975 .seealso: DMDAFormFunction1()
976 
977 @*/
978 PetscErrorCode PETSCDM_DLLEXPORT DMDAMultiplyByJacobian1WithAdic(DM da,Vec vu,Vec v,Vec f,void *w)
979 {
980   PetscErrorCode ierr;
981   PetscInt       i,gtdof,tdof;
982   PetscScalar    *avu,*av,*af,*ad_vustart,*ad_fstart;
983   DMDALocalInfo  info;
984   void           *ad_vu,*ad_f;
985 
986   PetscFunctionBegin;
987   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
988 
989   /* get space for derivative objects.  */
990   ierr = DMDAGetAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);CHKERRQ(ierr);
991   ierr = DMDAGetAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
992 
993   /* copy input vector into derivative object */
994   ierr = VecGetArray(vu,&avu);CHKERRQ(ierr);
995   ierr = VecGetArray(v,&av);CHKERRQ(ierr);
996   for (i=0; i<gtdof; i++) {
997     ad_vustart[2*i]   = avu[i];
998     ad_vustart[2*i+1] = av[i];
999   }
1000   ierr = VecRestoreArray(vu,&avu);CHKERRQ(ierr);
1001   ierr = VecRestoreArray(v,&av);CHKERRQ(ierr);
1002 
1003   PetscADResetIndep();
1004   ierr = PetscADIncrementTotalGradSize(1);CHKERRQ(ierr);
1005   PetscADSetIndepDone();
1006 
1007   ierr = (*dd->adicmf_lf)(&info,ad_vu,ad_f,w);CHKERRQ(ierr);
1008 
1009   /* stick the values into the vector */
1010   ierr = VecGetArray(f,&af);CHKERRQ(ierr);
1011   for (i=0; i<tdof; i++) {
1012     af[i] = ad_fstart[2*i+1];
1013   }
1014   ierr = VecRestoreArray(f,&af);CHKERRQ(ierr);
1015 
1016   /* return space for derivative objects.  */
1017   ierr = DMDARestoreAdicMFArray(da,PETSC_TRUE,&ad_vu,&ad_vustart,&gtdof);CHKERRQ(ierr);
1018   ierr = DMDARestoreAdicMFArray(da,PETSC_FALSE,&ad_f,&ad_fstart,&tdof);CHKERRQ(ierr);
1019   PetscFunctionReturn(0);
1020 }
1021 #endif
1022 
1023 #undef __FUNCT__
1024 #define __FUNCT__ "DMDAComputeJacobian1"
1025 /*@
1026     DMDAComputeJacobian1 - Evaluates a local Jacobian function on each processor that
1027         share a DMDA
1028 
1029    Input Parameters:
1030 +    da - the DM that defines the grid
1031 .    vu - input vector (ghosted)
1032 .    J - output matrix
1033 -    w - any user data
1034 
1035     Notes: Does NOT do ghost updates on vu upon entry
1036 
1037     Level: advanced
1038 
1039 .seealso: DMDAFormFunction1()
1040 
1041 @*/
1042 PetscErrorCode PETSCDM_DLLEXPORT DMDAComputeJacobian1(DM da,Vec vu,Mat J,void *w)
1043 {
1044   PetscErrorCode ierr;
1045   void           *u;
1046   DMDALocalInfo  info;
1047   DM_DA          *dd = (DM_DA*)da->data;
1048 
1049   PetscFunctionBegin;
1050   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
1051   ierr = DMDAVecGetArray(da,vu,&u);CHKERRQ(ierr);
1052   ierr = (*dd->lj)(&info,u,J,w);CHKERRQ(ierr);
1053   ierr = DMDAVecRestoreArray(da,vu,&u);CHKERRQ(ierr);
1054   PetscFunctionReturn(0);
1055 }
1056 
1057 
#undef __FUNCT__
#define __FUNCT__ "DMDAComputeJacobian1WithAdifor"
/*
    DMDAComputeJacobian1WithAdifor - Evaluates an ADIFOR provided Jacobian local function on each processor that
        share a DMDA

   Input Parameters:
+    da - the DM that defines the grid
.    vu - input vector (ghosted)
.    J - output matrix
-    w - any user data

    Notes: Does NOT do ghost updates on vu upon entry

.seealso: DMDAFormFunction1()

*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAComputeJacobian1WithAdifor(DM da,Vec vu,Mat J,void *w)
{
  PetscErrorCode  ierr;
  PetscInt        i,Nc,N;
  ISColoringValue *color;
  DMDALocalInfo   info;
  PetscScalar     *u,*g_u,*g_f,*f = 0,*p_u;
  ISColoring      iscoloring;
  DM_DA          *dd = (DM_DA*)da->data;
  /* dd->adifor_lf is a Fortran-style routine generated by ADIFOR; the last
     argument returns its error code */
  void            (*lf)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*) =
                  (void (*)(PetscInt*,DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscInt*,PetscScalar*,PetscScalar*,PetscInt*,void*,PetscErrorCode*))*dd->adifor_lf;

  PetscFunctionBegin;
  ierr = DMGetColoring(da,IS_COLORING_GHOSTED,MATAIJ,&iscoloring);CHKERRQ(ierr);
  Nc   = iscoloring->n;                      /* number of colors = gradient width */
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  N    = info.gxm*info.gym*info.gzm*info.dof;  /* total ghosted degrees of freedom */

  /* get space for derivative objects.  */
  ierr  = PetscMalloc(Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar),&g_u);CHKERRQ(ierr);
  ierr  = PetscMemzero(g_u,Nc*info.gxm*info.gym*info.gzm*info.dof*sizeof(PetscScalar));CHKERRQ(ierr);
  /* Seed the input array g_u with coloring information: for unknown i, set a 1
     in the slot of its color so one sweep yields all Jacobian columns of that color */
  p_u   = g_u;
  color = iscoloring->colors;
  for (i=0; i<N; i++) {
    p_u[*color++] = 1.0;
    p_u          += Nc;
  }
  ierr = ISColoringDestroy(iscoloring);CHKERRQ(ierr);
  ierr = PetscMalloc2(Nc*info.xm*info.ym*info.zm*info.dof,PetscScalar,&g_f,info.xm*info.ym*info.zm*info.dof,PetscScalar,&f);CHKERRQ(ierr);

  /* run the ADIFOR-generated routine: fills g_f with compressed Jacobian entries */
  ierr = VecGetArray(vu,&u);CHKERRQ(ierr);
  (lf)(&Nc,&info,u,g_u,&Nc,f,g_f,&Nc,w,&ierr);CHKERRQ(ierr);
  ierr = VecRestoreArray(vu,&u);CHKERRQ(ierr);

  /* stick the values into the matrix */
  /* PetscScalarView(Nc*info.xm*info.ym,g_f,0); */
  ierr = MatSetValuesAdifor(J,Nc,g_f);CHKERRQ(ierr);

  /* return space for derivative objects.  */
  ierr = PetscFree(g_u);CHKERRQ(ierr);
  ierr = PetscFree2(g_f,f);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1120 
#undef __FUNCT__
#define __FUNCT__ "DMDAFormJacobianLocal"
/*@C
   DMDAFormJacobianLocal - This is a universal Jacobian evaluation routine for
   a local DM function.

   Collective on DMDA

   Input Parameters:
+  da - the DM context
.  func - The local function
.  X - input vector
.  J - Jacobian matrix
-  ctx - A user context

   Level: intermediate

.seealso: DMDASetLocalFunction(), DMDASetLocalJacobian(), DMDASetLocalAdicFunction(), DMDASetLocalAdicMFFunction(),
          SNESSetFunction(), SNESSetJacobian()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMDAFormJacobianLocal(DM da, DMDALocalFunction1 func, Vec X, Mat J, void *ctx)
{
  Vec            localX;
  DMDALocalInfo  info;
  void           *u;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = DMGetLocalVector(da,&localX);CHKERRQ(ierr);
  /*
     Scatter ghost points to local vector, using the 2-step process
        DMGlobalToLocalBegin(), DMGlobalToLocalEnd().
  */
  ierr = DMGlobalToLocalBegin(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(da,X,INSERT_VALUES,localX);CHKERRQ(ierr);
  ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
  ierr = DMDAVecGetArray(da,localX,&u);CHKERRQ(ierr);
  /* Call the user's local Jacobian routine; ierr is deliberately NOT checked until
     after the cleanup below so the array can be restored even on failure */
  ierr = (*func)(&info,u,J,ctx);
  /* If the user routine raised a recoverable PETSc exception, restore the array
     first, then propagate the original error via the bare CHKERRQ(ierr) */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  ierr = DMDAVecRestoreArray(da,localX,&u);CHKERRQ(ierr);
  /* Same exception-safe pattern for returning the borrowed local vector */
  if (PetscExceptionValue(ierr)) {
    PetscErrorCode pierr = DMRestoreLocalVector(da,&localX);CHKERRQ(pierr);
  }
  CHKERRQ(ierr);
  ierr = DMRestoreLocalVector(da,&localX);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1172 
1173 #undef __FUNCT__
1174 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAD"
1175 /*@C
1176     DMDAMultiplyByJacobian1WithAD - Applies a Jacobian function supplied by ADIFOR or ADIC
1177     to a vector on each processor that shares a DMDA.
1178 
1179    Input Parameters:
1180 +    da - the DM that defines the grid
1181 .    vu - Jacobian is computed at this point (ghosted)
1182 .    v - product is done on this vector (ghosted)
1183 .    fu - output vector = J(vu)*v (not ghosted)
1184 -    w - any user data
1185 
1186     Notes:
1187     This routine does NOT do ghost updates on vu and v upon entry.
1188 
1189     Automatically calls DMDAMultiplyByJacobian1WithAdifor() or DMDAMultiplyByJacobian1WithAdic()
1190     depending on whether DMDASetLocalAdicMFFunction() or DMDASetLocalAdiforMFFunction() was called.
1191 
1192    Level: advanced
1193 
1194 .seealso: DMDAFormFunction1(), DMDAMultiplyByJacobian1WithAdifor(), DMDAMultiplyByJacobian1WithAdic()
1195 
1196 @*/
1197 PetscErrorCode PETSCDM_DLLEXPORT DMDAMultiplyByJacobian1WithAD(DM da,Vec u,Vec v,Vec f,void *w)
1198 {
1199   PetscErrorCode ierr;
1200   DM_DA          *dd = (DM_DA*)da->data;
1201 
1202   PetscFunctionBegin;
1203   if (dd->adicmf_lf) {
1204 #if defined(PETSC_HAVE_ADIC)
1205     ierr = DMDAMultiplyByJacobian1WithAdic(da,u,v,f,w);CHKERRQ(ierr);
1206 #else
1207     SETERRQ(((PetscObject)da)->comm,PETSC_ERR_SUP_SYS,"Requires ADIC to be installed and cannot use complex numbers");
1208 #endif
1209   } else if (dd->adiformf_lf) {
1210     ierr = DMDAMultiplyByJacobian1WithAdifor(da,u,v,f,w);CHKERRQ(ierr);
1211   } else {
1212     SETERRQ(((PetscObject)da)->comm,PETSC_ERR_ORDER,"Must call DMDASetLocalAdiforMFFunction() or DMDASetLocalAdicMFFunction() before using");
1213   }
1214   PetscFunctionReturn(0);
1215 }
1216 
1217 
1218 #undef __FUNCT__
1219 #define __FUNCT__ "DMDAMultiplyByJacobian1WithAdifor"
1220 /*@C
1221     DMDAMultiplyByJacobian1WithAdifor - Applies a ADIFOR provided Jacobian function on each processor that
1222         share a DM to a vector
1223 
1224    Input Parameters:
1225 +    da - the DM that defines the grid
1226 .    vu - Jacobian is computed at this point (ghosted)
1227 .    v - product is done on this vector (ghosted)
1228 .    fu - output vector = J(vu)*v (not ghosted)
1229 -    w - any user data
1230 
1231     Notes: Does NOT do ghost updates on vu and v upon entry
1232 
1233    Level: advanced
1234 
1235 .seealso: DMDAFormFunction1()
1236 
1237 @*/
1238 PetscErrorCode PETSCDM_DLLEXPORT DMDAMultiplyByJacobian1WithAdifor(DM da,Vec u,Vec v,Vec f,void *w)
1239 {
1240   PetscErrorCode ierr;
1241   PetscScalar    *au,*av,*af,*awork;
1242   Vec            work;
1243   DMDALocalInfo  info;
1244   DM_DA          *dd = (DM_DA*)da->data;
1245   void           (*lf)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*) =
1246                  (void (*)(DMDALocalInfo*,PetscScalar*,PetscScalar*,PetscScalar*,PetscScalar*,void*,PetscErrorCode*))*dd->adiformf_lf;
1247 
1248   PetscFunctionBegin;
1249   ierr = DMDAGetLocalInfo(da,&info);CHKERRQ(ierr);
1250 
1251   ierr = DMGetGlobalVector(da,&work);CHKERRQ(ierr);
1252   ierr = VecGetArray(u,&au);CHKERRQ(ierr);
1253   ierr = VecGetArray(v,&av);CHKERRQ(ierr);
1254   ierr = VecGetArray(f,&af);CHKERRQ(ierr);
1255   ierr = VecGetArray(work,&awork);CHKERRQ(ierr);
1256   (lf)(&info,au,av,awork,af,w,&ierr);CHKERRQ(ierr);
1257   ierr = VecRestoreArray(u,&au);CHKERRQ(ierr);
1258   ierr = VecRestoreArray(v,&av);CHKERRQ(ierr);
1259   ierr = VecRestoreArray(f,&af);CHKERRQ(ierr);
1260   ierr = VecRestoreArray(work,&awork);CHKERRQ(ierr);
1261   ierr = DMRestoreGlobalVector(da,&work);CHKERRQ(ierr);
1262 
1263   PetscFunctionReturn(0);
1264 }
1265 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_DA_2D"
/*
   DMSetUp_DA_2D - Builds all parallel data structures for a 2d distributed array:
   the m x n process grid, locally owned and ghosted index ranges, local-to-global
   and global-to-local vector scatters, the 3x3 neighbor table, and the
   local-to-global index mapping.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_DA_2D(DM da)
{
  DM_DA                  *dd = (DM_DA*)da->data;
  const PetscInt         M            = dd->M;
  const PetscInt         N            = dd->N;
  PetscInt               m            = dd->m;
  PetscInt               n            = dd->n;
  const PetscInt         dof          = dd->w;
  const PetscInt         s            = dd->s;
  const DMDAPeriodicType wrap         = dd->wrap;
  const DMDAStencilType  stencil_type = dd->stencil_type;
  PetscInt               *lx           = dd->lx;
  PetscInt               *ly           = dd->ly;
  MPI_Comm               comm;
  PetscMPIInt            rank,size;
  PetscInt               xs,xe,ys,ye,x,y,Xs,Xe,Ys,Ye,start,end;
  PetscInt               up,down,left,i,n0,n1,n2,n3,n5,n6,n7,n8,*idx,nn;
  PetscInt               xbase,*bases,*ldims,j,x_t,y_t,s_t,base,count;
  PetscInt               s_x,s_y; /* s proportionalized to w */
  PetscInt               sn0 = 0,sn2 = 0,sn6 = 0,sn8 = 0;
  Vec                    local,global;
  VecScatter             ltog,gtol;
  IS                     to,from;
  PetscErrorCode         ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)da,&comm);CHKERRQ(ierr);

  /* validate user-supplied sizes before any allocation */
  if (dof < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Must have 1 or more degrees of freedom per node: %D",dof);
  if (s < 0) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Stencil width cannot be negative: %D",s);

  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  dd->dim         = 2;
  ierr = PetscMalloc(dof*sizeof(char*),&dd->fieldname);CHKERRQ(ierr);
  ierr = PetscMemzero(dd->fieldname,dof*sizeof(char*));CHKERRQ(ierr);

  if (m != PETSC_DECIDE) {
    if (m < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in X direction: %D",m);
    else if (m > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in X direction: %D %d",m,size);
  }
  if (n != PETSC_DECIDE) {
    if (n < 1) SETERRQ1(comm,PETSC_ERR_ARG_OUTOFRANGE,"Non-positive number of processors in Y direction: %D",n);
    else if (n > size) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Too many processors in Y direction: %D %d",n,size);
  }

  /* pick the m x n process grid when either dimension is left to PETSc */
  if (m == PETSC_DECIDE || n == PETSC_DECIDE) {
    if (n != PETSC_DECIDE) {
      m = size/n;
    } else if (m != PETSC_DECIDE) {
      n = size/m;
    } else {
      /* try for squarish distribution */
      m = (PetscInt)(0.5 + sqrt(((double)M)*((double)size)/((double)N)));
      if (!m) m = 1;
      while (m > 0) {
        n = size/m;
        if (m*n == size) break;
        m--;
      }
      if (M > N && m < n) {PetscInt _m = m; m = n; n = _m;}
    }
    if (m*n != size) SETERRQ(comm,PETSC_ERR_PLIB,"Unable to create partition, check the size of the communicator and input m and n ");
  } else if (m*n != size) SETERRQ(comm,PETSC_ERR_ARG_OUTOFRANGE,"Given Bad partition");

  if (M < m) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in x direction is too fine! %D %D",M,m);
  if (N < n) SETERRQ2(comm,PETSC_ERR_ARG_OUTOFRANGE,"Partition in y direction is too fine! %D %D",N,n);

  /*
     Determine locally owned region
     xs is the first local node number, x is the number of local nodes
  */
  if (!lx) {
    /* default: distribute M nodes as evenly as possible over the m columns */
    ierr = PetscMalloc(m*sizeof(PetscInt), &dd->lx);CHKERRQ(ierr);
    lx = dd->lx;
    for (i=0; i<m; i++) {
      lx[i] = M/m + ((M % m) > i);
    }
  }
  /* this process sits at column rank%m, row rank/m of the process grid */
  x  = lx[rank % m];
  xs = 0;
  for (i=0; i<(rank % m); i++) {
    xs += lx[i];
  }
#if defined(PETSC_USE_DEBUG)
  left = xs;
  for (i=(rank % m); i<m; i++) {
    left += lx[i];
  }
  if (left != M) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of lx across processors not equal to M: %D %D",left,M);
#endif

  /*
     Determine locally owned region
     ys is the first local node number, y is the number of local nodes
  */
  if (!ly) {
    ierr = PetscMalloc(n*sizeof(PetscInt), &dd->ly);CHKERRQ(ierr);
    ly = dd->ly;
    for (i=0; i<n; i++) {
      ly[i] = N/n + ((N % n) > i);
    }
  }
  y  = ly[rank/m];
  ys = 0;
  for (i=0; i<(rank/m); i++) {
    ys += ly[i];
  }
#if defined(PETSC_USE_DEBUG)
  left = ys;
  for (i=(rank/m); i<n; i++) {
    left += ly[i];
  }
  if (left != N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Sum of ly across processors not equal to N: %D %D",left,N);
#endif

  if (x < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local x-width of domain x %D is smaller than stencil width s %D",x,s);
  if (y < s) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local y-width of domain y %D is smaller than stencil width s %D",y,s);
  xe = xs + x;
  ye = ys + y;

  /* determine ghost region */
  /* Assume No Periodicity */
  if (xs-s > 0) Xs = xs - s; else Xs = 0;
  if (ys-s > 0) Ys = ys - s; else Ys = 0;
  if (xe+s <= M) Xe = xe + s; else Xe = M;
  if (ye+s <= N) Ye = ye + s; else Ye = N;

  /* X Periodic */
  if (DMDAXPeriodic(wrap)){
    Xs = xs - s;
    Xe = xe + s;
  }

  /* Y Periodic */
  if (DMDAYPeriodic(wrap)){
    Ys = ys - s;
    Ye = ye + s;
  }

  /* Resize all X parameters to reflect w */
  x   *= dof;
  xs  *= dof;
  xe  *= dof;
  Xs  *= dof;
  Xe  *= dof;
  s_x = s*dof;
  s_y = s;

  /* determine starting point of each processor */
  /* bases[r] becomes the global index of the first unknown owned by process r
     (prefix sum of the gathered local sizes) */
  nn    = x*y;
  ierr  = PetscMalloc2(size+1,PetscInt,&bases,size,PetscInt,&ldims);CHKERRQ(ierr);
  ierr  = MPI_Allgather(&nn,1,MPIU_INT,ldims,1,MPIU_INT,comm);CHKERRQ(ierr);
  bases[0] = 0;
  for (i=1; i<=size; i++) {
    bases[i] = ldims[i-1];
  }
  for (i=1; i<=size; i++) {
    bases[i] += bases[i-1];
  }

  /* allocate the base parallel and sequential vectors */
  dd->Nlocal = x*y;
  ierr = VecCreateMPIWithArray(comm,dd->Nlocal,PETSC_DECIDE,0,&global);CHKERRQ(ierr);
  ierr = VecSetBlockSize(global,dof);CHKERRQ(ierr);
  dd->nlocal = (Xe-Xs)*(Ye-Ys);
  ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,dd->nlocal,0,&local);CHKERRQ(ierr);
  ierr = VecSetBlockSize(local,dof);CHKERRQ(ierr);


  /* generate appropriate vector scatters */
  /* local to global inserts non-ghost point region into global */
  ierr = VecGetOwnershipRange(global,&start,&end);CHKERRQ(ierr);
  ierr = ISCreateStride(comm,x*y,start,1,&to);CHKERRQ(ierr);

  left  = xs - Xs; down  = ys - Ys; up    = down + y;
  ierr = PetscMalloc(x*(up - down)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  count = 0;
  for (i=down; i<up; i++) {
    for (j=0; j<x/dof; j++) {
      idx[count++] = (left + i*(Xe-Xs) + j*dof)/dof;
    }
  }
  ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr);

  ierr = VecScatterCreate(local,from,global,to,&ltog);CHKERRQ(ierr);
  /* NOTE(review): sibling calls below pass da (a PetscObject); passing dd here
     looks suspect -- confirm intended argument */
  ierr = PetscLogObjectParent(dd,ltog);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);

  /* global to local must include ghost points */
  if (stencil_type == DMDA_STENCIL_BOX) {
    ierr = ISCreateStride(comm,(Xe-Xs)*(Ye-Ys),0,1,&to);CHKERRQ(ierr);
  } else {
    /* must drop into cross shape region */
    /*       ---------|
            |  top    |
         |---         ---|
         |   middle      |
         |               |
         ----         ----
            | bottom  |
            -----------
        Xs xs        xe  Xe */
    /* bottom */
    left  = xs - Xs; down = ys - Ys; up    = down + y;
    count = down*(xe-xs) + (up-down)*(Xe-Xs) + (Ye-Ys-up)*(xe-xs);
    ierr  = PetscMalloc(count*sizeof(PetscInt)/dof,&idx);CHKERRQ(ierr);
    count = 0;
    for (i=0; i<down; i++) {
      for (j=0; j<xe-xs; j += dof) {
        idx[count++] = left + i*(Xe-Xs) + j;
      }
    }
    /* middle */
    for (i=down; i<up; i++) {
      for (j=0; j<Xe-Xs; j += dof) {
        idx[count++] = i*(Xe-Xs) + j;
      }
    }
    /* top */
    for (i=up; i<Ye-Ys; i++) {
      for (j=0; j<xe-xs; j += dof) {
        idx[count++] = (left + i*(Xe-Xs) + j);
      }
    }
    /* convert point indices to block (per-node) indices */
    for (i=0; i<count; i++) idx[i] = idx[i]/dof;
    ierr = ISCreateBlock(comm,dof,count,idx,PETSC_OWN_POINTER,&to);CHKERRQ(ierr);
  }


  /* determine who lies on each side of us stored in    n6 n7 n8
                                                        n3    n5
                                                        n0 n1 n2
  */

  /* Assume the Non-Periodic Case */
  n1 = rank - m;
  if (rank % m) {
    n0 = n1 - 1;
  } else {
    n0 = -1;
  }
  if ((rank+1) % m) {
    n2 = n1 + 1;
    n5 = rank + 1;
    n8 = rank + m + 1; if (n8 >= m*n) n8 = -1;
  } else {
    n2 = -1; n5 = -1; n8 = -1;
  }
  if (rank % m) {
    n3 = rank - 1;
    n6 = n3 + m; if (n6 >= m*n) n6 = -1;
  } else {
    n3 = -1; n6 = -1;
  }
  n7 = rank + m; if (n7 >= m*n) n7 = -1;


  /* Modify for Periodic Cases */
  if (wrap == DMDA_YPERIODIC) {  /* Handle Top and Bottom Sides */
    if (n1 < 0) n1 = rank + m * (n-1);
    if (n7 < 0) n7 = rank - m * (n-1);
    if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
    if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
    if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
    if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;
  } else if (wrap == DMDA_XPERIODIC) { /* Handle Left and Right Sides */
    if (n3 < 0) n3 = rank + (m-1);
    if (n5 < 0) n5 = rank - (m-1);
    if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
    if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
    if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
    if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
  } else if (wrap == DMDA_XYPERIODIC) {

    /* Handle all four corners */
    if ((n6 < 0) && (n7 < 0) && (n3 < 0)) n6 = m-1;
    if ((n8 < 0) && (n7 < 0) && (n5 < 0)) n8 = 0;
    if ((n2 < 0) && (n5 < 0) && (n1 < 0)) n2 = size-m;
    if ((n0 < 0) && (n3 < 0) && (n1 < 0)) n0 = size-1;

    /* Handle Top and Bottom Sides */
    if (n1 < 0) n1 = rank + m * (n-1);
    if (n7 < 0) n7 = rank - m * (n-1);
    if ((n3 >= 0) && (n0 < 0)) n0 = size - m + rank - 1;
    if ((n3 >= 0) && (n6 < 0)) n6 = (rank%m)-1;
    if ((n5 >= 0) && (n2 < 0)) n2 = size - m + rank + 1;
    if ((n5 >= 0) && (n8 < 0)) n8 = (rank%m)+1;

    /* Handle Left and Right Sides */
    if (n3 < 0) n3 = rank + (m-1);
    if (n5 < 0) n5 = rank - (m-1);
    if ((n1 >= 0) && (n0 < 0)) n0 = rank-1;
    if ((n1 >= 0) && (n2 < 0)) n2 = rank-2*m+1;
    if ((n7 >= 0) && (n6 < 0)) n6 = rank+2*m-1;
    if ((n7 >= 0) && (n8 < 0)) n8 = rank+1;
  }
  ierr = PetscMalloc(9*sizeof(PetscInt),&dd->neighbors);CHKERRQ(ierr);
  dd->neighbors[0] = n0;
  dd->neighbors[1] = n1;
  dd->neighbors[2] = n2;
  dd->neighbors[3] = n3;
  dd->neighbors[4] = rank;
  dd->neighbors[5] = n5;
  dd->neighbors[6] = n6;
  dd->neighbors[7] = n7;
  dd->neighbors[8] = n8;

  if (stencil_type == DMDA_STENCIL_STAR) {
    /* save corner processor numbers */
    sn0 = n0; sn2 = n2; sn6 = n6; sn8 = n8;
    n0 = n2 = n6 = n8 = -1;
  }

  /* ownership of the previous idx was transferred to the IS via PETSC_OWN_POINTER,
     so the pointer may be reused for the ghosted global-index list */
  ierr = PetscMalloc((x+2*s_x)*(y+2*s_y)*sizeof(PetscInt),&idx);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(da,(x+2*s_x)*(y+2*s_y)*sizeof(PetscInt));CHKERRQ(ierr);
  nn = 0;

  /* fill idx with the global indices of all ghosted points, sweeping the
     ghost rows below, the interior rows, then the ghost rows above */
  xbase = bases[rank];
  for (i=1; i<=s_y; i++) {
    if (n0 >= 0) { /* left below */
      x_t = lx[n0 % m]*dof;
      y_t = ly[(n0/m)];
      s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
      for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
    }
    if (n1 >= 0) { /* directly below */
      x_t = x;
      y_t = ly[(n1/m)];
      s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
      for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
    }
    if (n2 >= 0) { /* right below */
      x_t = lx[n2 % m]*dof;
      y_t = ly[(n2/m)];
      s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
      for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
    }
  }

  for (i=0; i<y; i++) {
    if (n3 >= 0) { /* directly left */
      x_t = lx[n3 % m]*dof;
      /* y_t = y; */
      s_t = bases[n3] + (i+1)*x_t - s_x;
      for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
    }

    for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

    if (n5 >= 0) { /* directly right */
      x_t = lx[n5 % m]*dof;
      /* y_t = y; */
      s_t = bases[n5] + (i)*x_t;
      for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
    }
  }

  for (i=1; i<=s_y; i++) {
    if (n6 >= 0) { /* left above */
      x_t = lx[n6 % m]*dof;
      /* y_t = ly[(n6/m)]; */
      s_t = bases[n6] + (i)*x_t - s_x;
      for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
    }
    if (n7 >= 0) { /* directly above */
      x_t = x;
      /* y_t = ly[(n7/m)]; */
      s_t = bases[n7] + (i-1)*x_t;
      for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
    }
    if (n8 >= 0) { /* right above */
      x_t = lx[n8 % m]*dof;
      /* y_t = ly[(n8/m)]; */
      s_t = bases[n8] + (i-1)*x_t;
      for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
    }
  }

  base = bases[rank];
  {
    /* compress idx to one block index per node for the blocked IS */
    PetscInt nnn = nn/dof,*iidx;
    ierr = PetscMalloc(nnn*sizeof(PetscInt),&iidx);CHKERRQ(ierr);
    for (i=0; i<nnn; i++) {
      iidx[i] = idx[dof*i]/dof;
    }
    ierr = ISCreateBlock(comm,dof,nnn,iidx,PETSC_OWN_POINTER,&from);CHKERRQ(ierr);
  }
  ierr = VecScatterCreate(global,from,local,to,&gtol);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,gtol);CHKERRQ(ierr);
  ierr = ISDestroy(to);CHKERRQ(ierr);
  ierr = ISDestroy(from);CHKERRQ(ierr);

  if (stencil_type == DMDA_STENCIL_STAR) {
    /*
        Recompute the local to global mappings, this time keeping the
      information about the cross corner processor numbers.
    */
    n0 = sn0; n2 = sn2; n6 = sn6; n8 = sn8;
    nn = 0;
    xbase = bases[rank];
    for (i=1; i<=s_y; i++) {
      if (n0 >= 0) { /* left below */
        x_t = lx[n0 % m]*dof;
        y_t = ly[(n0/m)];
        s_t = bases[n0] + x_t*y_t - (s_y-i)*x_t - s_x;
        for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
      }
      if (n1 >= 0) { /* directly below */
        x_t = x;
        y_t = ly[(n1/m)];
        s_t = bases[n1] + x_t*y_t - (s_y+1-i)*x_t;
        for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
      }
      if (n2 >= 0) { /* right below */
        x_t = lx[n2 % m]*dof;
        y_t = ly[(n2/m)];
        s_t = bases[n2] + x_t*y_t - (s_y+1-i)*x_t;
        for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
      }
    }

    for (i=0; i<y; i++) {
      if (n3 >= 0) { /* directly left */
        x_t = lx[n3 % m]*dof;
        /* y_t = y; */
        s_t = bases[n3] + (i+1)*x_t - s_x;
        for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
      }

      for (j=0; j<x; j++) { idx[nn++] = xbase++; } /* interior */

      if (n5 >= 0) { /* directly right */
        x_t = lx[n5 % m]*dof;
        /* y_t = y; */
        s_t = bases[n5] + (i)*x_t;
        for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
      }
    }

    for (i=1; i<=s_y; i++) {
      if (n6 >= 0) { /* left above */
        x_t = lx[n6 % m]*dof;
        /* y_t = ly[(n6/m)]; */
        s_t = bases[n6] + (i)*x_t - s_x;
        for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
      }
      if (n7 >= 0) { /* directly above */
        x_t = x;
        /* y_t = ly[(n7/m)]; */
        s_t = bases[n7] + (i-1)*x_t;
        for (j=0; j<x_t; j++) { idx[nn++] = s_t++;}
      }
      if (n8 >= 0) { /* right above */
        x_t = lx[n8 % m]*dof;
        /* y_t = ly[(n8/m)]; */
        s_t = bases[n8] + (i-1)*x_t;
        for (j=0; j<s_x; j++) { idx[nn++] = s_t++;}
      }
    }
  }
  ierr = PetscFree2(bases,ldims);CHKERRQ(ierr);

  /* record the final decomposition in the DMDA */
  dd->m  = m;  dd->n  = n;
  dd->xs = xs; dd->xe = xe; dd->ys = ys; dd->ye = ye; dd->zs = 0; dd->ze = 1;
  dd->Xs = Xs; dd->Xe = Xe; dd->Ys = Ys; dd->Ye = Ye; dd->Zs = 0; dd->Ze = 1;

  /* the template vectors were only needed to build the scatters */
  ierr = VecDestroy(local);CHKERRQ(ierr);
  ierr = VecDestroy(global);CHKERRQ(ierr);

  dd->gtol      = gtol;
  dd->ltog      = ltog;
  dd->idx       = idx;
  dd->Nl        = nn;
  dd->base      = base;
  da->ops->view = DMView_DA_2d;

  /*
     Set the local to global ordering in the global vector, this allows use
     of VecSetValuesLocal().
  */
  ierr = ISLocalToGlobalMappingCreate(comm,nn,idx,PETSC_OWN_POINTER,&dd->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingBlock(dd->ltogmap,dd->w,&dd->ltogmapb);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(da,dd->ltogmap);CHKERRQ(ierr);

  dd->ltol = PETSC_NULL;
  dd->ao   = PETSC_NULL;

  PetscFunctionReturn(0);
}
1760 
1761 #undef __FUNCT__
1762 #define __FUNCT__ "DMDACreate2d"
1763 /*@C
1764    DMDACreate2d -  Creates an object that will manage the communication of  two-dimensional
1765    regular array data that is distributed across some processors.
1766 
1767    Collective on MPI_Comm
1768 
1769    Input Parameters:
1770 +  comm - MPI communicator
.  wrap - type of periodicity the array should have.
         Use one of DMDA_NONPERIODIC, DMDA_XPERIODIC, DMDA_YPERIODIC, or DMDA_XYPERIODIC.
1773 .  stencil_type - stencil type.  Use either DMDA_STENCIL_BOX or DMDA_STENCIL_STAR.
.  M,N - global dimension in each direction of the array (use -M and/or -N to indicate that it may be set to a different value
            from the command line with -da_grid_x <M> -da_grid_y <N>)
1776 .  m,n - corresponding number of processors in each dimension
1777          (or PETSC_DECIDE to have calculated)
1778 .  dof - number of degrees of freedom per node
1779 .  s - stencil width
-  lx, ly - arrays containing the number of nodes owned by each process in the
           x and y directions, or PETSC_NULL. If non-null, these
           must be of length m and n respectively, and the corresponding
           m and n cannot be PETSC_DECIDE. The sum of the lx[] entries
           must be M, and the sum of the ly[] entries must be N.
1785 
1786    Output Parameter:
1787 .  da - the resulting distributed array object
1788 
1789    Options Database Key:
1790 +  -da_view - Calls DMView() at the conclusion of DMDACreate2d()
1791 .  -da_grid_x <nx> - number of grid points in x direction, if M < 0
1792 .  -da_grid_y <ny> - number of grid points in y direction, if N < 0
1793 .  -da_processors_x <nx> - number of processors in x direction
1794 .  -da_processors_y <ny> - number of processors in y direction
1795 .  -da_refine_x - refinement ratio in x direction
1796 -  -da_refine_y - refinement ratio in y direction
1797 
1798    Level: beginner
1799 
1800    Notes:
1801    The stencil type DMDA_STENCIL_STAR with width 1 corresponds to the
1802    standard 5-pt stencil, while DMDA_STENCIL_BOX with width 1 denotes
1803    the standard 9-pt stencil.
1804 
1805    The array data itself is NOT stored in the DMDA, it is stored in Vec objects;
1806    The appropriate vector objects can be obtained with calls to DMCreateGlobalVector()
1807    and DMCreateLocalVector() and calls to VecDuplicate() if more are needed.
1808 
1809 .keywords: distributed array, create, two-dimensional
1810 
1811 .seealso: DMDestroy(), DMView(), DMDACreate1d(), DMDACreate3d(), DMGlobalToLocalBegin(), DMDAGetRefinementFactor(),
1812           DMGlobalToLocalEnd(), DMLocalToGlobalBegin(), DMDALocalToLocalBegin(), DMDALocalToLocalEnd(), DMDASetRefinementFactor(),
1813           DMDAGetInfo(), DMCreateGlobalVector(), DMCreateLocalVector(), DMDACreateNaturalVector(), DMDALoad(), DMDAGetOwnershipRanges()
1814 
1815 @*/
1816 PetscErrorCode PETSCDM_DLLEXPORT DMDACreate2d(MPI_Comm comm,DMDAPeriodicType wrap,DMDAStencilType stencil_type,
1817                           PetscInt M,PetscInt N,PetscInt m,PetscInt n,PetscInt dof,PetscInt s,const PetscInt lx[],const PetscInt ly[],DM *da)
1818 {
1819   PetscErrorCode ierr;
1820 
1821   PetscFunctionBegin;
1822   ierr = DMDACreate(comm, da);CHKERRQ(ierr);
1823   ierr = DMDASetDim(*da, 2);CHKERRQ(ierr);
1824   ierr = DMDASetSizes(*da, M, N, 1);CHKERRQ(ierr);
1825   ierr = DMDASetNumProcs(*da, m, n, PETSC_DECIDE);CHKERRQ(ierr);
1826   ierr = DMDASetPeriodicity(*da, wrap);CHKERRQ(ierr);
1827   ierr = DMDASetDof(*da, dof);CHKERRQ(ierr);
1828   ierr = DMDASetStencilType(*da, stencil_type);CHKERRQ(ierr);
1829   ierr = DMDASetStencilWidth(*da, s);CHKERRQ(ierr);
1830   ierr = DMDASetOwnershipRanges(*da, lx, ly, PETSC_NULL);CHKERRQ(ierr);
1831   /* This violates the behavior for other classes, but right now users expect negative dimensions to be handled this way */
1832   ierr = DMSetFromOptions(*da);CHKERRQ(ierr);
1833   ierr = DMSetUp(*da);CHKERRQ(ierr);
1834   ierr = DMView_DA_Private(*da);CHKERRQ(ierr);
1835   PetscFunctionReturn(0);
1836 }
1837