xref: /petsc/src/dm/interface/dm.c (revision a906b49b2a5591d6d10cd79ed4ebe774a5561b3a)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
/* Class id and profiling event ids for the DM package (values assigned during package initialization) */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the dm.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Register all packages whose objects a DM may create (Vec, Mat, DM itself) */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* No type-specific operations are installed until DMSetType() is called */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap              = NULL;
  v->ltogmapb             = NULL;
  v->bs                   = 1;
  v->coloringtype         = IS_COLORING_GLOBAL;
  ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): the 10 presumably matches the size of nullspaceConstructors[] in dmimpl.h -- confirm */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;
  /* Default to standard (host) vectors and AIJ sparse matrices */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}
65 
66 #undef __FUNCT__
67 #define __FUNCT__ "DMClone"
68 /*@
69   DMClone - Creates a DM object with the same topology as the original.
70 
71   Collective on MPI_Comm
72 
73   Input Parameter:
74 . dm - The original DM object
75 
76   Output Parameter:
77 . newdm  - The new DM object
78 
79   Level: beginner
80 
81 .keywords: DM, topology, create
82 @*/
83 PetscErrorCode DMClone(DM dm, DM *newdm)
84 {
85   PetscSF        sf;
86   Vec            coords;
87   void          *ctx;
88   PetscErrorCode ierr;
89 
90   PetscFunctionBegin;
91   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
92   PetscValidPointer(newdm,2);
93   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
94   if (dm->ops->clone) {
95     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
96   }
97   (*newdm)->setupcalled = PETSC_TRUE;
98   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
99   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
100   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
101   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
102   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
103   if (coords) {
104     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
105   } else {
106     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
107     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
108   }
109   PetscFunctionReturn(0);
110 }
111 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Replace any previously stored type string with a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
140 
141 #undef __FUNCT__
142 #define __FUNCT__ "DMGetVecType"
143 /*@C
144        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
145 
146    Logically Collective on DMDA
147 
148    Input Parameter:
149 .  da - initial distributed array
150 
151    Output Parameter:
152 .  ctype - the vector type
153 
154    Level: intermediate
155 
156 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
157 @*/
158 PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
159 {
160   PetscFunctionBegin;
161   PetscValidHeaderSpecific(da,DM_CLASSID,1);
162   *ctype = da->vectype;
163   PetscFunctionReturn(0);
164 }
165 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (NULL if no DM was attached with VecSetDM())

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the Vec as a composed object under a private key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
193 
#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
  VecSetDM - Sets the DM defining the data layout of the vector

  Not collective

  Input Parameters:
+ v - The Vec
- dm - The DM (may be NULL to remove a previously attached DM)

  Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Compose the DM onto the Vec under the private key read back by VecGetDM() */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
219 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Replace any previously stored type string with a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
248 
249 #undef __FUNCT__
250 #define __FUNCT__ "DMGetMatType"
251 /*@C
252        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
253 
254    Logically Collective on DM
255 
256    Input Parameter:
257 .  dm - the DM context
258 
259    Output Parameter:
260 .  ctype - the matrix type
261 
262    Options Database:
263 .   -dm_mat_type ctype
264 
265    Level: intermediate
266 
267 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
268 @*/
269 PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
270 {
271   PetscFunctionBegin;
272   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
273   *ctype = dm->mattype;
274   PetscFunctionReturn(0);
275 }
276 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (NULL if no DM was attached with MatSetDM())

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the Mat as a composed object under a private key */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
304 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM (may be NULL to remove a previously attached DM)

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Compose the DM onto the Mat under the private key read back by MatGetDM() */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
330 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameters:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegate to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
362 
363 #undef __FUNCT__
364 #define __FUNCT__ "DMDestroy"
365 /*@
366     DMDestroy - Destroys a vector packer or DMDA.
367 
368     Collective on DM
369 
370     Input Parameter:
371 .   dm - the DM object to destroy
372 
373     Level: developer
374 
375 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
376 
377 @*/
378 PetscErrorCode  DMDestroy(DM *dm)
379 {
380   PetscInt       i, cnt = 0, f;
381   DMNamedVecLink nlink,nnext;
382   PetscErrorCode ierr;
383 
384   PetscFunctionBegin;
385   if (!*dm) PetscFunctionReturn(0);
386   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
387 
388   /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
389   for (f = 0; f < (*dm)->numFields; ++f) {
390     ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
391     ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
392     ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
393   }
394   /* count all the circular references of DM and its contained Vecs */
395   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
396     if ((*dm)->localin[i])  cnt++;
397     if ((*dm)->globalin[i]) cnt++;
398   }
399   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
400   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
401   if ((*dm)->x) {
402     DM obj;
403     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
404     if (obj == *dm) cnt++;
405   }
406 
407   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
408   /*
409      Need this test because the dm references the vectors that
410      reference the dm, so destroying the dm calls destroy on the
411      vectors that cause another destroy on the dm
412   */
413   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
414   ((PetscObject) (*dm))->refct = 0;
415   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
416     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
417     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
418   }
419   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
420     nnext = nlink->next;
421     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
422     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
423     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
424     ierr = PetscFree(nlink);CHKERRQ(ierr);
425   }
426   (*dm)->namedglobal = NULL;
427 
428   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
429     nnext = nlink->next;
430     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
431     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
432     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
433     ierr = PetscFree(nlink);CHKERRQ(ierr);
434   }
435   (*dm)->namedlocal = NULL;
436 
437   /* Destroy the list of hooks */
438   {
439     DMCoarsenHookLink link,next;
440     for (link=(*dm)->coarsenhook; link; link=next) {
441       next = link->next;
442       ierr = PetscFree(link);CHKERRQ(ierr);
443     }
444     (*dm)->coarsenhook = NULL;
445   }
446   {
447     DMRefineHookLink link,next;
448     for (link=(*dm)->refinehook; link; link=next) {
449       next = link->next;
450       ierr = PetscFree(link);CHKERRQ(ierr);
451     }
452     (*dm)->refinehook = NULL;
453   }
454   {
455     DMSubDomainHookLink link,next;
456     for (link=(*dm)->subdomainhook; link; link=next) {
457       next = link->next;
458       ierr = PetscFree(link);CHKERRQ(ierr);
459     }
460     (*dm)->subdomainhook = NULL;
461   }
462   {
463     DMGlobalToLocalHookLink link,next;
464     for (link=(*dm)->gtolhook; link; link=next) {
465       next = link->next;
466       ierr = PetscFree(link);CHKERRQ(ierr);
467     }
468     (*dm)->gtolhook = NULL;
469   }
470   /* Destroy the work arrays */
471   {
472     DMWorkLink link,next;
473     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
474     for (link=(*dm)->workin; link; link=next) {
475       next = link->next;
476       ierr = PetscFree(link->mem);CHKERRQ(ierr);
477       ierr = PetscFree(link);CHKERRQ(ierr);
478     }
479     (*dm)->workin = NULL;
480   }
481 
482   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
483   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
484   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
485 
486   if ((*dm)->ctx && (*dm)->ctxdestroy) {
487     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
488   }
489   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
490   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
491   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
492   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
493   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
494   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
495   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
496 
497   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
498   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
499   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
500   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
501   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
502 
503   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
504   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
505   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
506 
507   for (f = 0; f < (*dm)->numFields; ++f) {
508     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
509   }
510   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
511   /* if memory was published with SAWs then destroy it */
512   ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);
513 
514   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
515   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
516   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
517   PetscFunctionReturn(0);
518 }
519 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

    Notes: This is a no-op if setup was already performed (or flagged, e.g. by DMClone()).

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  /* Honor -dm_view now that the DM is fully constructed */
  ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
549 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  /* NOTE(review): if dm->mattype were NULL the default shown would be typeName, which is only
     initialized when -dm_vec_type was supplied; DMCreate() always sets mattype so this branch
     appears unreachable in practice -- confirm */
  ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
598 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer (NULL uses the standard ASCII viewer on the DM's communicator)

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: file classid followed by the type name in a fixed 256-byte field */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
639 
640 #undef __FUNCT__
641 #define __FUNCT__ "DMCreateGlobalVector"
642 /*@
643     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
644 
645     Collective on DM
646 
647     Input Parameter:
648 .   dm - the DM object
649 
650     Output Parameter:
651 .   vec - the global vector
652 
653     Level: beginner
654 
655 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
656 
657 @*/
658 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
659 {
660   PetscErrorCode ierr;
661 
662   PetscFunctionBegin;
663   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
664   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
665   PetscFunctionReturn(0);
666 }
667 
668 #undef __FUNCT__
669 #define __FUNCT__ "DMCreateLocalVector"
670 /*@
671     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
672 
673     Not Collective
674 
675     Input Parameter:
676 .   dm - the DM object
677 
678     Output Parameter:
679 .   vec - the local vector
680 
681     Level: beginner
682 
683 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
684 
685 @*/
686 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
687 {
688   PetscErrorCode ierr;
689 
690   PetscFunctionBegin;
691   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
692   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
693   PetscFunctionReturn(0);
694 }
695 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first call and cached in the DM.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* If the DM carries a PetscSection, derive the mapping from it; otherwise fall back to the type-specific op */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc1(size, &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        /* For each point: dof count from the local section, starting global offset from the global section */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* The mapping takes ownership of the ltog array (PETSC_OWN_POINTER) */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
756 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

   The mapping is built lazily on first call and cached; for block size 1
   it is simply the (referenced) point-wise mapping.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* Block size 1: the blocked mapping is identical to the point-wise one; take an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
799 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* A block size < 1 means the DM type has not determined its block structure yet */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
826 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's type-specific implementation */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
864 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's type-specific implementation */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
897 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@
    DMCreateColoring - Gets coloring for a DM

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Not every DM type supports coloring; fail cleanly rather than dereferencing a NULL op */
  if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "DMCreateMatrix"
930 /*@
931     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
932 
933     Collective on DM
934 
935     Input Parameter:
936 .   dm - the DM object
937 
938     Output Parameter:
939 .   mat - the empty Jacobian
940 
941     Level: beginner
942 
943     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
944        do not need to do it yourself.
945 
946        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
947        the nonzero pattern call DMDASetMatPreallocateOnly()
948 
949        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
950        internally by PETSc.
951 
952        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
953        the indices for the global numbering for DMDAs which is complicated.
954 
955 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
956 
957 @*/
958 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
959 {
960   PetscErrorCode ierr;
961 
962   PetscFunctionBegin;
963   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
964   ierr = MatInitializePackage();CHKERRQ(ierr);
965   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
966   PetscValidPointer(mat,3);
967   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
968   PetscFunctionReturn(0);
969 }
970 
971 #undef __FUNCT__
972 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
973 /*@
974   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
975     preallocated but the nonzero structure and zero values will not be set.
976 
977   Logically Collective on DMDA
978 
979   Input Parameter:
980 + dm - the DM
981 - only - PETSC_TRUE if only want preallocation
982 
983   Level: developer
984 .seealso DMCreateMatrix()
985 @*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag consulted by the type-specific DMCreateMatrix() implementation: when set, the matrix is
     preallocated but the nonzero structure and zero entries are not inserted */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
993 
994 #undef __FUNCT__
995 #define __FUNCT__ "DMGetWorkArray"
996 /*@C
997   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
998 
999   Not Collective
1000 
1001   Input Parameters:
1002 + dm - the DM object
1003 . count - The minium size
1004 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1005 
1006   Output Parameter:
1007 . array - the work array
1008 
1009   Level: developer
1010 
1011 .seealso DMDestroy(), DMCreate()
1012 @*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Pop a link from the checked-in free list if one is available, otherwise allocate a fresh one.
     Links (and their buffers) live for the lifetime of the DM so repeated calls reuse memory. */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,&link);CHKERRQ(ierr);
  }
  /* Grow the link's buffer if it is smaller than the requested size; it is never shrunk */
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out list and hand its buffer to the caller;
     DMRestoreWorkArray() finds the link again by matching this buffer address */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1039 
1040 #undef __FUNCT__
1041 #define __FUNCT__ "DMRestoreWorkArray"
1042 /*@C
1043   DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1044 
1045   Not Collective
1046 
1047   Input Parameters:
1048 + dm - the DM object
1049 . count - The minium size
1050 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1051 
1052   Output Parameter:
1053 . array - the work array
1054 
1055   Level: developer
1056 
1057 .seealso DMDestroy(), DMCreate()
1058 @*/
1059 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1060 {
1061   DMWorkLink *p,link;
1062 
1063   PetscFunctionBegin;
1064   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1065   PetscValidPointer(mem,4);
1066   for (p=&dm->workout; (link=*p); p=&link->next) {
1067     if (link->mem == *(void**)mem) {
1068       *p           = link->next;
1069       link->next   = dm->workin;
1070       dm->workin   = link;
1071       *(void**)mem = NULL;
1072       PetscFunctionReturn(0);
1073     }
1074   }
1075   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1076   PetscFunctionReturn(0);
1077 }
1078 
1079 #undef __FUNCT__
1080 #define __FUNCT__ "DMSetNullSpaceConstructor"
1081 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1082 {
1083   PetscFunctionBegin;
1084   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1085   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1086   dm->nullspaceConstructors[field] = nullsp;
1087   PetscFunctionReturn(0);
1088 }
1089 
1090 #undef __FUNCT__
1091 #define __FUNCT__ "DMCreateFieldIS"
1092 /*@C
1093   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1094 
1095   Not collective
1096 
1097   Input Parameter:
1098 . dm - the DM object
1099 
1100   Output Parameters:
1101 + numFields  - The number of fields (or NULL if not requested)
1102 . fieldNames - The name for each field (or NULL if not requested)
1103 - fields     - The global indices for each field (or NULL if not requested)
1104 
1105   Level: intermediate
1106 
1107   Notes:
1108   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1109   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1110   PetscFree().
1111 
1112 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1113 @*/
1114 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1115 {
1116   PetscSection   section, sectionGlobal;
1117   PetscErrorCode ierr;
1118 
1119   PetscFunctionBegin;
1120   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1121   if (numFields) {
1122     PetscValidPointer(numFields,2);
1123     *numFields = 0;
1124   }
1125   if (fieldNames) {
1126     PetscValidPointer(fieldNames,3);
1127     *fieldNames = NULL;
1128   }
1129   if (fields) {
1130     PetscValidPointer(fields,4);
1131     *fields = NULL;
1132   }
1133   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1134   if (section) {
1135     PetscInt *fieldSizes, **fieldIndices;
1136     PetscInt nF, f, pStart, pEnd, p;
1137 
1138     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1139     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1140     ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr);
1141     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1142     for (f = 0; f < nF; ++f) {
1143       fieldSizes[f] = 0;
1144     }
1145     for (p = pStart; p < pEnd; ++p) {
1146       PetscInt gdof;
1147 
1148       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1149       if (gdof > 0) {
1150         for (f = 0; f < nF; ++f) {
1151           PetscInt fdof, fcdof;
1152 
1153           ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1154           ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1155           fieldSizes[f] += fdof-fcdof;
1156         }
1157       }
1158     }
1159     for (f = 0; f < nF; ++f) {
1160       ierr          = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr);
1161       fieldSizes[f] = 0;
1162     }
1163     for (p = pStart; p < pEnd; ++p) {
1164       PetscInt gdof, goff;
1165 
1166       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1167       if (gdof > 0) {
1168         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1169         for (f = 0; f < nF; ++f) {
1170           PetscInt fdof, fcdof, fc;
1171 
1172           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1173           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1174           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1175             fieldIndices[f][fieldSizes[f]] = goff++;
1176           }
1177         }
1178       }
1179     }
1180     if (numFields) *numFields = nF;
1181     if (fieldNames) {
1182       ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr);
1183       for (f = 0; f < nF; ++f) {
1184         const char *fieldName;
1185 
1186         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1187         ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
1188       }
1189     }
1190     if (fields) {
1191       ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr);
1192       for (f = 0; f < nF; ++f) {
1193         ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1194       }
1195     }
1196     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1197   } else if (dm->ops->createfieldis) {
1198     ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
1199   }
1200   PetscFunctionReturn(0);
1201 }
1202 
1203 
1204 #undef __FUNCT__
1205 #define __FUNCT__ "DMCreateFieldDecomposition"
1206 /*@C
1207   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1208                           corresponding to different fields: each IS contains the global indices of the dofs of the
1209                           corresponding field. The optional list of DMs define the DM for each subproblem.
1210                           Generalizes DMCreateFieldIS().
1211 
1212   Not collective
1213 
1214   Input Parameter:
1215 . dm - the DM object
1216 
1217   Output Parameters:
1218 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1219 . namelist  - The name for each field (or NULL if not requested)
1220 . islist    - The global indices for each field (or NULL if not requested)
1221 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1222 
1223   Level: intermediate
1224 
1225   Notes:
1226   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1227   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1228   and all of the arrays should be freed with PetscFree().
1229 
1230 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1231 @*/
1232 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1233 {
1234   PetscErrorCode ierr;
1235 
1236   PetscFunctionBegin;
1237   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1238   if (len) {
1239     PetscValidPointer(len,2);
1240     *len = 0;
1241   }
1242   if (namelist) {
1243     PetscValidPointer(namelist,3);
1244     *namelist = 0;
1245   }
1246   if (islist) {
1247     PetscValidPointer(islist,4);
1248     *islist = 0;
1249   }
1250   if (dmlist) {
1251     PetscValidPointer(dmlist,5);
1252     *dmlist = 0;
1253   }
1254   /*
1255    Is it a good idea to apply the following check across all impls?
1256    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1257    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1258    */
1259   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1260   if (!dm->ops->createfielddecomposition) {
1261     PetscSection section;
1262     PetscInt     numFields, f;
1263 
1264     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1265     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1266     if (section && numFields && dm->ops->createsubdm) {
1267       *len = numFields;
1268       ierr = PetscMalloc3(numFields,namelist,numFields,islist,numFields,dmlist);CHKERRQ(ierr);
1269       for (f = 0; f < numFields; ++f) {
1270         const char *fieldName;
1271 
1272         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1273         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1274         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1275       }
1276     } else {
1277       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1278       /* By default there are no DMs associated with subproblems. */
1279       if (dmlist) *dmlist = NULL;
1280     }
1281   } else {
1282     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1283   }
1284   PetscFunctionReturn(0);
1285 }
1286 
1287 #undef __FUNCT__
1288 #define __FUNCT__ "DMCreateSubDM"
1289 /*@C
1290   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1291                   The fields are defined by DMCreateFieldIS().
1292 
1293   Not collective
1294 
1295   Input Parameters:
1296 + dm - the DM object
1297 . numFields - number of fields in this subproblem
1298 - fields    - The field numbers of the selected fields
1299 
1300   Output Parameters:
1301 + is    - The global indices for the subproblem
1302 - subdm - The DM for the subproblem
1303 
1304   Level: intermediate
1305 
1306 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1307 @*/
1308 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1309 {
1310   PetscErrorCode ierr;
1311 
1312   PetscFunctionBegin;
1313   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1314   PetscValidPointer(fields,3);
1315   if (is) PetscValidPointer(is,4);
1316   if (subdm) PetscValidPointer(subdm,5);
1317   if (dm->ops->createsubdm) {
1318     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1319   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "DMCreateDomainDecomposition"
1326 /*@C
1327   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1328                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1329                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1330                           define a nonoverlapping covering, while outer subdomains can overlap.
1331                           The optional list of DMs define the DM for each subproblem.
1332 
1333   Not collective
1334 
1335   Input Parameter:
1336 . dm - the DM object
1337 
1338   Output Parameters:
1339 + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1340 . namelist    - The name for each subdomain (or NULL if not requested)
1341 . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1342 . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1343 - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1344 
1345   Level: intermediate
1346 
1347   Notes:
1348   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1349   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1350   and all of the arrays should be freed with PetscFree().
1351 
1352 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1353 @*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the ones that were requested */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  /* If the type has no implementation, all outputs silently stay at their zero/NULL defaults */
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* each subdomain DM shares the parent's user context */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1388 
1389 
1390 #undef __FUNCT__
1391 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1392 /*@C
1393   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1394 
1395   Not collective
1396 
1397   Input Parameters:
1398 + dm - the DM object
1399 . n  - the number of subdomain scatters
1400 - subdms - the local subdomains
1401 
1402   Output Parameters:
1403 + n     - the number of scatters returned
1404 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1405 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1406 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1407 
1408   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1409   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1410   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1411   solution and residual data.
1412 
1413   Level: developer
1414 
1415 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1416 @*/
1417 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1418 {
1419   PetscErrorCode ierr;
1420 
1421   PetscFunctionBegin;
1422   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1423   PetscValidPointer(subdms,3);
1424   if (dm->ops->createddscatters) {
1425     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1426   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1427   PetscFunctionReturn(0);
1428 }
1429 
1430 #undef __FUNCT__
1431 #define __FUNCT__ "DMRefine"
1432 /*@
1433   DMRefine - Refines a DM object
1434 
1435   Collective on DM
1436 
1437   Input Parameter:
1438 + dm   - the DM object
1439 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1440 
1441   Output Parameter:
1442 . dmf - the refined DM, or NULL
1443 
1444   Note: If no refinement was done, the return value is NULL
1445 
1446   Level: developer
1447 
1448 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1449 @*/
1450 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1451 {
1452   PetscErrorCode   ierr;
1453   DMRefineHookLink link;
1454 
1455   PetscFunctionBegin;
1456   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1457   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1458   if (*dmf) {
1459     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1460 
1461     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1462 
1463     (*dmf)->ctx       = dm->ctx;
1464     (*dmf)->leveldown = dm->leveldown;
1465     (*dmf)->levelup   = dm->levelup + 1;
1466 
1467     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1468     for (link=dm->refinehook; link; link=link->next) {
1469       if (link->refinehook) {
1470         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1471       }
1472     }
1473   }
1474   PetscFunctionReturn(0);
1475 }
1476 
1477 #undef __FUNCT__
1478 #define __FUNCT__ "DMRefineHookAdd"
1479 /*@C
1480    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1481 
1482    Logically Collective
1483 
1484    Input Arguments:
1485 +  coarse - DM on which to run a hook when interpolating to a finer level
1486 .  refinehook - function to run when setting up a coarser level
1487 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1488 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1489 
1490    Calling sequence of refinehook:
1491 $    refinehook(DM coarse,DM fine,void *ctx);
1492 
1493 +  coarse - coarse level DM
1494 .  fine - fine level DM to interpolate problem to
1495 -  ctx - optional user-defined function context
1496 
1497    Calling sequence for interphook:
1498 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1499 
1500 +  coarse - coarse level DM
1501 .  interp - matrix interpolating a coarse-level solution to the finer grid
1502 .  fine - fine level DM to update
1503 -  ctx - optional user-defined function context
1504 
1505    Level: advanced
1506 
1507    Notes:
1508    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1509 
1510    If this function is called multiple times, the hooks will be run in the order they are added.
1511 
1512    This function is currently not available from Fortran.
1513 
1514 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1515 @*/
1516 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1517 {
1518   PetscErrorCode   ierr;
1519   DMRefineHookLink link,*p;
1520 
1521   PetscFunctionBegin;
1522   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1523   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1524   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1525   link->refinehook = refinehook;
1526   link->interphook = interphook;
1527   link->ctx        = ctx;
1528   link->next       = NULL;
1529   *p               = link;
1530   PetscFunctionReturn(0);
1531 }
1532 
1533 #undef __FUNCT__
1534 #define __FUNCT__ "DMInterpolate"
1535 /*@
1536    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1537 
1538    Collective if any hooks are
1539 
1540    Input Arguments:
1541 +  coarse - coarser DM to use as a base
1542 .  restrct - interpolation matrix, apply using MatInterpolate()
1543 -  fine - finer DM to update
1544 
1545    Level: developer
1546 
1547 .seealso: DMRefineHookAdd(), MatInterpolate()
1548 @*/
1549 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1550 {
1551   PetscErrorCode   ierr;
1552   DMRefineHookLink link;
1553 
1554   PetscFunctionBegin;
1555   for (link=fine->refinehook; link; link=link->next) {
1556     if (link->interphook) {
1557       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1558     }
1559   }
1560   PetscFunctionReturn(0);
1561 }
1562 
1563 #undef __FUNCT__
1564 #define __FUNCT__ "DMGetRefineLevel"
1565 /*@
1566     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1567 
1568     Not Collective
1569 
1570     Input Parameter:
1571 .   dm - the DM object
1572 
1573     Output Parameter:
1574 .   level - number of refinements
1575 
1576     Level: developer
1577 
1578 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1579 
1580 @*/
1581 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1582 {
1583   PetscFunctionBegin;
1584   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1585   *level = dm->levelup;
1586   PetscFunctionReturn(0);
1587 }
1588 
1589 #undef __FUNCT__
1590 #define __FUNCT__ "DMGlobalToLocalHookAdd"
1591 /*@C
1592    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1593 
1594    Logically Collective
1595 
1596    Input Arguments:
1597 +  dm - the DM
1598 .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1599 .  endhook - function to run after DMGlobalToLocalEnd() has completed
1600 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1601 
1602    Calling sequence for beginhook:
1603 $    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)
1604 
1605 +  dm - global DM
1606 .  g - global vector
1607 .  mode - mode
1608 .  l - local vector
1609 -  ctx - optional user-defined function context
1610 
1611 
1612    Calling sequence for endhook:
1613 $    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)
1614 
1615 +  global - global DM
1616 -  ctx - optional user-defined function context
1617 
1618    Level: advanced
1619 
1620 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1621 @*/
1622 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1623 {
1624   PetscErrorCode          ierr;
1625   DMGlobalToLocalHookLink link,*p;
1626 
1627   PetscFunctionBegin;
1628   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1629   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1630   ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
1631   link->beginhook = beginhook;
1632   link->endhook   = endhook;
1633   link->ctx       = ctx;
1634   link->next      = NULL;
1635   *p              = link;
1636   PetscFunctionReturn(0);
1637 }
1638 
1639 #undef __FUNCT__
1640 #define __FUNCT__ "DMGlobalToLocalBegin"
1641 /*@
1642     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1643 
1644     Neighbor-wise Collective on DM
1645 
1646     Input Parameters:
1647 +   dm - the DM object
1648 .   g - the global vector
1649 .   mode - INSERT_VALUES or ADD_VALUES
1650 -   l - the local vector
1651 
1652 
1653     Level: beginner
1654 
1655 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1656 
1657 @*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run the begin-hooks registered with DMGlobalToLocalHookAdd() before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* Two paths: a default PetscSF (section-based DMs) or the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast only supports insertion of values, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Communication is completed by the matching PetscSFBcastEnd() in DMGlobalToLocalEnd() */
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1686 
1687 #undef __FUNCT__
1688 #define __FUNCT__ "DMGlobalToLocalEnd"
1689 /*@
1690     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1691 
1692     Neighbor-wise Collective on DM
1693 
1694     Input Parameters:
1695 +   dm - the DM object
1696 .   g - the global vector
1697 .   mode - INSERT_VALUES or ADD_VALUES
1698 -   l - the local vector
1699 
1700 
1701     Level: beginner
1702 
1703 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1704 
1705 @*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must take the same path (SF vs type-specific) as the matching DMGlobalToLocalBegin() */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF broadcast only supports insertion of values, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Completes the communication started by PetscSFBcastBegin() in DMGlobalToLocalBegin() */
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run the end-hooks registered with DMGlobalToLocalHookAdd() after the data has arrived */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1732 
1733 #undef __FUNCT__
1734 #define __FUNCT__ "DMLocalToGlobalBegin"
1735 /*@
1736     DMLocalToGlobalBegin - updates global vectors from local vectors
1737 
1738     Neighbor-wise Collective on DM
1739 
1740     Input Parameters:
1741 +   dm - the DM object
1742 .   l - the local vector
1743 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1744            base point.
1745 -   g - the global vector
1746 
1747     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1748            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1749            global array to the final global array with VecAXPY().
1750 
1751     Level: beginner
1752 
1753 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1754 
1755 @*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Two paths: a default PetscSF (section-based DMs) or the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Translate the insert mode into the MPI reduction applied at each owning process */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Communication is completed by the matching PetscSFReduceEnd() in DMLocalToGlobalEnd() */
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to their plain counterparts for the type-specific implementation */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1788 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Complete the SF reduction started by DMLocalToGlobalBegin() */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must recompute the same MPI operation chosen in the Begin call so the
       PetscSFReduceBegin/End pair matches */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Delegate to the type-specific implementation, collapsing *_ALL_VALUES
       modes to their plain counterparts */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1840 
1841 #undef __FUNCT__
1842 #define __FUNCT__ "DMLocalToLocalBegin"
1843 /*@
1844    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1845    that contain irrelevant values) to another local vector where the ghost
1846    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1847 
1848    Neighbor-wise Collective on DM and Vec
1849 
1850    Input Parameters:
1851 +  dm - the DM object
1852 .  g - the original local vector
1853 -  mode - one of INSERT_VALUES or ADD_VALUES
1854 
1855    Output Parameter:
1856 .  l  - the local vector with correct ghost values
1857 
1858    Level: intermediate
1859 
1860    Notes:
1861    The local vectors used here need not be the same as those
1862    obtained from DMCreateLocalVector(), BUT they
1863    must have the same parallel data layout; they could, for example, be
1864    obtained with VecDuplicate() from the DM originating vectors.
1865 
1866 .keywords: DM, local-to-local, begin
1867 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1868 
1869 @*/
1870 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1871 {
1872   PetscErrorCode          ierr;
1873 
1874   PetscFunctionBegin;
1875   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1876   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1877   PetscFunctionReturn(0);
1878 }
1879 
1880 #undef __FUNCT__
1881 #define __FUNCT__ "DMLocalToLocalEnd"
1882 /*@
1883    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1884    that contain irrelevant values) to another local vector where the ghost
1885    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1886 
1887    Neighbor-wise Collective on DM and Vec
1888 
1889    Input Parameters:
1890 +  da - the DM object
1891 .  g - the original local vector
1892 -  mode - one of INSERT_VALUES or ADD_VALUES
1893 
1894    Output Parameter:
1895 .  l  - the local vector with correct ghost values
1896 
1897    Level: intermediate
1898 
1899    Notes:
1900    The local vectors used here need not be the same as those
1901    obtained from DMCreateLocalVector(), BUT they
1902    must have the same parallel data layout; they could, for example, be
1903    obtained with VecDuplicate() from the DM originating vectors.
1904 
1905 .keywords: DM, local-to-local, end
1906 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1907 
1908 @*/
1909 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1910 {
1911   PetscErrorCode          ierr;
1912 
1913   PetscFunctionBegin;
1914   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1915   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1916   PetscFunctionReturn(0);
1917 }
1918 
1919 
1920 #undef __FUNCT__
1921 #define __FUNCT__ "DMCoarsen"
1922 /*@
1923     DMCoarsen - Coarsens a DM object
1924 
1925     Collective on DM
1926 
1927     Input Parameter:
1928 +   dm - the DM object
1929 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1930 
1931     Output Parameter:
1932 .   dmc - the coarsened DM
1933 
1934     Level: developer
1935 
1936 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1937 
1938 @*/
1939 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1940 {
1941   PetscErrorCode    ierr;
1942   DMCoarsenHookLink link;
1943 
1944   PetscFunctionBegin;
1945   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1946   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1947   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1948   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1949   (*dmc)->ctx               = dm->ctx;
1950   (*dmc)->levelup           = dm->levelup;
1951   (*dmc)->leveldown         = dm->leveldown + 1;
1952   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1953   for (link=dm->coarsenhook; link; link=link->next) {
1954     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1955   }
1956   PetscFunctionReturn(0);
1957 }
1958 
1959 #undef __FUNCT__
1960 #define __FUNCT__ "DMCoarsenHookAdd"
1961 /*@C
1962    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1963 
1964    Logically Collective
1965 
1966    Input Arguments:
1967 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1968 .  coarsenhook - function to run when setting up a coarser level
1969 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1970 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1971 
1972    Calling sequence of coarsenhook:
1973 $    coarsenhook(DM fine,DM coarse,void *ctx);
1974 
1975 +  fine - fine level DM
1976 .  coarse - coarse level DM to restrict problem to
1977 -  ctx - optional user-defined function context
1978 
1979    Calling sequence for restricthook:
1980 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1981 
1982 +  fine - fine level DM
1983 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1984 .  rscale - scaling vector for restriction
1985 .  inject - matrix restricting by injection
1986 .  coarse - coarse level DM to update
1987 -  ctx - optional user-defined function context
1988 
1989    Level: advanced
1990 
1991    Notes:
1992    This function is only needed if auxiliary data needs to be set up on coarse grids.
1993 
1994    If this function is called multiple times, the hooks will be run in the order they are added.
1995 
1996    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1997    extract the finest level information from its context (instead of from the SNES).
1998 
1999    This function is currently not available from Fortran.
2000 
2001 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2002 @*/
2003 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2004 {
2005   PetscErrorCode    ierr;
2006   DMCoarsenHookLink link,*p;
2007 
2008   PetscFunctionBegin;
2009   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2010   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2011   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2012   link->coarsenhook  = coarsenhook;
2013   link->restricthook = restricthook;
2014   link->ctx          = ctx;
2015   link->next         = NULL;
2016   *p                 = link;
2017   PetscFunctionReturn(0);
2018 }
2019 
2020 #undef __FUNCT__
2021 #define __FUNCT__ "DMRestrict"
2022 /*@
2023    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2024 
2025    Collective if any hooks are
2026 
2027    Input Arguments:
2028 +  fine - finer DM to use as a base
2029 .  restrct - restriction matrix, apply using MatRestrict()
2030 .  inject - injection matrix, also use MatRestrict()
2031 -  coarse - coarer DM to update
2032 
2033    Level: developer
2034 
2035 .seealso: DMCoarsenHookAdd(), MatRestrict()
2036 @*/
2037 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2038 {
2039   PetscErrorCode    ierr;
2040   DMCoarsenHookLink link;
2041 
2042   PetscFunctionBegin;
2043   for (link=fine->coarsenhook; link; link=link->next) {
2044     if (link->restricthook) {
2045       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2046     }
2047   }
2048   PetscFunctionReturn(0);
2049 }
2050 
2051 #undef __FUNCT__
2052 #define __FUNCT__ "DMSubDomainHookAdd"
2053 /*@C
2054    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2055 
2056    Logically Collective
2057 
2058    Input Arguments:
2059 +  global - global DM
2060 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2061 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2062 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2063 
2064 
2065    Calling sequence for ddhook:
2066 $    ddhook(DM global,DM block,void *ctx)
2067 
2068 +  global - global DM
2069 .  block  - block DM
2070 -  ctx - optional user-defined function context
2071 
2072    Calling sequence for restricthook:
2073 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2074 
2075 +  global - global DM
2076 .  out    - scatter to the outer (with ghost and overlap points) block vector
2077 .  in     - scatter to block vector values only owned locally
2078 .  block  - block DM
2079 -  ctx - optional user-defined function context
2080 
2081    Level: advanced
2082 
2083    Notes:
2084    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2085 
2086    If this function is called multiple times, the hooks will be run in the order they are added.
2087 
2088    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2089    extract the global information from its context (instead of from the SNES).
2090 
2091    This function is currently not available from Fortran.
2092 
2093 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2094 @*/
2095 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2096 {
2097   PetscErrorCode      ierr;
2098   DMSubDomainHookLink link,*p;
2099 
2100   PetscFunctionBegin;
2101   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2102   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2103   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2104   link->restricthook = restricthook;
2105   link->ddhook       = ddhook;
2106   link->ctx          = ctx;
2107   link->next         = NULL;
2108   *p                 = link;
2109   PetscFunctionReturn(0);
2110 }
2111 
2112 #undef __FUNCT__
2113 #define __FUNCT__ "DMSubDomainRestrict"
2114 /*@
2115    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2116 
2117    Collective if any hooks are
2118 
2119    Input Arguments:
2120 +  fine - finer DM to use as a base
2121 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2122 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2123 -  coarse - coarer DM to update
2124 
2125    Level: developer
2126 
2127 .seealso: DMCoarsenHookAdd(), MatRestrict()
2128 @*/
2129 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2130 {
2131   PetscErrorCode      ierr;
2132   DMSubDomainHookLink link;
2133 
2134   PetscFunctionBegin;
2135   for (link=global->subdomainhook; link; link=link->next) {
2136     if (link->restricthook) {
2137       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2138     }
2139   }
2140   PetscFunctionReturn(0);
2141 }
2142 
2143 #undef __FUNCT__
2144 #define __FUNCT__ "DMGetCoarsenLevel"
2145 /*@
2146     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2147 
2148     Not Collective
2149 
2150     Input Parameter:
2151 .   dm - the DM object
2152 
2153     Output Parameter:
2154 .   level - number of coarsenings
2155 
2156     Level: developer
2157 
2158 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2159 
2160 @*/
2161 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2162 {
2163   PetscFunctionBegin;
2164   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2165   *level = dm->leveldown;
2166   PetscFunctionReturn(0);
2167 }
2168 
2169 
2170 
2171 #undef __FUNCT__
2172 #define __FUNCT__ "DMRefineHierarchy"
2173 /*@C
2174     DMRefineHierarchy - Refines a DM object, all levels at once
2175 
2176     Collective on DM
2177 
2178     Input Parameter:
2179 +   dm - the DM object
2180 -   nlevels - the number of levels of refinement
2181 
2182     Output Parameter:
2183 .   dmf - the refined DM hierarchy
2184 
2185     Level: developer
2186 
2187 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2188 
2189 @*/
2190 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2191 {
2192   PetscErrorCode ierr;
2193 
2194   PetscFunctionBegin;
2195   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2196   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2197   if (nlevels == 0) PetscFunctionReturn(0);
2198   if (dm->ops->refinehierarchy) {
2199     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2200   } else if (dm->ops->refine) {
2201     PetscInt i;
2202 
2203     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2204     for (i=1; i<nlevels; i++) {
2205       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2206     }
2207   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2208   PetscFunctionReturn(0);
2209 }
2210 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHierarchy"
/*@C
    DMCoarsenHierarchy - Coarsens a DM object, all levels at once

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   nlevels - the number of levels of coarsening

    Output Parameter:
.   dmc - the coarsened DM hierarchy

    Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* Prefer a type-provided all-at-once coarsening */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* Fall back to repeated single-level coarsening */
    PetscInt i;

    ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
    }
  } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  PetscFunctionReturn(0);
}
2251 
2252 #undef __FUNCT__
2253 #define __FUNCT__ "DMCreateAggregates"
2254 /*@
2255    DMCreateAggregates - Gets the aggregates that map between
2256    grids associated with two DMs.
2257 
2258    Collective on DM
2259 
2260    Input Parameters:
2261 +  dmc - the coarse grid DM
2262 -  dmf - the fine grid DM
2263 
2264    Output Parameters:
2265 .  rest - the restriction matrix (transpose of the projection matrix)
2266 
2267    Level: intermediate
2268 
2269 .keywords: interpolation, restriction, multigrid
2270 
2271 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2272 @*/
2273 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2274 {
2275   PetscErrorCode ierr;
2276 
2277   PetscFunctionBegin;
2278   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2279   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2280   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2281   PetscFunctionReturn(0);
2282 }
2283 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked on dm->ctx when the DM itself is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2307 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not copy or take ownership of ctx unless a destroy function
     is registered via DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2331 
2332 #undef __FUNCT__
2333 #define __FUNCT__ "DMGetApplicationContext"
2334 /*@
2335     DMGetApplicationContext - Gets a user context from a DM object
2336 
2337     Not Collective
2338 
2339     Input Parameter:
2340 .   dm - the DM object
2341 
2342     Output Parameter:
2343 .   ctx - the user context
2344 
2345     Level: intermediate
2346 
2347 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2348 
2349 @*/
2350 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2351 {
2352   PetscFunctionBegin;
2353   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2354   *(void**)ctx = dm->ctx;
2355   PetscFunctionReturn(0);
2356 }
2357 
2358 #undef __FUNCT__
2359 #define __FUNCT__ "DMSetVariableBounds"
2360 /*@C
2361     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2362 
2363     Logically Collective on DM
2364 
2365     Input Parameter:
2366 +   dm - the DM object
2367 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2368 
2369     Level: intermediate
2370 
2371 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2372          DMSetJacobian()
2373 
2374 @*/
2375 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2376 {
2377   PetscFunctionBegin;
2378   dm->ops->computevariablebounds = f;
2379   PetscFunctionReturn(0);
2380 }
2381 
2382 #undef __FUNCT__
2383 #define __FUNCT__ "DMHasVariableBounds"
2384 /*@
2385     DMHasVariableBounds - does the DM object have a variable bounds function?
2386 
2387     Not Collective
2388 
2389     Input Parameter:
2390 .   dm - the DM object to destroy
2391 
2392     Output Parameter:
2393 .   flg - PETSC_TRUE if the variable bounds function exists
2394 
2395     Level: developer
2396 
2397 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2398 
2399 @*/
2400 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2401 {
2402   PetscFunctionBegin;
2403   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2404   PetscFunctionReturn(0);
2405 }
2406 
2407 #undef __FUNCT__
2408 #define __FUNCT__ "DMComputeVariableBounds"
2409 /*@C
2410     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2411 
2412     Logically Collective on DM
2413 
2414     Input Parameters:
2415 +   dm - the DM object to destroy
2416 -   x  - current solution at which the bounds are computed
2417 
2418     Output parameters:
2419 +   xl - lower bound
2420 -   xu - upper bound
2421 
2422     Level: intermediate
2423 
2424 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2425 
2426 @*/
2427 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2428 {
2429   PetscErrorCode ierr;
2430 
2431   PetscFunctionBegin;
2432   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2433   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2434   if (dm->ops->computevariablebounds) {
2435     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2436   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2437   PetscFunctionReturn(0);
2438 }
2439 
2440 #undef __FUNCT__
2441 #define __FUNCT__ "DMHasColoring"
2442 /*@
2443     DMHasColoring - does the DM object have a method of providing a coloring?
2444 
2445     Not Collective
2446 
2447     Input Parameter:
2448 .   dm - the DM object
2449 
2450     Output Parameter:
2451 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2452 
2453     Level: developer
2454 
2455 .seealso DMHasFunction(), DMCreateColoring()
2456 
2457 @*/
2458 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2459 {
2460   PetscFunctionBegin;
2461   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2462   PetscFunctionReturn(0);
2463 }
2464 
#undef  __FUNCT__
#define __FUNCT__ "DMSetVec"
/*@C
    DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetVec(DM dm,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x) {
    /* Copy x into a DM-owned vector, lazily creating the storage on first use */
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  } else if (dm->x) {
    /* Passing NULL clears any previously stored vector */
    ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2496 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFunctionList DMList              = NULL;
/* Tracks whether DMRegisterAll() has already run, so it is invoked at most once */
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2499 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

  NOTE(review): the example type names above (DM1D etc.) do not appear in this file --
  verify against petscdm.h that they are still current.

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the type's constructor in the registry */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the previous implementation's data before building the new one */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2545 
2546 #undef __FUNCT__
2547 #define __FUNCT__ "DMGetType"
2548 /*@C
2549   DMGetType - Gets the DM type name (as a string) from the DM.
2550 
2551   Not Collective
2552 
2553   Input Parameter:
2554 . dm  - The DM
2555 
2556   Output Parameter:
2557 . type - The DM type name
2558 
2559   Level: intermediate
2560 
2561 .keywords: DM, get, type, name
2562 .seealso: DMSetType(), DMCreate()
2563 @*/
2564 PetscErrorCode  DMGetType(DM dm, DMType *type)
2565 {
2566   PetscErrorCode ierr;
2567 
2568   PetscFunctionBegin;
2569   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2570   PetscValidCharPointer(type,2);
2571   if (!DMRegisterAllCalled) {
2572     ierr = DMRegisterAll();CHKERRQ(ierr);
2573   }
2574   *type = ((PetscObject)dm)->type_name;
2575   PetscFunctionReturn(0);
2576 }
2577 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* sametype/issame are computed but currently unused below; presumably
     intended for a fast path -- confirm against later revisions */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Composed-function name has the form "DMConvert_<fromtype>_<totype>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created only to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No basic converter exists yet; error out. NOTE(review): B leaks on this
       error path since the #if 0 block that destroyed it is disabled */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2673 
2674 /*--------------------------------------------------------------------------------------------------------------------*/
2675 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs

  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -da_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the constructor in the global function list consulted by DMSetType() */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2720 
2721 #undef __FUNCT__
2722 #define __FUNCT__ "DMLoad"
2723 /*@C
2724   DMLoad - Loads a DM that has been stored in binary  with DMView().
2725 
2726   Collective on PetscViewer
2727 
2728   Input Parameters:
2729 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2730            some related function before a call to DMLoad().
2731 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2732            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2733 
2734    Level: intermediate
2735 
2736   Notes:
2737    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2738 
2739   Notes for advanced users:
2740   Most users should not need to know the details of the binary storage
2741   format, since DMLoad() and DMView() completely hide these details.
2742   But for anyone who's interested, the standard binary matrix storage
2743   format is
2744 .vb
2745      has not yet been determined
2746 .ve
2747 
2748 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2749 @*/
2750 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2751 {
2752   PetscErrorCode ierr;
2753   PetscBool      isbinary;
2754   PetscInt       classid;
2755   char           type[256];
2756 
2757   PetscFunctionBegin;
2758   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2759   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2760   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2761   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2762 
2763   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2764   if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2765   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2766   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2767   if (newdm->ops->load) {
2768     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2769   }
2770   PetscFunctionReturn(0);
2771 }
2772 
2773 /******************************** FEM Support **********************************/
2774 
2775 #undef __FUNCT__
2776 #define __FUNCT__ "DMPrintCellVector"
2777 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2778 {
2779   PetscInt       f;
2780   PetscErrorCode ierr;
2781 
2782   PetscFunctionBegin;
2783   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2784   for (f = 0; f < len; ++f) {
2785     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2786   }
2787   PetscFunctionReturn(0);
2788 }
2789 
2790 #undef __FUNCT__
2791 #define __FUNCT__ "DMPrintCellMatrix"
2792 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2793 {
2794   PetscInt       f, g;
2795   PetscErrorCode ierr;
2796 
2797   PetscFunctionBegin;
2798   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2799   for (f = 0; f < rows; ++f) {
2800     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2801     for (g = 0; g < cols; ++g) {
2802       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2803     }
2804     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2805   }
2806   PetscFunctionReturn(0);
2807 }
2808 
2809 #undef __FUNCT__
2810 #define __FUNCT__ "DMPrintLocalVec"
2811 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2812 {
2813   PetscMPIInt    rank, numProcs;
2814   PetscInt       p;
2815   PetscErrorCode ierr;
2816 
2817   PetscFunctionBegin;
2818   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2819   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
2820   ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
2821   for (p = 0; p < numProcs; ++p) {
2822     if (p == rank) {
2823       Vec x;
2824 
2825       ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
2826       ierr = VecCopy(X, x);CHKERRQ(ierr);
2827       ierr = VecChop(x, tol);CHKERRQ(ierr);
2828       ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2829       ierr = VecDestroy(&x);CHKERRQ(ierr);
2830       ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2831     }
2832     ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
2833   }
2834   PetscFunctionReturn(0);
2835 }
2836 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection, or NULL if no section has been set with DMSetDefaultSection()

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: the reference count is deliberately not incremented */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2862 
2863 #undef __FUNCT__
2864 #define __FUNCT__ "DMSetDefaultSection"
2865 /*@
2866   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2867 
2868   Input Parameters:
2869 + dm - The DM
2870 - section - The PetscSection
2871 
2872   Level: intermediate
2873 
2874   Note: Any existing Section will be destroyed
2875 
2876 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2877 @*/
2878 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2879 {
2880   PetscInt       numFields;
2881   PetscInt       f;
2882   PetscErrorCode ierr;
2883 
2884   PetscFunctionBegin;
2885   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2886   PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2887   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2888   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2889   dm->defaultSection = section;
2890   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2891   if (numFields) {
2892     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2893     for (f = 0; f < numFields; ++f) {
2894       const char *name;
2895 
2896       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2897       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2898     }
2899   }
2900   /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2901   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2902   PetscFunctionReturn(0);
2903 }
2904 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazily derive the global section from the local section and the point SF;
       DMSetDefaultSection() clears it so it is rebuilt here on demand */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the value layout (dm->map) so it matches the new global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: the reference count is not incremented */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2940 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection, or NULL to clear the stored global section

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying so re-setting the currently held section is safe.
     NOTE(review): section may be NULL here -- assumes PetscObjectReference()
     tolerates a NULL argument; confirm against the PetscObjectReference() source */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2968 
2969 #undef __FUNCT__
2970 #define __FUNCT__ "DMGetDefaultSF"
2971 /*@
2972   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2973   it is created from the default PetscSection layouts in the DM.
2974 
2975   Input Parameter:
2976 . dm - The DM
2977 
2978   Output Parameter:
2979 . sf - The PetscSF
2980 
2981   Level: intermediate
2982 
2983   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2984 
2985 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2986 @*/
2987 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2988 {
2989   PetscInt       nroots;
2990   PetscErrorCode ierr;
2991 
2992   PetscFunctionBegin;
2993   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2994   PetscValidPointer(sf, 2);
2995   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2996   if (nroots < 0) {
2997     PetscSection section, gSection;
2998 
2999     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
3000     if (section) {
3001       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
3002       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
3003     } else {
3004       *sf = NULL;
3005       PetscFunctionReturn(0);
3006     }
3007   }
3008   *sf = dm->defaultSF;
3009   PetscFunctionReturn(0);
3010 }
3011 
3012 #undef __FUNCT__
3013 #define __FUNCT__ "DMSetDefaultSF"
3014 /*@
3015   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3016 
3017   Input Parameters:
3018 + dm - The DM
3019 - sf - The PetscSF
3020 
3021   Level: intermediate
3022 
3023   Note: Any previous SF is destroyed
3024 
3025 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3026 @*/
3027 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3028 {
3029   PetscErrorCode ierr;
3030 
3031   PetscFunctionBegin;
3032   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3033   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3034   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3035   dm->defaultSF = sf;
3036   PetscFunctionReturn(0);
3037 }
3038 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The resulting graph is stored in dm->defaultSF. Leaves are the local
  (unconstrained) dofs; roots are the globally owned dofs. In the global section
  a negative dof count gdof marks a point owned by another process, encoding
  -(gdof+1) dofs, and a negative offset goff likewise encodes -(goff+1).

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots are the dofs owned by this process (constrained dofs excluded) */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of owned dofs so that a global offset can be mapped to its owner rank */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count the leaves (unconstrained dofs of all points in the chart) */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local offset) and root (rank, remote offset) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets for this point's dofs, skipping constrained indices */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Point owned elsewhere: locate the owner by searching the dof layout ranges */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Point owned locally: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3129 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference: the reference count is deliberately not incremented */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3155 
3156 #undef __FUNCT__
3157 #define __FUNCT__ "DMSetPointSF"
3158 /*@
3159   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3160 
3161   Input Parameters:
3162 + dm - The DM
3163 - sf - The PetscSF
3164 
3165   Level: intermediate
3166 
3167 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3168 @*/
3169 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3170 {
3171   PetscErrorCode ierr;
3172 
3173   PetscFunctionBegin;
3174   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3175   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3176   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3177   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3178   dm->sf = sf;
3179   PetscFunctionReturn(0);
3180 }
3181 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* Return the number of fields currently stored in the DM (as set by DMSetNumFields()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3192 
3193 #undef __FUNCT__
3194 #define __FUNCT__ "DMSetNumFields"
3195 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3196 {
3197   PetscInt       f;
3198   PetscErrorCode ierr;
3199 
3200   PetscFunctionBegin;
3201   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3202   for (f = 0; f < dm->numFields; ++f) {
3203     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3204   }
3205   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3206   dm->numFields = numFields;
3207   ierr          = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr);
3208   for (f = 0; f < dm->numFields; ++f) {
3209     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3210   }
3211   PetscFunctionReturn(0);
3212 }
3213 
#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* Return field f of the DM as a borrowed reference (do not destroy).
   Errors if DMSetNumFields() has not been called or if f is out of range. */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3226 
#undef __FUNCT__
#define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when a DM carrying coordinates is coarsened, transfer the fine-grid
   coordinates to the coarse DM via injection, so the coarse DM also has coordinates.
   Registered by DMSetCoordinates() through DMCoarsenHookAdd(). */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  /* Only transfer when the fine DM has coordinates and the coarse one does not yet */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    /* The injection scatter maps fine coordinate dofs onto the coarse layout */
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates() takes its own reference, so ccoords can be destroyed below */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3251 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when a subdomain DM is created from a DM carrying coordinates,
   scatter both the global and the local coordinate vectors onto the subdomain DM.
   Registered by DMSetCoordinates() through DMSubDomainHookAdd(). */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  /* Only transfer when the parent DM has coordinates and the subdomain does not yet */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* scat_i fills the subdomain's global vector, scat_g its local (ghosted) vector */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* The subdomain DM references both vectors, so our references are dropped below */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3284 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so c == dm->coordinates is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the local coordinates; DMGetCoordinatesLocal() rebuilds them on demand */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Ensure coordinates are propagated to coarsened and subdomain DMs */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3319 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the global coordinates; DMGetCoordinates() rebuilds them on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3356 
3357 #undef __FUNCT__
3358 #define __FUNCT__ "DMGetCoordinates"
3359 /*@
3360   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3361 
3362   Not Collective
3363 
3364   Input Parameter:
3365 . dm - the DM
3366 
3367   Output Parameter:
3368 . c - global coordinate vector
3369 
3370   Note:
3371   This is a borrowed reference, so the user should NOT destroy this vector
3372 
3373   Each process has only the local coordinates (does NOT have the ghost coordinates).
3374 
3375   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3376   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3377 
3378   Level: intermediate
3379 
3380 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3381 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3382 @*/
3383 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3384 {
3385   PetscErrorCode ierr;
3386 
3387   PetscFunctionBegin;
3388   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3389   PetscValidPointer(c,2);
3390   if (!dm->coordinates && dm->coordinatesLocal) {
3391     DM cdm = NULL;
3392 
3393     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3394     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3395     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3396     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3397     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3398   }
3399   *c = dm->coordinates;
3400   PetscFunctionReturn(0);
3401 }
3402 
3403 #undef __FUNCT__
3404 #define __FUNCT__ "DMGetCoordinatesLocal"
3405 /*@
3406   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3407 
3408   Collective on DM
3409 
3410   Input Parameter:
3411 . dm - the DM
3412 
3413   Output Parameter:
3414 . c - coordinate vector
3415 
3416   Note:
3417   This is a borrowed reference, so the user should NOT destroy this vector
3418 
3419   Each process has the local and ghost coordinates
3420 
3421   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3422   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3423 
3424   Level: intermediate
3425 
3426 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3427 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3428 @*/
3429 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3430 {
3431   PetscErrorCode ierr;
3432 
3433   PetscFunctionBegin;
3434   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3435   PetscValidPointer(c,2);
3436   if (!dm->coordinatesLocal && dm->coordinates) {
3437     DM cdm = NULL;
3438 
3439     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3440     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3441     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3442     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3443     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3444   }
3445   *c = dm->coordinatesLocal;
3446   PetscFunctionReturn(0);
3447 }
3448 
3449 #undef __FUNCT__
3450 #define __FUNCT__ "DMGetCoordinateDM"
3451 /*@
3452   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3453 
3454   Collective on DM
3455 
3456   Input Parameter:
3457 . dm - the DM
3458 
3459   Output Parameter:
3460 . cdm - coordinate DM
3461 
3462   Level: intermediate
3463 
3464 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3465 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3466 @*/
3467 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3468 {
3469   PetscErrorCode ierr;
3470 
3471   PetscFunctionBegin;
3472   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3473   PetscValidPointer(cdm,2);
3474   if (!dm->coordinateDM) {
3475     if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3476     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3477   }
3478   *cdm = dm->coordinateDM;
3479   PetscFunctionReturn(0);
3480 }
3481 
3482 #undef __FUNCT__
3483 #define __FUNCT__ "DMSetCoordinateDM"
3484 /*@
3485   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3486 
3487   Logically Collective on DM
3488 
3489   Input Parameters:
3490 + dm - the DM
3491 - cdm - coordinate DM
3492 
3493   Level: intermediate
3494 
3495 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3496 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3497 @*/
3498 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3499 {
3500   PetscErrorCode ierr;
3501 
3502   PetscFunctionBegin;
3503   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3504   PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
3505   ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
3506   dm->coordinateDM = cdm;
3507   ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
3508   PetscFunctionReturn(0);
3509 }
3510 
3511 #undef __FUNCT__
3512 #define __FUNCT__ "DMLocatePoints"
3513 /*@
3514   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3515 
3516   Not collective
3517 
3518   Input Parameters:
3519 + dm - The DM
3520 - v - The Vec of points
3521 
3522   Output Parameter:
3523 . cells - The local cell numbers for cells which contain the points
3524 
3525   Level: developer
3526 
3527 .keywords: point location, mesh
3528 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3529 @*/
3530 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3531 {
3532   PetscErrorCode ierr;
3533 
3534   PetscFunctionBegin;
3535   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3536   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3537   PetscValidPointer(cells,3);
3538   if (dm->ops->locatepoints) {
3539     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3540   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3541   PetscFunctionReturn(0);
3542 }
3543