/* xref: /petsc/src/dm/interface/dm.c (revision 7fdeb8b9be692273ce99bf256278a44482a052ca) */
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
/* Class id and logging events for the DM package; registered by DMInitializePackage() */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

/* String names for DMBoundaryType; order must match the enum declaration in the public header */
const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};
8 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Make sure all packages whose objects a DM may create are registered first */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Zero the ops table so unimplemented operations are detectable as NULL pointers */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap              = NULL;
  v->ltogmapb             = NULL;
  v->bs                   = 1;   /* 1 means "no exploitable block structure" until a type says otherwise */
  v->coloringtype         = IS_COLORING_GLOBAL;
  ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 presumably matches the fixed size of nullspaceConstructors[] in dmimpl.h — confirm and use that constant */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;
  /* Defaults; changeable later via DMSetVecType()/DMSetMatType() or -dm_vec_type/-dm_mat_type */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "DMClone"
70 /*@
71   DMClone - Creates a DM object with the same topology as the original.
72 
73   Collective on MPI_Comm
74 
75   Input Parameter:
76 . dm - The original DM object
77 
78   Output Parameter:
79 . newdm  - The new DM object
80 
81   Level: beginner
82 
83 .keywords: DM, topology, create
84 @*/
85 PetscErrorCode DMClone(DM dm, DM *newdm)
86 {
87   PetscSF        sf;
88   Vec            coords;
89   void          *ctx;
90   PetscErrorCode ierr;
91 
92   PetscFunctionBegin;
93   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
94   PetscValidPointer(newdm,2);
95   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
96   if (dm->ops->clone) {
97     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
98   }
99   (*newdm)->setupcalled = PETSC_TRUE;
100   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
101   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
102   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
103   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
104   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
105   if (coords) {
106     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
107   } else {
108     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
109     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
110   }
111   PetscFunctionReturn(0);
112 }
113 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Free any previously stored type name, then keep our own copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
142 
143 #undef __FUNCT__
144 #define __FUNCT__ "DMGetVecType"
145 /*@C
146        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
147 
148    Logically Collective on DMDA
149 
150    Input Parameter:
151 .  da - initial distributed array
152 
153    Output Parameter:
154 .  ctype - the vector type
155 
156    Level: intermediate
157 
158 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
159 @*/
160 PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
161 {
162   PetscFunctionBegin;
163   PetscValidHeaderSpecific(da,DM_CLASSID,1);
164   *ctype = da->vectype;
165   PetscFunctionReturn(0);
166 }
167 
168 #undef __FUNCT__
169 #define __FUNCT__ "VecGetDM"
170 /*@
171   VecGetDM - Gets the DM defining the data layout of the vector
172 
173   Not collective
174 
175   Input Parameter:
176 . v - The Vec
177 
178   Output Parameter:
179 . dm - The DM
180 
181   Level: intermediate
182 
183 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
184 @*/
185 PetscErrorCode VecGetDM(Vec v, DM *dm)
186 {
187   PetscErrorCode ierr;
188 
189   PetscFunctionBegin;
190   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
191   PetscValidPointer(dm,2);
192   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
193   PetscFunctionReturn(0);
194 }
195 
196 #undef __FUNCT__
197 #define __FUNCT__ "VecSetDM"
198 /*@
199   VecSetDM - Sets the DM defining the data layout of the vector
200 
201   Not collective
202 
203   Input Parameters:
204 + v - The Vec
205 - dm - The DM
206 
207   Level: intermediate
208 
209 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
210 @*/
211 PetscErrorCode VecSetDM(Vec v, DM dm)
212 {
213   PetscErrorCode ierr;
214 
215   PetscFunctionBegin;
216   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
217   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
218   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
219   PetscFunctionReturn(0);
220 }
221 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Free any previously stored type name, then keep our own copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
250 
251 #undef __FUNCT__
252 #define __FUNCT__ "DMGetMatType"
253 /*@C
254        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
255 
256    Logically Collective on DM
257 
258    Input Parameter:
259 .  dm - the DM context
260 
261    Output Parameter:
262 .  ctype - the matrix type
263 
264    Options Database:
265 .   -dm_mat_type ctype
266 
267    Level: intermediate
268 
269 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
270 @*/
271 PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
272 {
273   PetscFunctionBegin;
274   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
275   *ctype = dm->mattype;
276   PetscFunctionReturn(0);
277 }
278 
279 #undef __FUNCT__
280 #define __FUNCT__ "MatGetDM"
281 /*@
282   MatGetDM - Gets the DM defining the data layout of the matrix
283 
284   Not collective
285 
286   Input Parameter:
287 . A - The Mat
288 
289   Output Parameter:
290 . dm - The DM
291 
292   Level: intermediate
293 
294 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
295 @*/
296 PetscErrorCode MatGetDM(Mat A, DM *dm)
297 {
298   PetscErrorCode ierr;
299 
300   PetscFunctionBegin;
301   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
302   PetscValidPointer(dm,2);
303   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
304   PetscFunctionReturn(0);
305 }
306 
307 #undef __FUNCT__
308 #define __FUNCT__ "MatSetDM"
309 /*@
310   MatSetDM - Sets the DM defining the data layout of the matrix
311 
312   Not collective
313 
314   Input Parameters:
315 + A - The Mat
316 - dm - The DM
317 
318   Level: intermediate
319 
320 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
321 @*/
322 PetscErrorCode MatSetDM(Mat A, DM dm)
323 {
324   PetscErrorCode ierr;
325 
326   PetscFunctionBegin;
327   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
328   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
329   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
330   PetscFunctionReturn(0);
331 }
332 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
364 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a DM object (e.g. a vector packer or DMDA), accounting
    for circular references between the DM and vectors/objects it holds.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((PetscObject) (*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only references beyond the internal circular ones keep the DM alive */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* Cached local work vectors; a vector still checked out by the user is an error */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedglobal;
  (*dm)->namedglobal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  nnext=(*dm)->namedlocal;
  (*dm)->namedlocal = NULL;
  for (nlink=nnext; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  /* Solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User context: only destroyed if the user supplied a destroy routine */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy((PetscObject *) &(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Type-specific teardown last, so implementations can still use the base data above */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
522 
523 #undef __FUNCT__
524 #define __FUNCT__ "DMSetUp"
525 /*@
526     DMSetUp - sets up the data structures inside a DM object
527 
528     Collective on DM
529 
530     Input Parameter:
531 .   dm - the DM object to setup
532 
533     Level: developer
534 
535 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
536 
537 @*/
538 PetscErrorCode  DMSetUp(DM dm)
539 {
540   PetscErrorCode ierr;
541 
542   PetscFunctionBegin;
543   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
544   if (dm->setupcalled) PetscFunctionReturn(0);
545   if (dm->ops->setup) {
546     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
547   }
548   dm->setupcalled = PETSC_TRUE;
549   PetscFunctionReturn(0);
550 }
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "DMSetFromOptions"
554 /*@
555     DMSetFromOptions - sets parameters in a DM from the options database
556 
557     Collective on DM
558 
559     Input Parameter:
560 .   dm - the DM object to set options for
561 
562     Options Database:
563 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
564 .   -dm_vec_type <type>  type of vector to create inside DM
565 .   -dm_mat_type <type>  type of matrix to create inside DM
566 -   -dm_coloring_type <global or ghosted>
567 
568     Level: developer
569 
570 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
571 
572 @*/
573 PetscErrorCode  DMSetFromOptions(DM dm)
574 {
575   char           typeName[256];
576   PetscBool      flg;
577   PetscErrorCode ierr;
578 
579   PetscFunctionBegin;
580   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
581   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
582   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
583   ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
584   if (flg) {
585     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
586   }
587   ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
588   if (flg) {
589     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
590   }
591   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
592   if (dm->ops->setfromoptions) {
593     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
594   }
595   /* process any options handlers added with PetscObjectAddOptionsHandler() */
596   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
597   ierr = PetscOptionsEnd();CHKERRQ(ierr);
598   PetscFunctionReturn(0);
599 }
600 
601 #undef __FUNCT__
602 #define __FUNCT__ "DMView"
603 /*@C
604     DMView - Views a vector packer or DMDA.
605 
606     Collective on DM
607 
608     Input Parameter:
609 +   dm - the DM object to view
610 -   v - the viewer
611 
612     Level: developer
613 
614 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
615 
616 @*/
617 PetscErrorCode  DMView(DM dm,PetscViewer v)
618 {
619   PetscErrorCode ierr;
620   PetscBool      isbinary;
621 
622   PetscFunctionBegin;
623   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
624   if (!v) {
625     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
626   }
627   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
628   if (isbinary) {
629     PetscInt classid = DM_FILE_CLASSID;
630     char     type[256];
631 
632     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
633     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
634     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
635   }
636   if (dm->ops->view) {
637     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
638   }
639   PetscFunctionReturn(0);
640 }
641 
642 #undef __FUNCT__
643 #define __FUNCT__ "DMCreateGlobalVector"
644 /*@
645     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
646 
647     Collective on DM
648 
649     Input Parameter:
650 .   dm - the DM object
651 
652     Output Parameter:
653 .   vec - the global vector
654 
655     Level: beginner
656 
657 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
658 
659 @*/
660 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
661 {
662   PetscErrorCode ierr;
663 
664   PetscFunctionBegin;
665   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
666   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
667   PetscFunctionReturn(0);
668 }
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "DMCreateLocalVector"
672 /*@
673     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
674 
675     Not Collective
676 
677     Input Parameter:
678 .   dm - the DM object
679 
680     Output Parameter:
681 .   vec - the local vector
682 
683     Level: beginner
684 
685 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
686 
687 @*/
688 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
689 {
690   PetscErrorCode ierr;
691 
692   PetscFunctionBegin;
693   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
694   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
695   PetscFunctionReturn(0);
696 }
697 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first access and cached in the DM; the DM
   retains ownership, so the caller must not destroy it.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* If the DM carries a PetscSection, build the mapping from it; otherwise defer to the type */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc1(size, &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        /* NOTE(review): global offsets for unowned points are presumably encoded specially
           (e.g. negative) by the global section — confirm the consumers handle that */
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* The mapping takes ownership of the ltog array (PETSC_OWN_POINTER) */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
758 
759 #undef __FUNCT__
760 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
761 /*@
762    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
763 
764    Collective on DM
765 
766    Input Parameter:
767 .  da - the distributed array that provides the mapping
768 
769    Output Parameter:
770 .  ltog - the block mapping
771 
772    Level: intermediate
773 
774    Notes:
775    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
776    MatSetLocalToGlobalMappingBlock().
777 
778 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
779 @*/
780 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
781 {
782   PetscErrorCode ierr;
783 
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
786   PetscValidPointer(ltog,2);
787   if (!dm->ltogmapb) {
788     PetscInt bs;
789     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
790     if (bs > 1) {
791       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
792       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
793     } else {
794       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
795       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
796     }
797   }
798   *ltog = dm->ltogmapb;
799   PetscFunctionReturn(0);
800 }
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "DMGetBlockSize"
804 /*@
805    DMGetBlockSize - Gets the inherent block size associated with a DM
806 
807    Not Collective
808 
809    Input Parameter:
810 .  dm - the DM with block structure
811 
812    Output Parameter:
813 .  bs - the block size, 1 implies no exploitable block structure
814 
815    Level: intermediate
816 
817 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
818 @*/
819 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
820 {
821   PetscFunctionBegin;
822   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
823   PetscValidPointer(bs,2);
824   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
825   *bs = dm->bs;
826   PetscFunctionReturn(0);
827 }
828 
829 #undef __FUNCT__
830 #define __FUNCT__ "DMCreateInterpolation"
831 /*@
832     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
833 
834     Collective on DM
835 
836     Input Parameter:
837 +   dm1 - the DM object
838 -   dm2 - the second, finer DM object
839 
840     Output Parameter:
841 +  mat - the interpolation
842 -  vec - the scaling (optional)
843 
844     Level: developer
845 
846     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
847         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
848 
849         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
850         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
851 
852 
853 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
854 
855 @*/
856 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
857 {
858   PetscErrorCode ierr;
859 
860   PetscFunctionBegin;
861   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
862   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
863   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
864   PetscFunctionReturn(0);
865 }
866 
867 #undef __FUNCT__
868 #define __FUNCT__ "DMCreateInjection"
869 /*@
870     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
871 
872     Collective on DM
873 
874     Input Parameter:
875 +   dm1 - the DM object
876 -   dm2 - the second, finer DM object
877 
878     Output Parameter:
879 .   ctx - the injection
880 
881     Level: developer
882 
883    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
884         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
885 
886 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
887 
888 @*/
889 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
890 {
891   PetscErrorCode ierr;
892 
893   PetscFunctionBegin;
894   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
895   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
896   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
897   PetscFunctionReturn(0);
898 }
899 
900 #undef __FUNCT__
901 #define __FUNCT__ "DMCreateColoring"
902 /*@
903     DMCreateColoring - Gets coloring for a DM
904 
905     Collective on DM
906 
907     Input Parameter:
908 +   dm - the DM object
909 -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
910 
911     Output Parameter:
912 .   coloring - the coloring
913 
914     Level: developer
915 
916 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
917 
918 @*/
919 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
920 {
921   PetscErrorCode ierr;
922 
923   PetscFunctionBegin;
924   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
925   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
926   ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
927   PetscFunctionReturn(0);
928 }
929 
930 #undef __FUNCT__
931 #define __FUNCT__ "DMCreateMatrix"
932 /*@
933     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
934 
935     Collective on DM
936 
937     Input Parameter:
938 .   dm - the DM object
939 
940     Output Parameter:
941 .   mat - the empty Jacobian
942 
943     Level: beginner
944 
945     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
946        do not need to do it yourself.
947 
948        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
949        the nonzero pattern call DMDASetMatPreallocateOnly()
950 
951        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
952        internally by PETSc.
953 
954        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
955        the indices for the global numbering for DMDAs which is complicated.
956 
957 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
958 
959 @*/
960 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
961 {
962   PetscErrorCode ierr;
963 
964   PetscFunctionBegin;
965   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
966   ierr = MatInitializePackage();CHKERRQ(ierr);
967   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
968   PetscValidPointer(mat,3);
969   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
970   PetscFunctionReturn(0);
971 }
972 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag consulted when the matrix is later built -- see the DMCreateMatrix() notes */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
995 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - the work array

  Level: developer

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free list (workin) if one is available, otherwise allocate a new one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the link's buffer if it cannot hold count items of the requested type */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list (workout) and hand its buffer to the caller */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1041 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (not used by this routine)
- dtype - data type (not used by this routine)

  Output Parameter:
. mem - the work array; set to NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link whose buffer matches *mem and move it back onto the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* *mem did not come from DMGetWorkArray() on this DM (or was already restored) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1080 
1081 #undef __FUNCT__
1082 #define __FUNCT__ "DMSetNullSpaceConstructor"
1083 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1084 {
1085   PetscFunctionBegin;
1086   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1087   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1088   dm->nullspaceConstructors[field] = nullsp;
1089   PetscFunctionReturn(0);
1090 }
1091 
1092 #undef __FUNCT__
1093 #define __FUNCT__ "DMCreateFieldIS"
1094 /*@C
1095   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1096 
1097   Not collective
1098 
1099   Input Parameter:
1100 . dm - the DM object
1101 
1102   Output Parameters:
1103 + numFields  - The number of fields (or NULL if not requested)
1104 . fieldNames - The name for each field (or NULL if not requested)
1105 - fields     - The global indices for each field (or NULL if not requested)
1106 
1107   Level: intermediate
1108 
1109   Notes:
1110   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1111   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1112   PetscFree().
1113 
1114 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1115 @*/
1116 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1117 {
1118   PetscSection   section, sectionGlobal;
1119   PetscErrorCode ierr;
1120 
1121   PetscFunctionBegin;
1122   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1123   if (numFields) {
1124     PetscValidPointer(numFields,2);
1125     *numFields = 0;
1126   }
1127   if (fieldNames) {
1128     PetscValidPointer(fieldNames,3);
1129     *fieldNames = NULL;
1130   }
1131   if (fields) {
1132     PetscValidPointer(fields,4);
1133     *fields = NULL;
1134   }
1135   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1136   if (section) {
1137     PetscInt *fieldSizes, **fieldIndices;
1138     PetscInt nF, f, pStart, pEnd, p;
1139 
1140     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1141     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1142     ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr);
1143     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1144     for (f = 0; f < nF; ++f) {
1145       fieldSizes[f] = 0;
1146     }
1147     for (p = pStart; p < pEnd; ++p) {
1148       PetscInt gdof;
1149 
1150       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1151       if (gdof > 0) {
1152         for (f = 0; f < nF; ++f) {
1153           PetscInt fdof, fcdof;
1154 
1155           ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1156           ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1157           fieldSizes[f] += fdof-fcdof;
1158         }
1159       }
1160     }
1161     for (f = 0; f < nF; ++f) {
1162       ierr          = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr);
1163       fieldSizes[f] = 0;
1164     }
1165     for (p = pStart; p < pEnd; ++p) {
1166       PetscInt gdof, goff;
1167 
1168       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1169       if (gdof > 0) {
1170         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1171         for (f = 0; f < nF; ++f) {
1172           PetscInt fdof, fcdof, fc;
1173 
1174           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1175           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1176           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1177             fieldIndices[f][fieldSizes[f]] = goff++;
1178           }
1179         }
1180       }
1181     }
1182     if (numFields) *numFields = nF;
1183     if (fieldNames) {
1184       ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr);
1185       for (f = 0; f < nF; ++f) {
1186         const char *fieldName;
1187 
1188         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1189         ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
1190       }
1191     }
1192     if (fields) {
1193       ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr);
1194       for (f = 0; f < nF; ++f) {
1195         ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1196       }
1197     }
1198     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1199   } else if (dm->ops->createfieldis) {
1200     ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
1201   }
1202   PetscFunctionReturn(0);
1203 }
1204 
1205 
1206 #undef __FUNCT__
1207 #define __FUNCT__ "DMCreateFieldDecomposition"
1208 /*@C
1209   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1210                           corresponding to different fields: each IS contains the global indices of the dofs of the
1211                           corresponding field. The optional list of DMs define the DM for each subproblem.
1212                           Generalizes DMCreateFieldIS().
1213 
1214   Not collective
1215 
1216   Input Parameter:
1217 . dm - the DM object
1218 
1219   Output Parameters:
1220 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1221 . namelist  - The name for each field (or NULL if not requested)
1222 . islist    - The global indices for each field (or NULL if not requested)
1223 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1224 
1225   Level: intermediate
1226 
1227   Notes:
1228   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1229   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1230   and all of the arrays should be freed with PetscFree().
1231 
1232 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1233 @*/
1234 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1235 {
1236   PetscErrorCode ierr;
1237 
1238   PetscFunctionBegin;
1239   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1240   if (len) {
1241     PetscValidPointer(len,2);
1242     *len = 0;
1243   }
1244   if (namelist) {
1245     PetscValidPointer(namelist,3);
1246     *namelist = 0;
1247   }
1248   if (islist) {
1249     PetscValidPointer(islist,4);
1250     *islist = 0;
1251   }
1252   if (dmlist) {
1253     PetscValidPointer(dmlist,5);
1254     *dmlist = 0;
1255   }
1256   /*
1257    Is it a good idea to apply the following check across all impls?
1258    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1259    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1260    */
1261   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1262   if (!dm->ops->createfielddecomposition) {
1263     PetscSection section;
1264     PetscInt     numFields, f;
1265 
1266     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1267     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1268     if (section && numFields && dm->ops->createsubdm) {
1269       *len = numFields;
1270       if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);}
1271       if (islist)   {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);}
1272       if (dmlist)   {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);}
1273       for (f = 0; f < numFields; ++f) {
1274         const char *fieldName;
1275 
1276         ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr);
1277         if (namelist) {
1278           ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1279           ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1280         }
1281       }
1282     } else {
1283       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1284       /* By default there are no DMs associated with subproblems. */
1285       if (dmlist) *dmlist = NULL;
1286     }
1287   } else {
1288     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1289   }
1290   PetscFunctionReturn(0);
1291 }
1292 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - number of fields in this subproblem
- fields    - the numbers of the fields defining this subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* Both outputs are optional */
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1328 
1329 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

  If the DM type provides no implementation, all requested outputs are returned zeroed/NULL.

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Every output is optional; validate and zero only the ones requested */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    /* l (the subdomain count) is set by the implementation callback before it is read below */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    /* NOTE(review): assumes the implementation filled *dmlist when dmlist was requested -- confirm per implementation */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1394 
1395 
1396 #undef __FUNCT__
1397 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1398 /*@C
1399   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1400 
1401   Not collective
1402 
1403   Input Parameters:
1404 + dm - the DM object
1405 . n  - the number of subdomain scatters
1406 - subdms - the local subdomains
1407 
1408   Output Parameters:
1409 + n     - the number of scatters returned
1410 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1411 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1412 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1413 
1414   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1415   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1416   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1417   solution and residual data.
1418 
1419   Level: developer
1420 
1421 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1422 @*/
1423 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1424 {
1425   PetscErrorCode ierr;
1426 
1427   PetscFunctionBegin;
1428   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1429   PetscValidPointer(subdms,3);
1430   if (dm->ops->createddscatters) {
1431     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1432   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1433   PetscFunctionReturn(0);
1434 }
1435 
1436 #undef __FUNCT__
1437 #define __FUNCT__ "DMRefine"
1438 /*@
1439   DMRefine - Refines a DM object
1440 
1441   Collective on DM
1442 
1443   Input Parameter:
1444 + dm   - the DM object
1445 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1446 
1447   Output Parameter:
1448 . dmf - the refined DM, or NULL
1449 
1450   Note: If no refinement was done, the return value is NULL
1451 
1452   Level: developer
1453 
1454 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1455 @*/
1456 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1457 {
1458   PetscErrorCode   ierr;
1459   DMRefineHookLink link;
1460 
1461   PetscFunctionBegin;
1462   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1463   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1464   if (*dmf) {
1465     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1466 
1467     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1468 
1469     (*dmf)->ctx       = dm->ctx;
1470     (*dmf)->leveldown = dm->leveldown;
1471     (*dmf)->levelup   = dm->levelup + 1;
1472 
1473     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1474     for (link=dm->refinehook; link; link=link->next) {
1475       if (link->refinehook) {
1476         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1477       }
1478     }
1479   }
1480   PetscFunctionReturn(0);
1481 }
1482 
1483 #undef __FUNCT__
1484 #define __FUNCT__ "DMRefineHookAdd"
1485 /*@C
1486    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1487 
1488    Logically Collective
1489 
1490    Input Arguments:
1491 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1492 .  refinehook - function to run when setting up a coarser level
1493 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1494 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1495 
1496    Calling sequence of refinehook:
1497 $    refinehook(DM coarse,DM fine,void *ctx);
1498 
1499 +  coarse - coarse level DM
1500 .  fine - fine level DM to interpolate problem to
1501 -  ctx - optional user-defined function context
1502 
1503    Calling sequence for interphook:
1504 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1505 
1506 +  coarse - coarse level DM
1507 .  interp - matrix interpolating a coarse-level solution to the finer grid
1508 .  fine - fine level DM to update
1509 -  ctx - optional user-defined function context
1510 
1511    Level: advanced
1512 
1513    Notes:
1514    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1515 
1516    If this function is called multiple times, the hooks will be run in the order they are added.
1517 
1518    This function is currently not available from Fortran.
1519 
1520 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1521 @*/
1522 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1523 {
1524   PetscErrorCode   ierr;
1525   DMRefineHookLink link,*p;
1526 
1527   PetscFunctionBegin;
1528   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1529   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1530   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1531   link->refinehook = refinehook;
1532   link->interphook = interphook;
1533   link->ctx        = ctx;
1534   link->next       = NULL;
1535   *p               = link;
1536   PetscFunctionReturn(0);
1537 }
1538 
1539 #undef __FUNCT__
1540 #define __FUNCT__ "DMInterpolate"
1541 /*@
1542    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1543 
1544    Collective if any hooks are
1545 
1546    Input Arguments:
1547 +  coarse - coarser DM to use as a base
1548 .  restrct - interpolation matrix, apply using MatInterpolate()
1549 -  fine - finer DM to update
1550 
1551    Level: developer
1552 
1553 .seealso: DMRefineHookAdd(), MatInterpolate()
1554 @*/
1555 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1556 {
1557   PetscErrorCode   ierr;
1558   DMRefineHookLink link;
1559 
1560   PetscFunctionBegin;
1561   for (link=fine->refinehook; link; link=link->next) {
1562     if (link->interphook) {
1563       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1564     }
1565   }
1566   PetscFunctionReturn(0);
1567 }
1568 
#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   level - number of refinements

    Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* levelup is incremented each time DMRefine() produces a finer DM */
  *level = dm->levelup;
  PetscFunctionReturn(0);
}
1594 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1644 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast path only supports insertion; additive mode is rejected */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored while PetscSFBcastEnd() is still outstanding -- presumably the SF keeps the raw pointers valid; confirm against the SF implementation */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; the *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1692 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF broadcast path only supports insertion; additive mode is rejected */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; the *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user endhooks registered with DMGlobalToLocalHookAdd() after communication completes */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1738 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode to the MPI reduction used by the SF reduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored while PetscSFReduceEnd() is still outstanding -- presumably the SF keeps the raw pointers valid; confirm against the SF implementation */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; the *_ALL_VALUES modes are mapped to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1794 
1795 #undef __FUNCT__
1796 #define __FUNCT__ "DMLocalToGlobalEnd"
1797 /*@
1798     DMLocalToGlobalEnd - updates global vectors from local vectors
1799 
1800     Neighbor-wise Collective on DM
1801 
1802     Input Parameters:
1803 +   dm - the DM object
1804 .   l - the local vector
1805 .   mode - INSERT_VALUES or ADD_VALUES
1806 -   g - the global vector
1807 
1808 
1809     Level: beginner
1810 
1811 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1812 
1813 @*/
1814 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1815 {
1816   PetscSF        sf;
1817   PetscErrorCode ierr;
1818 
1819   PetscFunctionBegin;
1820   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1821   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1822   if (sf) {
1823     MPI_Op      op;
1824     PetscScalar *lArray, *gArray;
1825 
1826     switch (mode) {
1827     case INSERT_VALUES:
1828     case INSERT_ALL_VALUES:
1829       op = MPIU_REPLACE; break;
1830     case ADD_VALUES:
1831     case ADD_ALL_VALUES:
1832       op = MPI_SUM; break;
1833     default:
1834       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1835     }
1836     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1837     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1838     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1839     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1840     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1841   } else {
1842     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1843   }
1844   PetscFunctionReturn(0);
1845 }
1846 
1847 #undef __FUNCT__
1848 #define __FUNCT__ "DMLocalToLocalBegin"
1849 /*@
1850    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1851    that contain irrelevant values) to another local vector where the ghost
1852    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1853 
1854    Neighbor-wise Collective on DM and Vec
1855 
1856    Input Parameters:
1857 +  dm - the DM object
1858 .  g - the original local vector
1859 -  mode - one of INSERT_VALUES or ADD_VALUES
1860 
1861    Output Parameter:
1862 .  l  - the local vector with correct ghost values
1863 
1864    Level: intermediate
1865 
1866    Notes:
1867    The local vectors used here need not be the same as those
1868    obtained from DMCreateLocalVector(), BUT they
1869    must have the same parallel data layout; they could, for example, be
1870    obtained with VecDuplicate() from the DM originating vectors.
1871 
1872 .keywords: DM, local-to-local, begin
1873 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1874 
1875 @*/
1876 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1877 {
1878   PetscErrorCode          ierr;
1879 
1880   PetscFunctionBegin;
1881   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1882   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1883   PetscFunctionReturn(0);
1884 }
1885 
1886 #undef __FUNCT__
1887 #define __FUNCT__ "DMLocalToLocalEnd"
1888 /*@
1889    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1890    that contain irrelevant values) to another local vector where the ghost
1891    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1892 
1893    Neighbor-wise Collective on DM and Vec
1894 
1895    Input Parameters:
1896 +  da - the DM object
1897 .  g - the original local vector
1898 -  mode - one of INSERT_VALUES or ADD_VALUES
1899 
1900    Output Parameter:
1901 .  l  - the local vector with correct ghost values
1902 
1903    Level: intermediate
1904 
1905    Notes:
1906    The local vectors used here need not be the same as those
1907    obtained from DMCreateLocalVector(), BUT they
1908    must have the same parallel data layout; they could, for example, be
1909    obtained with VecDuplicate() from the DM originating vectors.
1910 
1911 .keywords: DM, local-to-local, end
1912 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1913 
1914 @*/
1915 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1916 {
1917   PetscErrorCode          ierr;
1918 
1919   PetscFunctionBegin;
1920   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1921   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1922   PetscFunctionReturn(0);
1923 }
1924 
1925 
1926 #undef __FUNCT__
1927 #define __FUNCT__ "DMCoarsen"
1928 /*@
1929     DMCoarsen - Coarsens a DM object
1930 
1931     Collective on DM
1932 
1933     Input Parameter:
1934 +   dm - the DM object
1935 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1936 
1937     Output Parameter:
1938 .   dmc - the coarsened DM
1939 
1940     Level: developer
1941 
1942 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1943 
1944 @*/
1945 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1946 {
1947   PetscErrorCode    ierr;
1948   DMCoarsenHookLink link;
1949 
1950   PetscFunctionBegin;
1951   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1952   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1953   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1954   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1955   (*dmc)->ctx               = dm->ctx;
1956   (*dmc)->levelup           = dm->levelup;
1957   (*dmc)->leveldown         = dm->leveldown + 1;
1958   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1959   for (link=dm->coarsenhook; link; link=link->next) {
1960     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1961   }
1962   PetscFunctionReturn(0);
1963 }
1964 
1965 #undef __FUNCT__
1966 #define __FUNCT__ "DMCoarsenHookAdd"
1967 /*@C
1968    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1969 
1970    Logically Collective
1971 
1972    Input Arguments:
1973 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1974 .  coarsenhook - function to run when setting up a coarser level
1975 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1976 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1977 
1978    Calling sequence of coarsenhook:
1979 $    coarsenhook(DM fine,DM coarse,void *ctx);
1980 
1981 +  fine - fine level DM
1982 .  coarse - coarse level DM to restrict problem to
1983 -  ctx - optional user-defined function context
1984 
1985    Calling sequence for restricthook:
1986 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1987 
1988 +  fine - fine level DM
1989 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1990 .  rscale - scaling vector for restriction
1991 .  inject - matrix restricting by injection
1992 .  coarse - coarse level DM to update
1993 -  ctx - optional user-defined function context
1994 
1995    Level: advanced
1996 
1997    Notes:
1998    This function is only needed if auxiliary data needs to be set up on coarse grids.
1999 
2000    If this function is called multiple times, the hooks will be run in the order they are added.
2001 
2002    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2003    extract the finest level information from its context (instead of from the SNES).
2004 
2005    This function is currently not available from Fortran.
2006 
2007 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2008 @*/
2009 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2010 {
2011   PetscErrorCode    ierr;
2012   DMCoarsenHookLink link,*p;
2013 
2014   PetscFunctionBegin;
2015   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2016   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2017   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2018   link->coarsenhook  = coarsenhook;
2019   link->restricthook = restricthook;
2020   link->ctx          = ctx;
2021   link->next         = NULL;
2022   *p                 = link;
2023   PetscFunctionReturn(0);
2024 }
2025 
2026 #undef __FUNCT__
2027 #define __FUNCT__ "DMRestrict"
2028 /*@
2029    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2030 
2031    Collective if any hooks are
2032 
2033    Input Arguments:
2034 +  fine - finer DM to use as a base
2035 .  restrct - restriction matrix, apply using MatRestrict()
2036 .  inject - injection matrix, also use MatRestrict()
2037 -  coarse - coarer DM to update
2038 
2039    Level: developer
2040 
2041 .seealso: DMCoarsenHookAdd(), MatRestrict()
2042 @*/
2043 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2044 {
2045   PetscErrorCode    ierr;
2046   DMCoarsenHookLink link;
2047 
2048   PetscFunctionBegin;
2049   for (link=fine->coarsenhook; link; link=link->next) {
2050     if (link->restricthook) {
2051       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2052     }
2053   }
2054   PetscFunctionReturn(0);
2055 }
2056 
2057 #undef __FUNCT__
2058 #define __FUNCT__ "DMSubDomainHookAdd"
2059 /*@C
2060    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2061 
2062    Logically Collective
2063 
2064    Input Arguments:
2065 +  global - global DM
2066 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2067 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2068 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2069 
2070 
2071    Calling sequence for ddhook:
2072 $    ddhook(DM global,DM block,void *ctx)
2073 
2074 +  global - global DM
2075 .  block  - block DM
2076 -  ctx - optional user-defined function context
2077 
2078    Calling sequence for restricthook:
2079 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2080 
2081 +  global - global DM
2082 .  out    - scatter to the outer (with ghost and overlap points) block vector
2083 .  in     - scatter to block vector values only owned locally
2084 .  block  - block DM
2085 -  ctx - optional user-defined function context
2086 
2087    Level: advanced
2088 
2089    Notes:
2090    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2091 
2092    If this function is called multiple times, the hooks will be run in the order they are added.
2093 
2094    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2095    extract the global information from its context (instead of from the SNES).
2096 
2097    This function is currently not available from Fortran.
2098 
2099 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2100 @*/
2101 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2102 {
2103   PetscErrorCode      ierr;
2104   DMSubDomainHookLink link,*p;
2105 
2106   PetscFunctionBegin;
2107   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2108   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2109   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2110   link->restricthook = restricthook;
2111   link->ddhook       = ddhook;
2112   link->ctx          = ctx;
2113   link->next         = NULL;
2114   *p                 = link;
2115   PetscFunctionReturn(0);
2116 }
2117 
2118 #undef __FUNCT__
2119 #define __FUNCT__ "DMSubDomainRestrict"
2120 /*@
2121    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2122 
2123    Collective if any hooks are
2124 
2125    Input Arguments:
2126 +  fine - finer DM to use as a base
2127 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2128 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2129 -  coarse - coarer DM to update
2130 
2131    Level: developer
2132 
2133 .seealso: DMCoarsenHookAdd(), MatRestrict()
2134 @*/
2135 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2136 {
2137   PetscErrorCode      ierr;
2138   DMSubDomainHookLink link;
2139 
2140   PetscFunctionBegin;
2141   for (link=global->subdomainhook; link; link=link->next) {
2142     if (link->restricthook) {
2143       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2144     }
2145   }
2146   PetscFunctionReturn(0);
2147 }
2148 
2149 #undef __FUNCT__
2150 #define __FUNCT__ "DMGetCoarsenLevel"
2151 /*@
2152     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2153 
2154     Not Collective
2155 
2156     Input Parameter:
2157 .   dm - the DM object
2158 
2159     Output Parameter:
2160 .   level - number of coarsenings
2161 
2162     Level: developer
2163 
2164 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2165 
2166 @*/
2167 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2168 {
2169   PetscFunctionBegin;
2170   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2171   *level = dm->leveldown;
2172   PetscFunctionReturn(0);
2173 }
2174 
2175 
2176 
2177 #undef __FUNCT__
2178 #define __FUNCT__ "DMRefineHierarchy"
2179 /*@C
2180     DMRefineHierarchy - Refines a DM object, all levels at once
2181 
2182     Collective on DM
2183 
2184     Input Parameter:
2185 +   dm - the DM object
2186 -   nlevels - the number of levels of refinement
2187 
2188     Output Parameter:
2189 .   dmf - the refined DM hierarchy
2190 
2191     Level: developer
2192 
2193 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2194 
2195 @*/
2196 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2197 {
2198   PetscErrorCode ierr;
2199 
2200   PetscFunctionBegin;
2201   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2202   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2203   if (nlevels == 0) PetscFunctionReturn(0);
2204   if (dm->ops->refinehierarchy) {
2205     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2206   } else if (dm->ops->refine) {
2207     PetscInt i;
2208 
2209     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2210     for (i=1; i<nlevels; i++) {
2211       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2212     }
2213   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2214   PetscFunctionReturn(0);
2215 }
2216 
2217 #undef __FUNCT__
2218 #define __FUNCT__ "DMCoarsenHierarchy"
2219 /*@C
2220     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2221 
2222     Collective on DM
2223 
2224     Input Parameter:
2225 +   dm - the DM object
2226 -   nlevels - the number of levels of coarsening
2227 
2228     Output Parameter:
2229 .   dmc - the coarsened DM hierarchy
2230 
2231     Level: developer
2232 
2233 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2234 
2235 @*/
2236 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2237 {
2238   PetscErrorCode ierr;
2239 
2240   PetscFunctionBegin;
2241   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2242   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2243   if (nlevels == 0) PetscFunctionReturn(0);
2244   PetscValidPointer(dmc,3);
2245   if (dm->ops->coarsenhierarchy) {
2246     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2247   } else if (dm->ops->coarsen) {
2248     PetscInt i;
2249 
2250     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2251     for (i=1; i<nlevels; i++) {
2252       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2253     }
2254   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2255   PetscFunctionReturn(0);
2256 }
2257 
2258 #undef __FUNCT__
2259 #define __FUNCT__ "DMCreateAggregates"
2260 /*@
2261    DMCreateAggregates - Gets the aggregates that map between
2262    grids associated with two DMs.
2263 
2264    Collective on DM
2265 
2266    Input Parameters:
2267 +  dmc - the coarse grid DM
2268 -  dmf - the fine grid DM
2269 
2270    Output Parameters:
2271 .  rest - the restriction matrix (transpose of the projection matrix)
2272 
2273    Level: intermediate
2274 
2275 .keywords: interpolation, restriction, multigrid
2276 
2277 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2278 @*/
2279 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2280 {
2281   PetscErrorCode ierr;
2282 
2283   PetscFunctionBegin;
2284   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2285   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2286   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2287   PetscFunctionReturn(0);
2288 }
2289 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the callback only; a NULL destroy leaves the user context untouched at DMDestroy() time */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2313 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores only the pointer; ownership stays with the caller unless a destroy
     function was supplied via DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2337 
2338 #undef __FUNCT__
2339 #define __FUNCT__ "DMGetApplicationContext"
2340 /*@
2341     DMGetApplicationContext - Gets a user context from a DM object
2342 
2343     Not Collective
2344 
2345     Input Parameter:
2346 .   dm - the DM object
2347 
2348     Output Parameter:
2349 .   ctx - the user context
2350 
2351     Level: intermediate
2352 
2353 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2354 
2355 @*/
2356 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2357 {
2358   PetscFunctionBegin;
2359   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2360   *(void**)ctx = dm->ctx;
2361   PetscFunctionReturn(0);
2362 }
2363 
2364 #undef __FUNCT__
2365 #define __FUNCT__ "DMSetVariableBounds"
2366 /*@C
2367     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2368 
2369     Logically Collective on DM
2370 
2371     Input Parameter:
2372 +   dm - the DM object
2373 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2374 
2375     Level: intermediate
2376 
2377 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2378          DMSetJacobian()
2379 
2380 @*/
2381 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2382 {
2383   PetscFunctionBegin;
2384   dm->ops->computevariablebounds = f;
2385   PetscFunctionReturn(0);
2386 }
2387 
2388 #undef __FUNCT__
2389 #define __FUNCT__ "DMHasVariableBounds"
2390 /*@
2391     DMHasVariableBounds - does the DM object have a variable bounds function?
2392 
2393     Not Collective
2394 
2395     Input Parameter:
2396 .   dm - the DM object to destroy
2397 
2398     Output Parameter:
2399 .   flg - PETSC_TRUE if the variable bounds function exists
2400 
2401     Level: developer
2402 
2403 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2404 
2405 @*/
2406 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2407 {
2408   PetscFunctionBegin;
2409   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2410   PetscFunctionReturn(0);
2411 }
2412 
2413 #undef __FUNCT__
2414 #define __FUNCT__ "DMComputeVariableBounds"
2415 /*@C
2416     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2417 
2418     Logically Collective on DM
2419 
2420     Input Parameters:
2421 +   dm - the DM object to destroy
2422 -   x  - current solution at which the bounds are computed
2423 
2424     Output parameters:
2425 +   xl - lower bound
2426 -   xu - upper bound
2427 
2428     Level: intermediate
2429 
2430 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2431 
2432 @*/
2433 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2434 {
2435   PetscErrorCode ierr;
2436 
2437   PetscFunctionBegin;
2438   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2439   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2440   if (dm->ops->computevariablebounds) {
2441     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2442   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2443   PetscFunctionReturn(0);
2444 }
2445 
2446 #undef __FUNCT__
2447 #define __FUNCT__ "DMHasColoring"
2448 /*@
2449     DMHasColoring - does the DM object have a method of providing a coloring?
2450 
2451     Not Collective
2452 
2453     Input Parameter:
2454 .   dm - the DM object
2455 
2456     Output Parameter:
2457 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2458 
2459     Level: developer
2460 
2461 .seealso DMHasFunction(), DMCreateColoring()
2462 
2463 @*/
2464 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2465 {
2466   PetscFunctionBegin;
2467   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2468   PetscFunctionReturn(0);
2469 }
2470 
2471 #undef  __FUNCT__
2472 #define __FUNCT__ "DMSetVec"
2473 /*@C
2474     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2475 
2476     Collective on DM
2477 
2478     Input Parameter:
2479 +   dm - the DM object
2480 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2481 
2482     Level: developer
2483 
2484 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2485 
2486 @*/
2487 PetscErrorCode  DMSetVec(DM dm,Vec x)
2488 {
2489   PetscErrorCode ierr;
2490 
2491   PetscFunctionBegin;
2492   if (x) {
2493     if (!dm->x) {
2494       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2495     }
2496     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2497   } else if (dm->x) {
2498     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2499   }
2500   PetscFunctionReturn(0);
2501 }
2502 
/* Registry of DM implementations, populated by DMRegister()/DMRegisterAll() and consulted by DMSetType() */
PetscFunctionList DMList              = NULL;
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2505 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM is already of the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure the constructor registry is populated, then look up the requested type */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down implementation-specific data of the previous type before rebuilding */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2551 
2552 #undef __FUNCT__
2553 #define __FUNCT__ "DMGetType"
2554 /*@C
2555   DMGetType - Gets the DM type name (as a string) from the DM.
2556 
2557   Not Collective
2558 
2559   Input Parameter:
2560 . dm  - The DM
2561 
2562   Output Parameter:
2563 . type - The DM type name
2564 
2565   Level: intermediate
2566 
2567 .keywords: DM, get, type, name
2568 .seealso: DMSetType(), DMCreate()
2569 @*/
2570 PetscErrorCode  DMGetType(DM dm, DMType *type)
2571 {
2572   PetscErrorCode ierr;
2573 
2574   PetscFunctionBegin;
2575   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2576   PetscValidCharPointer(type,2);
2577   if (!DMRegisterAllCalled) {
2578     ierr = DMRegisterAll();CHKERRQ(ierr);
2579   }
2580   *type = ((PetscObject)dm)->type_name;
2581   PetscFunctionReturn(0);
2582 }
2583 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below — confirm whether a
     same-type fast path was intended here */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<from>_<to>_C" and query it on dm */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A temporary DM of the target type is created solely to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with steps 3/4 compiled out, the temporary B created in step 2 is not destroyed
       on this error path — confirm whether the leak matters given the hard error */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2679 
2680 /*--------------------------------------------------------------------------------------------------------------------*/
2681 
2682 #undef __FUNCT__
2683 #define __FUNCT__ "DMRegister"
2684 /*@C
2685   DMRegister -  Adds a new DM component implementation
2686 
2687   Not Collective
2688 
2689   Input Parameters:
2690 + name        - The name of a new user-defined creation routine
2691 - create_func - The creation routine itself
2692 
2693   Notes:
2694   DMRegister() may be called multiple times to add several user-defined DMs
2695 
2696 
2697   Sample usage:
2698 .vb
2699     DMRegister("my_da", MyDMCreate);
2700 .ve
2701 
2702   Then, your DM type can be chosen with the procedural interface via
2703 .vb
2704     DMCreate(MPI_Comm, DM *);
2705     DMSetType(DM,"my_da");
2706 .ve
2707    or at runtime via the option
2708 .vb
2709     -da_type my_da
2710 .ve
2711 
2712   Level: advanced
2713 
2714 .keywords: DM, register
2715 .seealso: DMRegisterAll(), DMRegisterDestroy()
2716 
2717 @*/
2718 PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2719 {
2720   PetscErrorCode ierr;
2721 
2722   PetscFunctionBegin;
2723   ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
2724   PetscFunctionReturn(0);
2725 }
2726 
2727 #undef __FUNCT__
2728 #define __FUNCT__ "DMLoad"
2729 /*@C
2730   DMLoad - Loads a DM that has been stored in binary  with DMView().
2731 
2732   Collective on PetscViewer
2733 
2734   Input Parameters:
2735 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2736            some related function before a call to DMLoad().
2737 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2738            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2739 
2740    Level: intermediate
2741 
2742   Notes:
2743    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2744 
2745   Notes for advanced users:
2746   Most users should not need to know the details of the binary storage
2747   format, since DMLoad() and DMView() completely hide these details.
2748   But for anyone who's interested, the standard binary matrix storage
2749   format is
2750 .vb
2751      has not yet been determined
2752 .ve
2753 
2754 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2755 @*/
2756 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2757 {
2758   PetscErrorCode ierr;
2759   PetscBool      isbinary;
2760   PetscInt       classid;
2761   char           type[256];
2762 
2763   PetscFunctionBegin;
2764   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2765   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2766   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2767   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2768 
2769   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2770   if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2771   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2772   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2773   if (newdm->ops->load) {
2774     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2775   }
2776   PetscFunctionReturn(0);
2777 }
2778 
2779 /******************************** FEM Support **********************************/
2780 
2781 #undef __FUNCT__
2782 #define __FUNCT__ "DMPrintCellVector"
2783 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2784 {
2785   PetscInt       f;
2786   PetscErrorCode ierr;
2787 
2788   PetscFunctionBegin;
2789   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2790   for (f = 0; f < len; ++f) {
2791     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr);
2792   }
2793   PetscFunctionReturn(0);
2794 }
2795 
2796 #undef __FUNCT__
2797 #define __FUNCT__ "DMPrintCellMatrix"
2798 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2799 {
2800   PetscInt       f, g;
2801   PetscErrorCode ierr;
2802 
2803   PetscFunctionBegin;
2804   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2805   for (f = 0; f < rows; ++f) {
2806     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2807     for (g = 0; g < cols; ++g) {
2808       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5g", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2809     }
2810     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2811   }
2812   PetscFunctionReturn(0);
2813 }
2814 
2815 #undef __FUNCT__
2816 #define __FUNCT__ "DMPrintLocalVec"
2817 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2818 {
2819   PetscMPIInt    rank, numProcs;
2820   PetscInt       p;
2821   PetscErrorCode ierr;
2822 
2823   PetscFunctionBegin;
2824   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2825   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
2826   ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
2827   for (p = 0; p < numProcs; ++p) {
2828     if (p == rank) {
2829       Vec x;
2830 
2831       ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
2832       ierr = VecCopy(X, x);CHKERRQ(ierr);
2833       ierr = VecChop(x, tol);CHKERRQ(ierr);
2834       ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2835       ierr = VecDestroy(&x);CHKERRQ(ierr);
2836       ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2837     }
2838     ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
2839   }
2840   PetscFunctionReturn(0);
2841 }
2842 
2843 #undef __FUNCT__
2844 #define __FUNCT__ "DMGetDefaultSection"
2845 /*@
2846   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2847 
2848   Input Parameter:
2849 . dm - The DM
2850 
2851   Output Parameter:
2852 . section - The PetscSection
2853 
2854   Level: intermediate
2855 
2856   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2857 
2858 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2859 @*/
2860 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2861 {
2862   PetscErrorCode ierr;
2863 
2864   PetscFunctionBegin;
2865   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2866   PetscValidPointer(section, 2);
2867   if (!dm->defaultSection && dm->ops->createdefaultsection) {ierr = (*dm->ops->createdefaultsection)(dm);CHKERRQ(ierr);}
2868   *section = dm->defaultSection;
2869   PetscFunctionReturn(0);
2870 }
2871 
2872 #undef __FUNCT__
2873 #define __FUNCT__ "DMSetDefaultSection"
2874 /*@
2875   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2876 
2877   Input Parameters:
2878 + dm - The DM
2879 - section - The PetscSection
2880 
2881   Level: intermediate
2882 
2883   Note: Any existing Section will be destroyed
2884 
2885 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2886 @*/
2887 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2888 {
2889   PetscInt       numFields;
2890   PetscInt       f;
2891   PetscErrorCode ierr;
2892 
2893   PetscFunctionBegin;
2894   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2895   PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2896   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2897   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2898   dm->defaultSection = section;
2899   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2900   if (numFields) {
2901     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2902     for (f = 0; f < numFields; ++f) {
2903       const char *name;
2904 
2905       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2906       ierr = PetscObjectSetName((PetscObject) dm->fields[f], name);CHKERRQ(ierr);
2907     }
2908   }
2909   /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2910   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2911   PetscFunctionReturn(0);
2912 }
2913 
2914 #undef __FUNCT__
2915 #define __FUNCT__ "DMGetDefaultGlobalSection"
2916 /*@
2917   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2918 
2919   Collective on DM
2920 
2921   Input Parameter:
2922 . dm - The DM
2923 
2924   Output Parameter:
2925 . section - The PetscSection
2926 
2927   Level: intermediate
2928 
2929   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2930 
2931 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2932 @*/
2933 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2934 {
2935   PetscErrorCode ierr;
2936 
2937   PetscFunctionBegin;
2938   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2939   PetscValidPointer(section, 2);
2940   if (!dm->defaultGlobalSection) {
2941     PetscSection s;
2942 
2943     ierr = DMGetDefaultSection(dm, &s);CHKERRQ(ierr);
2944     if (!s)  SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection in order to create a global PetscSection");
2945     if (!dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSF in order to create a global PetscSection");
2946     ierr = PetscSectionCreateGlobalSection(s, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2947     ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
2948     ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm), dm->defaultGlobalSection, &dm->map);CHKERRQ(ierr);
2949   }
2950   *section = dm->defaultGlobalSection;
2951   PetscFunctionReturn(0);
2952 }
2953 
2954 #undef __FUNCT__
2955 #define __FUNCT__ "DMSetDefaultGlobalSection"
2956 /*@
2957   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2958 
2959   Input Parameters:
2960 + dm - The DM
2961 - section - The PetscSection, or NULL
2962 
2963   Level: intermediate
2964 
2965   Note: Any existing Section will be destroyed
2966 
2967 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2968 @*/
2969 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2970 {
2971   PetscErrorCode ierr;
2972 
2973   PetscFunctionBegin;
2974   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2975   if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2976   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2977   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2978   dm->defaultGlobalSection = section;
2979   PetscFunctionReturn(0);
2980 }
2981 
2982 #undef __FUNCT__
2983 #define __FUNCT__ "DMGetDefaultSF"
2984 /*@
2985   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2986   it is created from the default PetscSection layouts in the DM.
2987 
2988   Input Parameter:
2989 . dm - The DM
2990 
2991   Output Parameter:
2992 . sf - The PetscSF
2993 
2994   Level: intermediate
2995 
2996   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2997 
2998 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2999 @*/
3000 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
3001 {
3002   PetscInt       nroots;
3003   PetscErrorCode ierr;
3004 
3005   PetscFunctionBegin;
3006   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3007   PetscValidPointer(sf, 2);
3008   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
3009   if (nroots < 0) {
3010     PetscSection section, gSection;
3011 
3012     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
3013     if (section) {
3014       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
3015       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
3016     } else {
3017       *sf = NULL;
3018       PetscFunctionReturn(0);
3019     }
3020   }
3021   *sf = dm->defaultSF;
3022   PetscFunctionReturn(0);
3023 }
3024 
3025 #undef __FUNCT__
3026 #define __FUNCT__ "DMSetDefaultSF"
3027 /*@
3028   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3029 
3030   Input Parameters:
3031 + dm - The DM
3032 - sf - The PetscSF
3033 
3034   Level: intermediate
3035 
3036   Note: Any previous SF is destroyed
3037 
3038 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3039 @*/
3040 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3041 {
3042   PetscErrorCode ierr;
3043 
3044   PetscFunctionBegin;
3045   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3046   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3047   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3048   dm->defaultSF = sf;
3049   PetscFunctionReturn(0);
3050 }
3051 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned (unconstrained) dofs so that a global dof offset can be
     mapped back to its owning rank via the layout's ranges */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. In the global section a negative dof count -(gdof+1)
     marks a point owned by another process; in both cases the number of leaf dofs
     is the dof count minus the constrained dofs */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local) indices and the corresponding (rank, index)
     root locations; l tracks the current leaf slot */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the local indices of the unconstrained dofs; c counts constrained dofs
       skipped so far, so the leaf index is l+d-c */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Point is owned remotely: decode the offset from -(goff+1) and locate the
         owning rank by searching the layout ranges */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt encodes "not found" as -(insertion point)-1 */
        if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Point is owned locally: roots live on this rank at the layout-relative offset */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3142 
3143 #undef __FUNCT__
3144 #define __FUNCT__ "DMGetPointSF"
3145 /*@
3146   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3147 
3148   Input Parameter:
3149 . dm - The DM
3150 
3151   Output Parameter:
3152 . sf - The PetscSF
3153 
3154   Level: intermediate
3155 
3156   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3157 
3158 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3159 @*/
3160 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3161 {
3162   PetscFunctionBegin;
3163   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3164   PetscValidPointer(sf, 2);
3165   *sf = dm->sf;
3166   PetscFunctionReturn(0);
3167 }
3168 
3169 #undef __FUNCT__
3170 #define __FUNCT__ "DMSetPointSF"
3171 /*@
3172   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3173 
3174   Input Parameters:
3175 + dm - The DM
3176 - sf - The PetscSF
3177 
3178   Level: intermediate
3179 
3180 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3181 @*/
3182 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3183 {
3184   PetscErrorCode ierr;
3185 
3186   PetscFunctionBegin;
3187   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3188   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3189   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3190   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3191   dm->sf = sf;
3192   PetscFunctionReturn(0);
3193 }
3194 
3195 #undef __FUNCT__
3196 #define __FUNCT__ "DMGetNumFields"
3197 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3198 {
3199   PetscFunctionBegin;
3200   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3201   PetscValidPointer(numFields, 2);
3202   *numFields = dm->numFields;
3203   PetscFunctionReturn(0);
3204 }
3205 
3206 #undef __FUNCT__
3207 #define __FUNCT__ "DMSetNumFields"
3208 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3209 {
3210   PetscInt       f;
3211   PetscErrorCode ierr;
3212 
3213   PetscFunctionBegin;
3214   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3215   if (dm->numFields == numFields) PetscFunctionReturn(0);
3216   for (f = 0; f < dm->numFields; ++f) {
3217     ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr);
3218   }
3219   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3220   dm->numFields = numFields;
3221   ierr          = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr);
3222   for (f = 0; f < dm->numFields; ++f) {
3223     ierr = PetscContainerCreate(PetscObjectComm((PetscObject) dm), (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3224   }
3225   PetscFunctionReturn(0);
3226 }
3227 
3228 #undef __FUNCT__
3229 #define __FUNCT__ "DMGetField"
3230 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3231 {
3232   PetscFunctionBegin;
3233   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3234   PetscValidPointer(field, 3);
3235   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3236   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3237   *field = (PetscObject) dm->fields[f];
3238   PetscFunctionReturn(0);
3239 }
3240 
3241 #undef __FUNCT__
3242 #define __FUNCT__ "DMSetField"
3243 PetscErrorCode DMSetField(DM dm, PetscInt f, PetscObject field)
3244 {
3245   PetscErrorCode ierr;
3246 
3247   PetscFunctionBegin;
3248   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3249   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3250   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject) dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3251   if (((PetscObject) dm->fields[f]) == field) PetscFunctionReturn(0);
3252   ierr = PetscObjectDestroy((PetscObject *) &dm->fields[f]);CHKERRQ(ierr);
3253   dm->fields[f] = (PetscFE) field;
3254   ierr = PetscObjectReference((PetscObject) dm->fields[f]);CHKERRQ(ierr);
3255   PetscFunctionReturn(0);
3256 }
3257 
3258 #undef __FUNCT__
3259 #define __FUNCT__ "DMRestrictHook_Coordinates"
3260 PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
3261 {
3262   DM dm_coord,dmc_coord;
3263   PetscErrorCode ierr;
3264   Vec coords,ccoords;
3265   VecScatter scat;
3266   PetscFunctionBegin;
3267   ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
3268   ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
3269   ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
3270   ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
3271   if (coords && !ccoords) {
3272     ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
3273     ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
3274     ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
3275     ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
3276     ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
3277     ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
3278     ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
3279   }
3280   PetscFunctionReturn(0);
3281 }
3282 
3283 #undef __FUNCT__
3284 #define __FUNCT__ "DMSubDomainHook_Coordinates"
3285 static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
3286 {
3287   DM dm_coord,subdm_coord;
3288   PetscErrorCode ierr;
3289   Vec coords,ccoords,clcoords;
3290   VecScatter *scat_i,*scat_g;
3291   PetscFunctionBegin;
3292   ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
3293   ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
3294   ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
3295   ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
3296   if (coords && !ccoords) {
3297     ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
3298     ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
3299     ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
3300     ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
3301     ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
3302     ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
3303     ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
3304     ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
3305     ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
3306     ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
3307     ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
3308     ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
3309     ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
3310     ierr = PetscFree(scat_i);CHKERRQ(ierr);
3311     ierr = PetscFree(scat_g);CHKERRQ(ierr);
3312   }
3313   PetscFunctionReturn(0);
3314 }
3315 
3316 #undef __FUNCT__
3317 #define __FUNCT__ "DMSetCoordinates"
3318 /*@
3319   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3320 
3321   Collective on DM
3322 
3323   Input Parameters:
3324 + dm - the DM
3325 - c - coordinate vector
3326 
3327   Note:
3328   The coordinates do include those for ghost points, which are in the local vector
3329 
3330   Level: intermediate
3331 
3332 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3333 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3334 @*/
3335 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3336 {
3337   PetscErrorCode ierr;
3338 
3339   PetscFunctionBegin;
3340   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3341   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3342   ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3343   ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3344   dm->coordinates = c;
3345   ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3346   ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
3347   ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
3348   PetscFunctionReturn(0);
3349 }
3350 
3351 #undef __FUNCT__
3352 #define __FUNCT__ "DMSetCoordinatesLocal"
3353 /*@
3354   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3355 
3356   Collective on DM
3357 
3358    Input Parameters:
3359 +  dm - the DM
3360 -  c - coordinate vector
3361 
3362   Note:
3363   The coordinates of ghost points can be set using DMSetCoordinates()
3364   followed by DMGetCoordinatesLocal(). This is intended to enable the
3365   setting of ghost coordinates outside of the domain.
3366 
3367   Level: intermediate
3368 
3369 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3370 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3371 @*/
3372 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3373 {
3374   PetscErrorCode ierr;
3375 
3376   PetscFunctionBegin;
3377   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3378   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3379   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3380   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3381 
3382   dm->coordinatesLocal = c;
3383 
3384   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3385   PetscFunctionReturn(0);
3386 }
3387 
3388 #undef __FUNCT__
3389 #define __FUNCT__ "DMGetCoordinates"
3390 /*@
3391   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3392 
3393   Not Collective
3394 
3395   Input Parameter:
3396 . dm - the DM
3397 
3398   Output Parameter:
3399 . c - global coordinate vector
3400 
3401   Note:
3402   This is a borrowed reference, so the user should NOT destroy this vector
3403 
3404   Each process has only the local coordinates (does NOT have the ghost coordinates).
3405 
3406   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3407   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3408 
3409   Level: intermediate
3410 
3411 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3412 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3413 @*/
3414 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3415 {
3416   PetscErrorCode ierr;
3417 
3418   PetscFunctionBegin;
3419   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3420   PetscValidPointer(c,2);
3421   if (!dm->coordinates && dm->coordinatesLocal) {
3422     DM cdm = NULL;
3423 
3424     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3425     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3426     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3427     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3428     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3429   }
3430   *c = dm->coordinates;
3431   PetscFunctionReturn(0);
3432 }
3433 
3434 #undef __FUNCT__
3435 #define __FUNCT__ "DMGetCoordinatesLocal"
3436 /*@
3437   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3438 
3439   Collective on DM
3440 
3441   Input Parameter:
3442 . dm - the DM
3443 
3444   Output Parameter:
3445 . c - coordinate vector
3446 
3447   Note:
3448   This is a borrowed reference, so the user should NOT destroy this vector
3449 
3450   Each process has the local and ghost coordinates
3451 
3452   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3453   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3454 
3455   Level: intermediate
3456 
3457 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3458 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3459 @*/
3460 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3461 {
3462   PetscErrorCode ierr;
3463 
3464   PetscFunctionBegin;
3465   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3466   PetscValidPointer(c,2);
3467   if (!dm->coordinatesLocal && dm->coordinates) {
3468     DM cdm = NULL;
3469 
3470     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3471     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3472     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3473     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3474     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3475   }
3476   *c = dm->coordinatesLocal;
3477   PetscFunctionReturn(0);
3478 }
3479 
3480 #undef __FUNCT__
3481 #define __FUNCT__ "DMGetCoordinateDM"
3482 /*@
3483   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3484 
3485   Collective on DM
3486 
3487   Input Parameter:
3488 . dm - the DM
3489 
3490   Output Parameter:
3491 . cdm - coordinate DM
3492 
3493   Level: intermediate
3494 
3495 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3496 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3497 @*/
3498 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3499 {
3500   PetscErrorCode ierr;
3501 
3502   PetscFunctionBegin;
3503   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3504   PetscValidPointer(cdm,2);
3505   if (!dm->coordinateDM) {
3506     if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3507     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3508   }
3509   *cdm = dm->coordinateDM;
3510   PetscFunctionReturn(0);
3511 }
3512 
3513 #undef __FUNCT__
3514 #define __FUNCT__ "DMSetCoordinateDM"
3515 /*@
3516   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3517 
3518   Logically Collective on DM
3519 
3520   Input Parameters:
3521 + dm - the DM
3522 - cdm - coordinate DM
3523 
3524   Level: intermediate
3525 
3526 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3527 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3528 @*/
3529 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3530 {
3531   PetscErrorCode ierr;
3532 
3533   PetscFunctionBegin;
3534   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3535   PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
3536   ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
3537   dm->coordinateDM = cdm;
3538   ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
3539   PetscFunctionReturn(0);
3540 }
3541 
3542 #undef __FUNCT__
3543 #define __FUNCT__ "DMGetCoordinateSection"
3544 /*@
3545   DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.
3546 
3547   Not Collective
3548 
3549   Input Parameter:
3550 . dm - The DM object
3551 
3552   Output Parameter:
3553 . section - The PetscSection object
3554 
3555   Level: intermediate
3556 
3557 .keywords: mesh, coordinates
3558 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
3559 @*/
3560 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
3561 {
3562   DM             cdm;
3563   PetscErrorCode ierr;
3564 
3565   PetscFunctionBegin;
3566   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3567   PetscValidPointer(section, 2);
3568   ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3569   ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr);
3570   PetscFunctionReturn(0);
3571 }
3572 
3573 #undef __FUNCT__
3574 #define __FUNCT__ "DMSetCoordinateSection"
3575 /*@
3576   DMSetCoordinateSection - Set the layout of coordinate values over the mesh.
3577 
3578   Not Collective
3579 
3580   Input Parameters:
3581 + dm      - The DM object
3582 - section - The PetscSection object
3583 
3584   Level: intermediate
3585 
3586 .keywords: mesh, coordinates
3587 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
3588 @*/
3589 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section)
3590 {
3591   DM             cdm;
3592   PetscErrorCode ierr;
3593 
3594   PetscFunctionBegin;
3595   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3596   PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
3597   ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3598   ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr);
3599   PetscFunctionReturn(0);
3600 }
3601 
3602 #undef __FUNCT__
3603 #define __FUNCT__ "DMLocatePoints"
3604 /*@
3605   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3606 
3607   Not collective
3608 
3609   Input Parameters:
3610 + dm - The DM
3611 - v - The Vec of points
3612 
3613   Output Parameter:
3614 . cells - The local cell numbers for cells which contain the points
3615 
3616   Level: developer
3617 
3618 .keywords: point location, mesh
3619 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3620 @*/
3621 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3622 {
3623   PetscErrorCode ierr;
3624 
3625   PetscFunctionBegin;
3626   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3627   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3628   PetscValidPointer(cells,3);
3629   if (dm->ops->locatepoints) {
3630     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3631   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3632   PetscFunctionReturn(0);
3633 }
3634