xref: /petsc/src/dm/interface/dm.c (revision 0646a658dcefdf4ee741d3bf5d8bdc3d39675ad7)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
/* PETSc class id for DM objects; assigned during DMInitializePackage() */
PetscClassId  DM_CLASSID;
/* Logging events used to profile DM conversion and local/global transfer operations */
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;

/* String names for the DMBoundaryType enum, terminated by the enum prefix and a NULL,
   following the PETSc convention for enum string arrays */
const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "DMCreate"
11 /*@
12   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
13 
14    If you never  call DMSetType()  it will generate an
15    error when you try to use the vector.
16 
17   Collective on MPI_Comm
18 
19   Input Parameter:
20 . comm - The communicator for the DM object
21 
22   Output Parameter:
23 . dm - The DM object
24 
25   Level: beginner
26 
27 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
28 @*/
29 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
30 {
31   DM             v;
32   PetscErrorCode ierr;
33 
34   PetscFunctionBegin;
35   PetscValidPointer(dm,2);
36   *dm = NULL;
37   ierr = PetscSysInitializePackage();CHKERRQ(ierr);
38   ierr = VecInitializePackage();CHKERRQ(ierr);
39   ierr = MatInitializePackage();CHKERRQ(ierr);
40   ierr = DMInitializePackage();CHKERRQ(ierr);
41 
42   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
43   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
44 
45 
46   v->ltogmap              = NULL;
47   v->ltogmapb             = NULL;
48   v->bs                   = 1;
49   v->coloringtype         = IS_COLORING_GLOBAL;
50   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
51   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
52   v->defaultSection       = NULL;
53   v->defaultGlobalSection = NULL;
54   {
55     PetscInt i;
56     for (i = 0; i < 10; ++i) {
57       v->nullspaceConstructors[i] = NULL;
58     }
59   }
60   v->numFields = 0;
61   v->fields    = NULL;
62   ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
63   ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
64   *dm = v;
65   PetscFunctionReturn(0);
66 }
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "DMClone"
70 /*@
71   DMClone - Creates a DM object with the same topology as the original.
72 
73   Collective on MPI_Comm
74 
75   Input Parameter:
76 . dm - The original DM object
77 
78   Output Parameter:
79 . newdm  - The new DM object
80 
81   Level: beginner
82 
83 .keywords: DM, topology, create
84 @*/
85 PetscErrorCode DMClone(DM dm, DM *newdm)
86 {
87   PetscSF        sf;
88   Vec            coords;
89   void          *ctx;
90   PetscErrorCode ierr;
91 
92   PetscFunctionBegin;
93   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
94   PetscValidPointer(newdm,2);
95   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
96   if (dm->ops->clone) {
97     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
98   }
99   (*newdm)->setupcalled = PETSC_TRUE;
100   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
101   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
102   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
103   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
104   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
105   if (coords) {
106     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
107   } else {
108     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
109     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
110   }
111   PetscFunctionReturn(0);
112 }
113 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);                     /* release any previously stored type name */
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); /* store a private copy; the DM owns this string */
  PetscFunctionReturn(0);
}
142 
143 #undef __FUNCT__
144 #define __FUNCT__ "DMGetVecType"
145 /*@C
146        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
147 
148    Logically Collective on DMDA
149 
150    Input Parameter:
151 .  da - initial distributed array
152 
153    Output Parameter:
154 .  ctype - the vector type
155 
156    Level: intermediate
157 
158 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
159 @*/
160 PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
161 {
162   PetscFunctionBegin;
163   PetscValidHeaderSpecific(da,DM_CLASSID,1);
164   *ctype = da->vectype;
165   PetscFunctionReturn(0);
166 }
167 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or NULL if no DM was attached with VecSetDM()

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored in the Vec's composed-object list under this reserved key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
  VecSetDM - Sets the DM defining the data layout of the vector

  Not collective

  Input Parameters:
+ v - The Vec
- dm - The DM, or NULL to remove a previously attached DM

  Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Compose keeps a reference to dm; retrievable with VecGetDM() */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
221 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);                      /* release any previously stored type name */
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); /* store a private copy; the DM owns this string */
  PetscFunctionReturn(0);
}
250 
251 #undef __FUNCT__
252 #define __FUNCT__ "DMGetMatType"
253 /*@C
254        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
255 
256    Logically Collective on DM
257 
258    Input Parameter:
259 .  dm - the DM context
260 
261    Output Parameter:
262 .  ctype - the matrix type
263 
264    Options Database:
265 .   -dm_mat_type ctype
266 
267    Level: intermediate
268 
269 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
270 @*/
271 PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
272 {
273   PetscFunctionBegin;
274   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
275   *ctype = dm->mattype;
276   PetscFunctionReturn(0);
277 }
278 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or NULL if no DM was attached with MatSetDM()

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored in the Mat's composed-object list under this reserved key */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
306 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or NULL to remove a previously attached DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Compose keeps a reference to dm; retrievable with MatGetDM() */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
332 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegate to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
364 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to NULL on return

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* Dump all attached things when destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* Count all the circular references of the DM held by its contained Vecs: each
     cached work vector and named vector holds a reference back to this DM */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If external references remain beyond the internal circular ones, only decrement */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = NULL;

  for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedlocal = NULL;

  /* Destroy the lists of hooks (links only; hook contexts are owned by their registrants) */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays; error if any is still checked out via DMGetWorkArray() */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  /* Release solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* Invoke the user's context destructor, if one was registered */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Finally let the implementation tear down its own data, then destroy the header */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
521 
522 #undef __FUNCT__
523 #define __FUNCT__ "DMSetUp"
524 /*@
525     DMSetUp - sets up the data structures inside a DM object
526 
527     Collective on DM
528 
529     Input Parameter:
530 .   dm - the DM object to setup
531 
532     Level: developer
533 
534 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
535 
536 @*/
537 PetscErrorCode  DMSetUp(DM dm)
538 {
539   PetscErrorCode ierr;
540 
541   PetscFunctionBegin;
542   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
543   if (dm->setupcalled) PetscFunctionReturn(0);
544   if (dm->ops->setup) {
545     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
546   }
547   dm->setupcalled = PETSC_TRUE;
548   PetscFunctionReturn(0);
549 }
550 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  /* NOTE(review): if dm->mattype is NULL the fallback default reads typeName, which is
     uninitialized unless -dm_vec_type was given above. DMCreate() always sets mattype
     to MATAIJ, so this appears unreachable in practice -- confirm before relying on it. */
  ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  /* Give the implementation a chance to handle its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
599 
600 #undef __FUNCT__
601 #define __FUNCT__ "DMView"
602 /*@C
603     DMView - Views a vector packer or DMDA.
604 
605     Collective on DM
606 
607     Input Parameter:
608 +   dm - the DM object to view
609 -   v - the viewer
610 
611     Level: developer
612 
613 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
614 
615 @*/
616 PetscErrorCode  DMView(DM dm,PetscViewer v)
617 {
618   PetscErrorCode ierr;
619   PetscBool      isbinary;
620 
621   PetscFunctionBegin;
622   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
623   if (!v) {
624     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
625   }
626   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
627   if (isbinary) {
628     PetscInt classid = DM_FILE_CLASSID;
629     char     type[256];
630 
631     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
632     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
633     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
634   }
635   if (dm->ops->view) {
636     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
637   }
638   PetscFunctionReturn(0);
639 }
640 
641 #undef __FUNCT__
642 #define __FUNCT__ "DMCreateGlobalVector"
643 /*@
644     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
645 
646     Collective on DM
647 
648     Input Parameter:
649 .   dm - the DM object
650 
651     Output Parameter:
652 .   vec - the global vector
653 
654     Level: beginner
655 
656 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
657 
658 @*/
659 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
660 {
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
665   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
666   PetscFunctionReturn(0);
667 }
668 
669 #undef __FUNCT__
670 #define __FUNCT__ "DMCreateLocalVector"
671 /*@
672     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
673 
674     Not Collective
675 
676     Input Parameter:
677 .   dm - the DM object
678 
679     Output Parameter:
680 .   vec - the local vector
681 
682     Level: beginner
683 
684 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
685 
686 @*/
687 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
688 {
689   PetscErrorCode ierr;
690 
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
693   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
694   PetscFunctionReturn(0);
695 }
696 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* If a default section exists, derive the mapping from section offsets */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc1(size, &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        /* NOTE(review): PETSc global sections commonly encode offsets of unowned points
           as negative values; this code uses off+c directly -- confirm that the points
           visited here always have non-negative global offsets. */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* Otherwise ask the implementation to build it */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
757 
758 #undef __FUNCT__
759 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
760 /*@
761    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
762 
763    Collective on DM
764 
765    Input Parameter:
766 .  da - the distributed array that provides the mapping
767 
768    Output Parameter:
769 .  ltog - the block mapping
770 
771    Level: intermediate
772 
773    Notes:
774    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
775    MatSetLocalToGlobalMappingBlock().
776 
777 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
778 @*/
779 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
780 {
781   PetscErrorCode ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
785   PetscValidPointer(ltog,2);
786   if (!dm->ltogmapb) {
787     PetscInt bs;
788     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
789     if (bs > 1) {
790       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
791       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
792     } else {
793       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
794       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
795     }
796   }
797   *ltog = dm->ltogmapb;
798   PetscFunctionReturn(0);
799 }
800 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the block size has not been determined yet (e.g. DM not set up) */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "DMCreateInterpolation"
830 /*@
831     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
832 
833     Collective on DM
834 
835     Input Parameter:
836 +   dm1 - the DM object
837 -   dm2 - the second, finer DM object
838 
839     Output Parameter:
840 +  mat - the interpolation
841 -  vec - the scaling (optional)
842 
843     Level: developer
844 
845     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
846         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
847 
848         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
849         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
850 
851 
852 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
853 
854 @*/
855 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
856 {
857   PetscErrorCode ierr;
858 
859   PetscFunctionBegin;
860   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
861   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
862   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
863   PetscFunctionReturn(0);
864 }
865 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
898 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@
    DMCreateColoring - Gets coloring for a DM

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Not all DM implementations provide a coloring; fail with a clear message if absent */
  if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "DMCreateMatrix"
931 /*@
932     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
933 
934     Collective on DM
935 
936     Input Parameter:
937 .   dm - the DM object
938 
939     Output Parameter:
940 .   mat - the empty Jacobian
941 
942     Level: beginner
943 
944     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
945        do not need to do it yourself.
946 
947        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
948        the nonzero pattern call DMDASetMatPreallocateOnly()
949 
950        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
951        internally by PETSc.
952 
953        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
954        the indices for the global numbering for DMDAs which is complicated.
955 
956 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
957 
958 @*/
959 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
960 {
961   PetscErrorCode ierr;
962 
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
965   ierr = MatInitializePackage();CHKERRQ(ierr);
966   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
967   PetscValidPointer(mat,3);
968   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
974 /*@
975   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
976     preallocated but the nonzero structure and zero values will not be set.
977 
978   Logically Collective on DMDA
979 
980   Input Parameter:
981 + dm - the DM
982 - only - PETSC_TRUE if only want preallocation
983 
984   Level: developer
985 .seealso DMCreateMatrix()
986 @*/
987 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
988 {
989   PetscFunctionBegin;
990   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
991   dm->prealloc_only = only;
992   PetscFunctionReturn(0);
993 }
994 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array

  Level: developer

  Notes: The array is cached on the DM (checked-in/checked-out lists) so repeated Get/Restore pairs reuse the allocation.

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the checked-in list if one exists, otherwise allocate a fresh link */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the cached buffer only when the request exceeds its current capacity */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list and hand the buffer to the caller */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1040 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the pointer to the work array; set to NULL on successful return

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the link holding this buffer on the checked-out list and move it back to the checked-in list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The buffer was not obtained from DMGetWorkArray() (or was already restored) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1079 
1080 #undef __FUNCT__
1081 #define __FUNCT__ "DMSetNullSpaceConstructor"
1082 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1083 {
1084   PetscFunctionBegin;
1085   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1086   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1087   dm->nullspaceConstructors[field] = nullsp;
1088   PetscFunctionReturn(0);
1089 }
1090 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so partial failure leaves them in a defined state */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section path: two passes over the chart — first count dofs per field, then fill in global indices */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count the unconstrained dofs for each field at points with gdof > 0
       (presumably gdof <= 0 marks points not owned by this process — TODO confirm against PetscSection docs) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the index arrays and reset the counters for reuse as write offsets in pass 2 */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Pass 2: record the global offset of each unconstrained field dof */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        /* Copy the names so the caller owns them (freed with PetscFree()) */
        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f] */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    /* No default section: defer to the DM implementation, if it provides one */
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1203 
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "DMCreateFieldDecomposition"
1207 /*@C
1208   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1209                           corresponding to different fields: each IS contains the global indices of the dofs of the
1210                           corresponding field. The optional list of DMs define the DM for each subproblem.
1211                           Generalizes DMCreateFieldIS().
1212 
1213   Not collective
1214 
1215   Input Parameter:
1216 . dm - the DM object
1217 
1218   Output Parameters:
1219 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1220 . namelist  - The name for each field (or NULL if not requested)
1221 . islist    - The global indices for each field (or NULL if not requested)
1222 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1223 
1224   Level: intermediate
1225 
1226   Notes:
1227   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1228   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1229   and all of the arrays should be freed with PetscFree().
1230 
1231 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1232 @*/
1233 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1234 {
1235   PetscErrorCode ierr;
1236 
1237   PetscFunctionBegin;
1238   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1239   if (len) {
1240     PetscValidPointer(len,2);
1241     *len = 0;
1242   }
1243   if (namelist) {
1244     PetscValidPointer(namelist,3);
1245     *namelist = 0;
1246   }
1247   if (islist) {
1248     PetscValidPointer(islist,4);
1249     *islist = 0;
1250   }
1251   if (dmlist) {
1252     PetscValidPointer(dmlist,5);
1253     *dmlist = 0;
1254   }
1255   /*
1256    Is it a good idea to apply the following check across all impls?
1257    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1258    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1259    */
1260   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1261   if (!dm->ops->createfielddecomposition) {
1262     PetscSection section;
1263     PetscInt     numFields, f;
1264 
1265     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1266     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1267     if (section && numFields && dm->ops->createsubdm) {
1268       *len = numFields;
1269       if (namelist) {ierr = PetscMalloc1(numFields,namelist);CHKERRQ(ierr);}
1270       if (islist)   {ierr = PetscMalloc1(numFields,islist);CHKERRQ(ierr);}
1271       if (dmlist)   {ierr = PetscMalloc1(numFields,dmlist);CHKERRQ(ierr);}
1272       for (f = 0; f < numFields; ++f) {
1273         const char *fieldName;
1274 
1275         ierr = DMCreateSubDM(dm, 1, &f, islist ? &(*islist)[f] : NULL, dmlist ? &(*dmlist)[f] : NULL);CHKERRQ(ierr);
1276         if (namelist) {
1277           ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1278           ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1279         }
1280       }
1281     } else {
1282       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1283       /* By default there are no DMs associated with subproblems. */
1284       if (dmlist) *dmlist = NULL;
1285     }
1286   } else {
1287     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1288   }
1289   PetscFunctionReturn(0);
1290 }
1291 
1292 #undef __FUNCT__
1293 #define __FUNCT__ "DMCreateSubDM"
1294 /*@C
1295   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1296                   The fields are defined by DMCreateFieldIS().
1297 
1298   Not collective
1299 
1300   Input Parameters:
1301 + dm - the DM object
1302 . numFields - number of fields in this subproblem
1303 - len       - The number of subproblems in the decomposition (or NULL if not requested)
1304 
1305   Output Parameters:
1306 . is - The global indices for the subproblem
1307 - dm - The DM for the subproblem
1308 
1309   Level: intermediate
1310 
1311 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1312 @*/
1313 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1314 {
1315   PetscErrorCode ierr;
1316 
1317   PetscFunctionBegin;
1318   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1319   PetscValidPointer(fields,3);
1320   if (is) PetscValidPointer(is,4);
1321   if (subdm) PetscValidPointer(subdm,5);
1322   if (dm->ops->createsubdm) {
1323     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1324   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1325   PetscFunctionReturn(0);
1326 }
1327 
1328 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

  If the DM type provides no decomposition, all requested outputs are returned empty (len 0, NULL arrays) without error.

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Every output is optional; initialize the requested ones */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* subdomain DMs share the parent's user context */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1393 
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1397 /*@C
1398   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1399 
1400   Not collective
1401 
1402   Input Parameters:
1403 + dm - the DM object
1404 . n  - the number of subdomain scatters
1405 - subdms - the local subdomains
1406 
1407   Output Parameters:
1408 + n     - the number of scatters returned
1409 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1410 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1411 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1412 
1413   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1414   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1415   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1416   solution and residual data.
1417 
1418   Level: developer
1419 
1420 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1421 @*/
1422 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1423 {
1424   PetscErrorCode ierr;
1425 
1426   PetscFunctionBegin;
1427   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1428   PetscValidPointer(subdms,3);
1429   if (dm->ops->createddscatters) {
1430     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1431   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1432   PetscFunctionReturn(0);
1433 }
1434 
1435 #undef __FUNCT__
1436 #define __FUNCT__ "DMRefine"
1437 /*@
1438   DMRefine - Refines a DM object
1439 
1440   Collective on DM
1441 
1442   Input Parameter:
1443 + dm   - the DM object
1444 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1445 
1446   Output Parameter:
1447 . dmf - the refined DM, or NULL
1448 
1449   Note: If no refinement was done, the return value is NULL
1450 
1451   Level: developer
1452 
1453 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1454 @*/
1455 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1456 {
1457   PetscErrorCode   ierr;
1458   DMRefineHookLink link;
1459 
1460   PetscFunctionBegin;
1461   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1462   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1463   if (*dmf) {
1464     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1465 
1466     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1467 
1468     (*dmf)->ctx       = dm->ctx;
1469     (*dmf)->leveldown = dm->leveldown;
1470     (*dmf)->levelup   = dm->levelup + 1;
1471 
1472     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1473     for (link=dm->refinehook; link; link=link->next) {
1474       if (link->refinehook) {
1475         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1476       }
1477     }
1478   }
1479   PetscFunctionReturn(0);
1480 }
1481 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@C
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - DM on which to run hooks when it is refined (see DMRefine()) or interpolated to a finer level
.  refinehook - function to run when setting up the refined (finer) level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

   This function is currently not available from Fortran.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode   ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link at the tail so hooks run in registration order */
  ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx        = ctx;
  link->next       = NULL;
  *p               = link;
  PetscFunctionReturn(0);
}
1537 
1538 #undef __FUNCT__
1539 #define __FUNCT__ "DMInterpolate"
1540 /*@
1541    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1542 
1543    Collective if any hooks are
1544 
1545    Input Arguments:
1546 +  coarse - coarser DM to use as a base
1547 .  restrct - interpolation matrix, apply using MatInterpolate()
1548 -  fine - finer DM to update
1549 
1550    Level: developer
1551 
1552 .seealso: DMRefineHookAdd(), MatInterpolate()
1553 @*/
1554 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1555 {
1556   PetscErrorCode   ierr;
1557   DMRefineHookLink link;
1558 
1559   PetscFunctionBegin;
1560   for (link=fine->refinehook; link; link=link->next) {
1561     if (link->interphook) {
1562       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1563     }
1564   }
1565   PetscFunctionReturn(0);
1566 }
1567 
1568 #undef __FUNCT__
1569 #define __FUNCT__ "DMGetRefineLevel"
1570 /*@
1571     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1572 
1573     Not Collective
1574 
1575     Input Parameter:
1576 .   dm - the DM object
1577 
1578     Output Parameter:
1579 .   level - number of refinements
1580 
1581     Level: developer
1582 
1583 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1584 
1585 @*/
1586 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1587 {
1588   PetscFunctionBegin;
1589   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1590   *level = dm->levelup;
1591   PetscFunctionReturn(0);
1592 }
1593 
1594 #undef __FUNCT__
1595 #define __FUNCT__ "DMGlobalToLocalHookAdd"
1596 /*@C
1597    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1598 
1599    Logically Collective
1600 
1601    Input Arguments:
1602 +  dm - the DM
1603 .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1604 .  endhook - function to run after DMGlobalToLocalEnd() has completed
1605 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1606 
1607    Calling sequence for beginhook:
1608 $    beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1609 
1610 +  dm - global DM
1611 .  g - global vector
1612 .  mode - mode
1613 .  l - local vector
1614 -  ctx - optional user-defined function context
1615 
1616 
1617    Calling sequence for endhook:
1618 $    endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1619 
1620 +  global - global DM
1621 -  ctx - optional user-defined function context
1622 
1623    Level: advanced
1624 
1625 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1626 @*/
1627 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1628 {
1629   PetscErrorCode          ierr;
1630   DMGlobalToLocalHookLink link,*p;
1631 
1632   PetscFunctionBegin;
1633   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1634   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1635   ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
1636   link->beginhook = beginhook;
1637   link->endhook   = endhook;
1638   link->ctx       = ctx;
1639   link->next      = NULL;
1640   *p              = link;
1641   PetscFunctionReturn(0);
1642 }
1643 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before starting communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* If the DM carries a default PetscSF, communicate with it; otherwise defer to the type's implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast path supports insertion only */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to the plain modes expected by the implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1691 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Complete the communication started in DMGlobalToLocalBegin(): SF path or type implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF broadcast path supports insertion only */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to the plain modes expected by the implementation */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks registered with DMGlobalToLocalHookAdd() after the data has arrived */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1737 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* A default SF exists (PetscSection-based DM): communicate directly with PetscSFReduce */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the corresponding MPI reduction operation */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Delegate to the implementation; the *_ALL_VALUES modes are translated to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1793 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Must mirror the mode/op mapping used in DMLocalToGlobalBegin() so the Reduce pair matches */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Delegate to the implementation; the *_ALL_VALUES modes are translated to their plain counterparts */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1845 
1846 #undef __FUNCT__
1847 #define __FUNCT__ "DMLocalToLocalBegin"
1848 /*@
1849    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1850    that contain irrelevant values) to another local vector where the ghost
1851    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1852 
1853    Neighbor-wise Collective on DM and Vec
1854 
1855    Input Parameters:
1856 +  dm - the DM object
1857 .  g - the original local vector
1858 -  mode - one of INSERT_VALUES or ADD_VALUES
1859 
1860    Output Parameter:
1861 .  l  - the local vector with correct ghost values
1862 
1863    Level: intermediate
1864 
1865    Notes:
1866    The local vectors used here need not be the same as those
1867    obtained from DMCreateLocalVector(), BUT they
1868    must have the same parallel data layout; they could, for example, be
1869    obtained with VecDuplicate() from the DM originating vectors.
1870 
1871 .keywords: DM, local-to-local, begin
1872 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1873 
1874 @*/
1875 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1876 {
1877   PetscErrorCode          ierr;
1878 
1879   PetscFunctionBegin;
1880   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1881   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1882   PetscFunctionReturn(0);
1883 }
1884 
1885 #undef __FUNCT__
1886 #define __FUNCT__ "DMLocalToLocalEnd"
1887 /*@
1888    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1889    that contain irrelevant values) to another local vector where the ghost
1890    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1891 
1892    Neighbor-wise Collective on DM and Vec
1893 
1894    Input Parameters:
1895 +  da - the DM object
1896 .  g - the original local vector
1897 -  mode - one of INSERT_VALUES or ADD_VALUES
1898 
1899    Output Parameter:
1900 .  l  - the local vector with correct ghost values
1901 
1902    Level: intermediate
1903 
1904    Notes:
1905    The local vectors used here need not be the same as those
1906    obtained from DMCreateLocalVector(), BUT they
1907    must have the same parallel data layout; they could, for example, be
1908    obtained with VecDuplicate() from the DM originating vectors.
1909 
1910 .keywords: DM, local-to-local, end
1911 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1912 
1913 @*/
1914 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1915 {
1916   PetscErrorCode          ierr;
1917 
1918   PetscFunctionBegin;
1919   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1920   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1921   PetscFunctionReturn(0);
1922 }
1923 
1924 
1925 #undef __FUNCT__
1926 #define __FUNCT__ "DMCoarsen"
1927 /*@
1928     DMCoarsen - Coarsens a DM object
1929 
1930     Collective on DM
1931 
1932     Input Parameter:
1933 +   dm - the DM object
1934 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1935 
1936     Output Parameter:
1937 .   dmc - the coarsened DM
1938 
1939     Level: developer
1940 
1941 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1942 
1943 @*/
1944 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1945 {
1946   PetscErrorCode    ierr;
1947   DMCoarsenHookLink link;
1948 
1949   PetscFunctionBegin;
1950   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1951   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1952   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1953   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1954   (*dmc)->ctx               = dm->ctx;
1955   (*dmc)->levelup           = dm->levelup;
1956   (*dmc)->leveldown         = dm->leveldown + 1;
1957   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1958   for (link=dm->coarsenhook; link; link=link->next) {
1959     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1960   }
1961   PetscFunctionReturn(0);
1962 }
1963 
1964 #undef __FUNCT__
1965 #define __FUNCT__ "DMCoarsenHookAdd"
1966 /*@C
1967    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1968 
1969    Logically Collective
1970 
1971    Input Arguments:
1972 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1973 .  coarsenhook - function to run when setting up a coarser level
1974 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1975 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1976 
1977    Calling sequence of coarsenhook:
1978 $    coarsenhook(DM fine,DM coarse,void *ctx);
1979 
1980 +  fine - fine level DM
1981 .  coarse - coarse level DM to restrict problem to
1982 -  ctx - optional user-defined function context
1983 
1984    Calling sequence for restricthook:
1985 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1986 
1987 +  fine - fine level DM
1988 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1989 .  rscale - scaling vector for restriction
1990 .  inject - matrix restricting by injection
1991 .  coarse - coarse level DM to update
1992 -  ctx - optional user-defined function context
1993 
1994    Level: advanced
1995 
1996    Notes:
1997    This function is only needed if auxiliary data needs to be set up on coarse grids.
1998 
1999    If this function is called multiple times, the hooks will be run in the order they are added.
2000 
2001    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2002    extract the finest level information from its context (instead of from the SNES).
2003 
2004    This function is currently not available from Fortran.
2005 
2006 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2007 @*/
2008 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2009 {
2010   PetscErrorCode    ierr;
2011   DMCoarsenHookLink link,*p;
2012 
2013   PetscFunctionBegin;
2014   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2015   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2016   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2017   link->coarsenhook  = coarsenhook;
2018   link->restricthook = restricthook;
2019   link->ctx          = ctx;
2020   link->next         = NULL;
2021   *p                 = link;
2022   PetscFunctionReturn(0);
2023 }
2024 
2025 #undef __FUNCT__
2026 #define __FUNCT__ "DMRestrict"
2027 /*@
2028    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2029 
2030    Collective if any hooks are
2031 
2032    Input Arguments:
2033 +  fine - finer DM to use as a base
2034 .  restrct - restriction matrix, apply using MatRestrict()
2035 .  inject - injection matrix, also use MatRestrict()
2036 -  coarse - coarer DM to update
2037 
2038    Level: developer
2039 
2040 .seealso: DMCoarsenHookAdd(), MatRestrict()
2041 @*/
2042 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2043 {
2044   PetscErrorCode    ierr;
2045   DMCoarsenHookLink link;
2046 
2047   PetscFunctionBegin;
2048   for (link=fine->coarsenhook; link; link=link->next) {
2049     if (link->restricthook) {
2050       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2051     }
2052   }
2053   PetscFunctionReturn(0);
2054 }
2055 
2056 #undef __FUNCT__
2057 #define __FUNCT__ "DMSubDomainHookAdd"
2058 /*@C
2059    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2060 
2061    Logically Collective
2062 
2063    Input Arguments:
2064 +  global - global DM
2065 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2066 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2067 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2068 
2069 
2070    Calling sequence for ddhook:
2071 $    ddhook(DM global,DM block,void *ctx)
2072 
2073 +  global - global DM
2074 .  block  - block DM
2075 -  ctx - optional user-defined function context
2076 
2077    Calling sequence for restricthook:
2078 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2079 
2080 +  global - global DM
2081 .  out    - scatter to the outer (with ghost and overlap points) block vector
2082 .  in     - scatter to block vector values only owned locally
2083 .  block  - block DM
2084 -  ctx - optional user-defined function context
2085 
2086    Level: advanced
2087 
2088    Notes:
2089    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2090 
2091    If this function is called multiple times, the hooks will be run in the order they are added.
2092 
2093    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2094    extract the global information from its context (instead of from the SNES).
2095 
2096    This function is currently not available from Fortran.
2097 
2098 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2099 @*/
2100 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2101 {
2102   PetscErrorCode      ierr;
2103   DMSubDomainHookLink link,*p;
2104 
2105   PetscFunctionBegin;
2106   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2107   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2108   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2109   link->restricthook = restricthook;
2110   link->ddhook       = ddhook;
2111   link->ctx          = ctx;
2112   link->next         = NULL;
2113   *p                 = link;
2114   PetscFunctionReturn(0);
2115 }
2116 
2117 #undef __FUNCT__
2118 #define __FUNCT__ "DMSubDomainRestrict"
2119 /*@
2120    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2121 
2122    Collective if any hooks are
2123 
2124    Input Arguments:
2125 +  fine - finer DM to use as a base
2126 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2127 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2128 -  coarse - coarer DM to update
2129 
2130    Level: developer
2131 
2132 .seealso: DMCoarsenHookAdd(), MatRestrict()
2133 @*/
2134 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2135 {
2136   PetscErrorCode      ierr;
2137   DMSubDomainHookLink link;
2138 
2139   PetscFunctionBegin;
2140   for (link=global->subdomainhook; link; link=link->next) {
2141     if (link->restricthook) {
2142       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2143     }
2144   }
2145   PetscFunctionReturn(0);
2146 }
2147 
2148 #undef __FUNCT__
2149 #define __FUNCT__ "DMGetCoarsenLevel"
2150 /*@
2151     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2152 
2153     Not Collective
2154 
2155     Input Parameter:
2156 .   dm - the DM object
2157 
2158     Output Parameter:
2159 .   level - number of coarsenings
2160 
2161     Level: developer
2162 
2163 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2164 
2165 @*/
2166 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2167 {
2168   PetscFunctionBegin;
2169   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2170   *level = dm->leveldown;
2171   PetscFunctionReturn(0);
2172 }
2173 
2174 
2175 
2176 #undef __FUNCT__
2177 #define __FUNCT__ "DMRefineHierarchy"
2178 /*@C
2179     DMRefineHierarchy - Refines a DM object, all levels at once
2180 
2181     Collective on DM
2182 
2183     Input Parameter:
2184 +   dm - the DM object
2185 -   nlevels - the number of levels of refinement
2186 
2187     Output Parameter:
2188 .   dmf - the refined DM hierarchy
2189 
2190     Level: developer
2191 
2192 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2193 
2194 @*/
2195 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2196 {
2197   PetscErrorCode ierr;
2198 
2199   PetscFunctionBegin;
2200   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2201   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2202   if (nlevels == 0) PetscFunctionReturn(0);
2203   if (dm->ops->refinehierarchy) {
2204     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2205   } else if (dm->ops->refine) {
2206     PetscInt i;
2207 
2208     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2209     for (i=1; i<nlevels; i++) {
2210       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2211     }
2212   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2213   PetscFunctionReturn(0);
2214 }
2215 
2216 #undef __FUNCT__
2217 #define __FUNCT__ "DMCoarsenHierarchy"
2218 /*@C
2219     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2220 
2221     Collective on DM
2222 
2223     Input Parameter:
2224 +   dm - the DM object
2225 -   nlevels - the number of levels of coarsening
2226 
2227     Output Parameter:
2228 .   dmc - the coarsened DM hierarchy
2229 
2230     Level: developer
2231 
2232 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2233 
2234 @*/
2235 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2236 {
2237   PetscErrorCode ierr;
2238 
2239   PetscFunctionBegin;
2240   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2241   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2242   if (nlevels == 0) PetscFunctionReturn(0);
2243   PetscValidPointer(dmc,3);
2244   if (dm->ops->coarsenhierarchy) {
2245     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2246   } else if (dm->ops->coarsen) {
2247     PetscInt i;
2248 
2249     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2250     for (i=1; i<nlevels; i++) {
2251       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2252     }
2253   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2254   PetscFunctionReturn(0);
2255 }
2256 
2257 #undef __FUNCT__
2258 #define __FUNCT__ "DMCreateAggregates"
2259 /*@
2260    DMCreateAggregates - Gets the aggregates that map between
2261    grids associated with two DMs.
2262 
2263    Collective on DM
2264 
2265    Input Parameters:
2266 +  dmc - the coarse grid DM
2267 -  dmf - the fine grid DM
2268 
2269    Output Parameters:
2270 .  rest - the restriction matrix (transpose of the projection matrix)
2271 
2272    Level: intermediate
2273 
2274 .keywords: interpolation, restriction, multigrid
2275 
2276 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2277 @*/
2278 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2279 {
2280   PetscErrorCode ierr;
2281 
2282   PetscFunctionBegin;
2283   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2284   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2285   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2286   PetscFunctionReturn(0);
2287 }
2288 
2289 #undef __FUNCT__
2290 #define __FUNCT__ "DMSetApplicationContextDestroy"
2291 /*@C
2292     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2293 
2294     Not Collective
2295 
2296     Input Parameters:
2297 +   dm - the DM object
2298 -   destroy - the destroy function
2299 
2300     Level: intermediate
2301 
2302 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2303 
2304 @*/
2305 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2306 {
2307   PetscFunctionBegin;
2308   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2309   dm->ctxdestroy = destroy;
2310   PetscFunctionReturn(0);
2311 }
2312 
2313 #undef __FUNCT__
2314 #define __FUNCT__ "DMSetApplicationContext"
2315 /*@
2316     DMSetApplicationContext - Set a user context into a DM object
2317 
2318     Not Collective
2319 
2320     Input Parameters:
2321 +   dm - the DM object
2322 -   ctx - the user context
2323 
2324     Level: intermediate
2325 
2326 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2327 
2328 @*/
2329 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2330 {
2331   PetscFunctionBegin;
2332   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2333   dm->ctx = ctx;
2334   PetscFunctionReturn(0);
2335 }
2336 
2337 #undef __FUNCT__
2338 #define __FUNCT__ "DMGetApplicationContext"
2339 /*@
2340     DMGetApplicationContext - Gets a user context from a DM object
2341 
2342     Not Collective
2343 
2344     Input Parameter:
2345 .   dm - the DM object
2346 
2347     Output Parameter:
2348 .   ctx - the user context
2349 
2350     Level: intermediate
2351 
2352 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2353 
2354 @*/
2355 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2356 {
2357   PetscFunctionBegin;
2358   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2359   *(void**)ctx = dm->ctx;
2360   PetscFunctionReturn(0);
2361 }
2362 
2363 #undef __FUNCT__
2364 #define __FUNCT__ "DMSetVariableBounds"
2365 /*@C
2366     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2367 
2368     Logically Collective on DM
2369 
2370     Input Parameter:
2371 +   dm - the DM object
2372 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2373 
2374     Level: intermediate
2375 
2376 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2377          DMSetJacobian()
2378 
2379 @*/
2380 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2381 {
2382   PetscFunctionBegin;
2383   dm->ops->computevariablebounds = f;
2384   PetscFunctionReturn(0);
2385 }
2386 
2387 #undef __FUNCT__
2388 #define __FUNCT__ "DMHasVariableBounds"
2389 /*@
2390     DMHasVariableBounds - does the DM object have a variable bounds function?
2391 
2392     Not Collective
2393 
2394     Input Parameter:
2395 .   dm - the DM object to destroy
2396 
2397     Output Parameter:
2398 .   flg - PETSC_TRUE if the variable bounds function exists
2399 
2400     Level: developer
2401 
2402 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2403 
2404 @*/
2405 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2406 {
2407   PetscFunctionBegin;
2408   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2409   PetscFunctionReturn(0);
2410 }
2411 
2412 #undef __FUNCT__
2413 #define __FUNCT__ "DMComputeVariableBounds"
2414 /*@C
2415     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2416 
2417     Logically Collective on DM
2418 
2419     Input Parameters:
2420 +   dm - the DM object to destroy
2421 -   x  - current solution at which the bounds are computed
2422 
2423     Output parameters:
2424 +   xl - lower bound
2425 -   xu - upper bound
2426 
2427     Level: intermediate
2428 
2429 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2430 
2431 @*/
2432 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2433 {
2434   PetscErrorCode ierr;
2435 
2436   PetscFunctionBegin;
2437   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2438   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2439   if (dm->ops->computevariablebounds) {
2440     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2441   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2442   PetscFunctionReturn(0);
2443 }
2444 
2445 #undef __FUNCT__
2446 #define __FUNCT__ "DMHasColoring"
2447 /*@
2448     DMHasColoring - does the DM object have a method of providing a coloring?
2449 
2450     Not Collective
2451 
2452     Input Parameter:
2453 .   dm - the DM object
2454 
2455     Output Parameter:
2456 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2457 
2458     Level: developer
2459 
2460 .seealso DMHasFunction(), DMCreateColoring()
2461 
2462 @*/
2463 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2464 {
2465   PetscFunctionBegin;
2466   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2467   PetscFunctionReturn(0);
2468 }
2469 
2470 #undef  __FUNCT__
2471 #define __FUNCT__ "DMSetVec"
2472 /*@C
2473     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2474 
2475     Collective on DM
2476 
2477     Input Parameter:
2478 +   dm - the DM object
2479 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2480 
2481     Level: developer
2482 
2483 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2484 
2485 @*/
2486 PetscErrorCode  DMSetVec(DM dm,Vec x)
2487 {
2488   PetscErrorCode ierr;
2489 
2490   PetscFunctionBegin;
2491   if (x) {
2492     if (!dm->x) {
2493       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2494     }
2495     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2496   } else if (dm->x) {
2497     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2498   }
2499   PetscFunctionReturn(0);
2500 }
2501 
/* Registry of DM implementations, populated by DMRegister()/DMRegisterAll() and searched by DMSetType() */
PetscFunctionList DMList              = NULL;
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2504 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  /* r is the registered constructor for the requested type */
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2550 
2551 #undef __FUNCT__
2552 #define __FUNCT__ "DMGetType"
2553 /*@C
2554   DMGetType - Gets the DM type name (as a string) from the DM.
2555 
2556   Not Collective
2557 
2558   Input Parameter:
2559 . dm  - The DM
2560 
2561   Output Parameter:
2562 . type - The DM type name
2563 
2564   Level: intermediate
2565 
2566 .keywords: DM, get, type, name
2567 .seealso: DMSetType(), DMCreate()
2568 @*/
2569 PetscErrorCode  DMGetType(DM dm, DMType *type)
2570 {
2571   PetscErrorCode ierr;
2572 
2573   PetscFunctionBegin;
2574   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2575   PetscValidCharPointer(type,2);
2576   if (!DMRegisterAllCalled) {
2577     ierr = DMRegisterAll();CHKERRQ(ierr);
2578   }
2579   *type = ((PetscObject)dm)->type_name;
2580   PetscFunctionReturn(0);
2581 }
2582 
2583 #undef __FUNCT__
2584 #define __FUNCT__ "DMConvert"
2585 /*@C
2586   DMConvert - Converts a DM to another DM, either of the same or different type.
2587 
2588   Collective on DM
2589 
2590   Input Parameters:
2591 + dm - the DM
2592 - newtype - new DM type (use "same" for the same type)
2593 
2594   Output Parameter:
2595 . M - pointer to new DM
2596 
2597   Notes:
2598   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2599   the MPI communicator of the generated DM is always the same as the communicator
2600   of the input DM.
2601 
2602   Level: intermediate
2603 
2604 .seealso: DMCreate()
2605 @*/
2606 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2607 {
2608   DM             B;
2609   char           convname[256];
2610   PetscBool      sametype, issame;
2611   PetscErrorCode ierr;
2612 
2613   PetscFunctionBegin;
2614   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2615   PetscValidType(dm,1);
2616   PetscValidPointer(M,3);
2617   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2618   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2619   {
2620     PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;
2621 
2622     /*
2623        Order of precedence:
2624        1) See if a specialized converter is known to the current DM.
2625        2) See if a specialized converter is known to the desired DM class.
2626        3) See if a good general converter is registered for the desired class
2627        4) See if a good general converter is known for the current matrix.
2628        5) Use a really basic converter.
2629     */
2630 
2631     /* 1) See if a specialized converter is known to the current DM and the desired class */
2632     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2633     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2634     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2635     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2636     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2637     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
2638     if (conv) goto foundconv;
2639 
2640     /* 2)  See if a specialized converter is known to the desired DM class. */
2641     ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
2642     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2643     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2644     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2645     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2646     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2647     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2648     ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
2649     if (conv) {
2650       ierr = DMDestroy(&B);CHKERRQ(ierr);
2651       goto foundconv;
2652     }
2653 
2654 #if 0
2655     /* 3) See if a good general converter is registered for the desired class */
2656     conv = B->ops->convertfrom;
2657     ierr = DMDestroy(&B);CHKERRQ(ierr);
2658     if (conv) goto foundconv;
2659 
2660     /* 4) See if a good general converter is known for the current matrix */
2661     if (dm->ops->convert) {
2662       conv = dm->ops->convert;
2663     }
2664     if (conv) goto foundconv;
2665 #endif
2666 
2667     /* 5) Use a really basic converter. */
2668     SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2669 
2670 foundconv:
2671     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2672     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2673     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2674   }
2675   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2676   PetscFunctionReturn(0);
2677 }
2678 
2679 /*--------------------------------------------------------------------------------------------------------------------*/
2680 
2681 #undef __FUNCT__
2682 #define __FUNCT__ "DMRegister"
2683 /*@C
2684   DMRegister -  Adds a new DM component implementation
2685 
2686   Not Collective
2687 
2688   Input Parameters:
2689 + name        - The name of a new user-defined creation routine
2690 - create_func - The creation routine itself
2691 
2692   Notes:
2693   DMRegister() may be called multiple times to add several user-defined DMs
2694 
2695 
2696   Sample usage:
2697 .vb
2698     DMRegister("my_da", MyDMCreate);
2699 .ve
2700 
2701   Then, your DM type can be chosen with the procedural interface via
2702 .vb
2703     DMCreate(MPI_Comm, DM *);
2704     DMSetType(DM,"my_da");
2705 .ve
2706    or at runtime via the option
2707 .vb
    -dm_type my_da
2709 .ve
2710 
2711   Level: advanced
2712 
2713 .keywords: DM, register
2714 .seealso: DMRegisterAll(), DMRegisterDestroy()
2715 
2716 @*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the creation routine under sname in the global DM type list,
     making the type selectable later through DMSetType() */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2725 
2726 #undef __FUNCT__
2727 #define __FUNCT__ "DMLoad"
2728 /*@C
2729   DMLoad - Loads a DM that has been stored in binary  with DMView().
2730 
2731   Collective on PetscViewer
2732 
2733   Input Parameters:
2734 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2735            some related function before a call to DMLoad().
2736 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2737            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2738 
2739    Level: intermediate
2740 
2741   Notes:
2742    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2743 
2744   Notes for advanced users:
2745   Most users should not need to know the details of the binary storage
2746   format, since DMLoad() and DMView() completely hide these details.
2747   But for anyone who's interested, the standard binary matrix storage
2748   format is
2749 .vb
2750      has not yet been determined
2751 .ve
2752 
2753 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2754 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only binary viewers are accepted here, although the documentation above also
     mentions HDF5 -- NOTE(review): confirm whether HDF5 viewers should be supported */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream begins with the DM file classid followed by the stored type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the remainder of the load to the type-specific implementation, if any */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2777 
2778 /******************************** FEM Support **********************************/
2779 
2780 #undef __FUNCT__
2781 #define __FUNCT__ "DMPrintCellVector"
2782 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2783 {
2784   PetscInt       f;
2785   PetscErrorCode ierr;
2786 
2787   PetscFunctionBegin;
2788   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2789   for (f = 0; f < len; ++f) {
2790     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr);
2791   }
2792   PetscFunctionReturn(0);
2793 }
2794 
2795 #undef __FUNCT__
2796 #define __FUNCT__ "DMPrintCellMatrix"
2797 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2798 {
2799   PetscInt       f, g;
2800   PetscErrorCode ierr;
2801 
2802   PetscFunctionBegin;
2803   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2804   for (f = 0; f < rows; ++f) {
2805     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2806     for (g = 0; g < cols; ++g) {
2807       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2808     }
2809     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2810   }
2811   PetscFunctionReturn(0);
2812 }
2813 
2814 #undef __FUNCT__
2815 #define __FUNCT__ "DMPrintLocalVec"
2816 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2817 {
2818   PetscMPIInt    rank, numProcs;
2819   PetscInt       p;
2820   PetscErrorCode ierr;
2821 
2822   PetscFunctionBegin;
2823   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2824   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
2825   ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
2826   for (p = 0; p < numProcs; ++p) {
2827     if (p == rank) {
2828       Vec x;
2829 
2830       ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
2831       ierr = VecCopy(X, x);CHKERRQ(ierr);
2832       ierr = VecChop(x, tol);CHKERRQ(ierr);
2833       ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2834       ierr = VecDestroy(&x);CHKERRQ(ierr);
2835       ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2836     }
2837     ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
2838   }
2839   PetscFunctionReturn(0);
2840 }
2841 
2842 #undef __FUNCT__
2843 #define __FUNCT__ "DMGetDefaultSection"
2844 /*@
2845   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2846 
2847   Input Parameter:
2848 . dm - The DM
2849 
2850   Output Parameter:
2851 . section - The PetscSection
2852 
2853   Level: intermediate
2854 
2855   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2856 
2857 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2858 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: caller must not destroy it; NULL if no section has been set */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2867 
2868 #undef __FUNCT__
2869 #define __FUNCT__ "DMSetDefaultSection"
2870 /*@
2871   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2872 
2873   Input Parameters:
2874 + dm - The DM
2875 - section - The PetscSection
2876 
2877   Level: intermediate
2878 
2879   Note: Any existing Section will be destroyed
2880 
.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2882 @*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old section so passing the currently-set section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Propagate the section's field count and field names onto the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2909 
2910 #undef __FUNCT__
2911 #define __FUNCT__ "DMGetDefaultGlobalSection"
2912 /*@
2913   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2914 
2915   Collective on DM
2916 
2917   Input Parameter:
2918 . dm - The DM
2919 
2920   Output Parameter:
2921 . section - The PetscSection
2922 
2923   Level: intermediate
2924 
2925   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2926 
2927 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2928 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Build the global section lazily from the local section and the point SF,
       and refresh the dof value layout dm->map to match it */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2945 
2946 #undef __FUNCT__
2947 #define __FUNCT__ "DMSetDefaultGlobalSection"
2948 /*@
2949   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2950 
2951   Input Parameters:
2952 + dm - The DM
2953 - section - The PetscSection, or NULL
2954 
2955   Level: intermediate
2956 
2957   Note: Any existing Section will be destroyed
2958 
2959 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2960 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* section may be NULL, which forces a rebuild on the next DMGetDefaultGlobalSection() */
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-set section is safe;
     presumably PetscObjectReference() accepts NULL here -- TODO confirm */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2973 
2974 #undef __FUNCT__
2975 #define __FUNCT__ "DMGetDefaultSF"
2976 /*@
2977   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2978   it is created from the default PetscSection layouts in the DM.
2979 
2980   Input Parameter:
2981 . dm - The DM
2982 
2983   Output Parameter:
2984 . sf - The PetscSF
2985 
2986   Level: intermediate
2987 
2988   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2989 
2990 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2991 @*/
2992 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2993 {
2994   PetscInt       nroots;
2995   PetscErrorCode ierr;
2996 
2997   PetscFunctionBegin;
2998   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2999   PetscValidPointer(sf, 2);
3000   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
3001   if (nroots < 0) {
3002     PetscSection section, gSection;
3003 
3004     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
3005     if (section) {
3006       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
3007       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
3008     } else {
3009       *sf = NULL;
3010       PetscFunctionReturn(0);
3011     }
3012   }
3013   *sf = dm->defaultSF;
3014   PetscFunctionReturn(0);
3015 }
3016 
3017 #undef __FUNCT__
3018 #define __FUNCT__ "DMSetDefaultSF"
3019 /*@
3020   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3021 
3022   Input Parameters:
3023 + dm - The DM
3024 - sf - The PetscSF
3025 
3026   Level: intermediate
3027 
3028   Note: Any previous SF is destroyed
3029 
3030 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3031 @*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF(), this does not reference sf, so the DM
     takes over the caller's reference (caller must not destroy sf afterwards).
     Confirm this ownership transfer is intended and consistent with callers. */
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3043 
3044 #undef __FUNCT__
3045 #define __FUNCT__ "DMCreateDefaultSF"
3046 /*@C
3047   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3048   describing the data layout.
3049 
3050   Input Parameters:
3051 + dm - The DM
3052 . localSection - PetscSection describing the local data layout
3053 - globalSection - PetscSection describing the global data layout
3054 
3055   Level: intermediate
3056 
3057 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3058 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned global dofs so a global offset can be mapped to an owning rank */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a remotely-owned point as
     -(gdof+1) dofs; constrained dofs (gcdof) are excluded in both cases. */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
  /* Second pass: fill in the local offsets and the (rank, remote index) pairs */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    /* gsize is the number of globally-stored dofs for this point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets, skipping constrained dofs (cind is sorted) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the offset and search the layout ranges for the owner */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: all dofs map back to this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3134 
3135 #undef __FUNCT__
3136 #define __FUNCT__ "DMGetPointSF"
3137 /*@
3138   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3139 
3140   Input Parameter:
3141 . dm - The DM
3142 
3143   Output Parameter:
3144 . sf - The PetscSF
3145 
3146   Level: intermediate
3147 
3148   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3149 
3150 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3151 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference to the point SF created in DMCreate(); caller must not destroy it */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3160 
3161 #undef __FUNCT__
3162 #define __FUNCT__ "DMSetPointSF"
3163 /*@
3164   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3165 
3166   Input Parameters:
3167 + dm - The DM
3168 - sf - The PetscSF
3169 
3170   Level: intermediate
3171 
3172 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3173 @*/
3174 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3175 {
3176   PetscErrorCode ierr;
3177 
3178   PetscFunctionBegin;
3179   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3180   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3181   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3182   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3183   dm->sf = sf;
3184   PetscFunctionReturn(0);
3185 }
3186 
3187 #undef __FUNCT__
3188 #define __FUNCT__ "DMGetNumFields"
/* Return the number of fields currently stored in the DM (set by DMSetNumFields()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3197 
3198 #undef __FUNCT__
3199 #define __FUNCT__ "DMSetNumFields"
3200 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3201 {
3202   PetscInt       f;
3203   PetscErrorCode ierr;
3204 
3205   PetscFunctionBegin;
3206   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3207   for (f = 0; f < dm->numFields; ++f) {
3208     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3209   }
3210   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3211   dm->numFields = numFields;
3212   ierr          = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr);
3213   for (f = 0; f < dm->numFields; ++f) {
3214     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3215   }
3216   PetscFunctionReturn(0);
3217 }
3218 
3219 #undef __FUNCT__
3220 #define __FUNCT__ "DMGetField"
/* Return the f'th field object of the DM (borrowed reference; caller must not destroy). */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  /* The array existence is checked before the range so each failure mode gets a distinct message */
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3231 
3232 #undef __FUNCT__
3233 #define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has coordinates but the coarse DM does not,
   inject the fine coordinates into the coarse DM. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    /* Transfer fine coordinates to the coarse grid via the injection scatter */
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    /* DMSetCoordinates() referenced ccoords, so the local reference can be dropped */
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3256 
3257 #undef __FUNCT__
3258 #define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has coordinates but a newly created subdomain
   DM does not, scatter both the global and the local (ghosted) coordinate forms onto it. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  if (coords && !ccoords) {
    /* scat_i fills the global form, scat_g the local form, from the parent's global coordinates */
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* NOTE(review): DMSetCoordinatesLocal() drops the global vector just installed by
       DMSetCoordinates(); it is rebuilt lazily by DMGetCoordinates() -- confirm intended */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    /* The scatter arrays themselves were allocated by DMCreateDomainDecompositionScatters() */
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3289 
3290 #undef __FUNCT__
3291 #define __FUNCT__ "DMSetCoordinates"
3292 /*@
3293   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3294 
3295   Collective on DM
3296 
3297   Input Parameters:
3298 + dm - the DM
3299 - c - coordinate vector
3300 
3301   Note:
3302   The coordinates do include those for ghost points, which are in the local vector
3303 
3304   Level: intermediate
3305 
3306 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3308 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so resetting the currently-set vector is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Drop the cached local form; DMGetCoordinatesLocal() rebuilds it lazily */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Ensure coordinates propagate automatically to coarsened and subdomain DMs */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3324 
3325 #undef __FUNCT__
3326 #define __FUNCT__ "DMSetCoordinatesLocal"
3327 /*@
3328   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3329 
3330   Collective on DM
3331 
3332    Input Parameters:
3333 +  dm - the DM
3334 -  c - coordinate vector
3335 
3336   Note:
3337   The coordinates of ghost points can be set using DMSetCoordinates()
3338   followed by DMGetCoordinatesLocal(). This is intended to enable the
3339   setting of ghost coordinates outside of the domain.
3340 
3341   Level: intermediate
3342 
3343 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3344 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3345 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so resetting the currently-set vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Drop the cached global form; DMGetCoordinates() rebuilds it lazily from the local form */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3361 
3362 #undef __FUNCT__
3363 #define __FUNCT__ "DMGetCoordinates"
3364 /*@
3365   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3366 
3367   Not Collective
3368 
3369   Input Parameter:
3370 . dm - the DM
3371 
3372   Output Parameter:
3373 . c - global coordinate vector
3374 
3375   Note:
3376   This is a borrowed reference, so the user should NOT destroy this vector
3377 
3378   Each process has only the local coordinates (does NOT have the ghost coordinates).
3379 
3380   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3381   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3382 
3383   Level: intermediate
3384 
3385 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3386 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3387 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    /* Rebuild the global coordinate vector from the cached local form */
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference; NULL if no coordinates have been set in either form */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3407 
3408 #undef __FUNCT__
3409 #define __FUNCT__ "DMGetCoordinatesLocal"
3410 /*@
3411   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3412 
3413   Collective on DM
3414 
3415   Input Parameter:
3416 . dm - the DM
3417 
3418   Output Parameter:
3419 . c - coordinate vector
3420 
3421   Note:
3422   This is a borrowed reference, so the user should NOT destroy this vector
3423 
3424   Each process has the local and ghost coordinates
3425 
3426   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3427   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3428 
3429   Level: intermediate
3430 
3431 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3432 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3433 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    /* Rebuild the local (ghosted) coordinate vector from the cached global form;
       this path makes the call collective, as noted in the documentation above */
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference; NULL if no coordinates have been set in either form */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3453 
3454 #undef __FUNCT__
3455 #define __FUNCT__ "DMGetCoordinateDM"
3456 /*@
3457   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3458 
3459   Collective on DM
3460 
3461   Input Parameter:
3462 . dm - the DM
3463 
3464   Output Parameter:
3465 . cdm - coordinate DM
3466 
3467   Level: intermediate
3468 
3469 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3470 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3471 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    /* Create lazily through the type-specific implementation */
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3486 
3487 #undef __FUNCT__
3488 #define __FUNCT__ "DMSetCoordinateDM"
3489 /*@
3490   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3491 
3492   Logically Collective on DM
3493 
3494   Input Parameters:
3495 + dm - the DM
3496 - cdm - coordinate DM
3497 
3498   Level: intermediate
3499 
3500 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3501 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3502 @*/
3503 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3504 {
3505   PetscErrorCode ierr;
3506 
3507   PetscFunctionBegin;
3508   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3509   PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
3510   ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
3511   dm->coordinateDM = cdm;
3512   ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
3513   PetscFunctionReturn(0);
3514 }
3515 
3516 #undef __FUNCT__
3517 #define __FUNCT__ "DMGetCoordinateSection"
3518 /*@
3519   DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.
3520 
3521   Not Collective
3522 
3523   Input Parameter:
3524 . dm - The DM object
3525 
3526   Output Parameter:
3527 . section - The PetscSection object
3528 
3529   Level: intermediate
3530 
3531 .keywords: mesh, coordinates
3532 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
3533 @*/
3534 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
3535 {
3536   DM             cdm;
3537   PetscErrorCode ierr;
3538 
3539   PetscFunctionBegin;
3540   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3541   PetscValidPointer(section, 2);
3542   ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3543   ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr);
3544   PetscFunctionReturn(0);
3545 }
3546 
3547 #undef __FUNCT__
3548 #define __FUNCT__ "DMSetCoordinateSection"
3549 /*@
3550   DMSetCoordinateSection - Set the layout of coordinate values over the mesh.
3551 
3552   Not Collective
3553 
3554   Input Parameters:
3555 + dm      - The DM object
3556 - section - The PetscSection object
3557 
3558   Level: intermediate
3559 
3560 .keywords: mesh, coordinates
3561 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
3562 @*/
3563 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section)
3564 {
3565   DM             cdm;
3566   PetscErrorCode ierr;
3567 
3568   PetscFunctionBegin;
3569   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3570   PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
3571   ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3572   ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr);
3573   PetscFunctionReturn(0);
3574 }
3575 
3576 #undef __FUNCT__
3577 #define __FUNCT__ "DMLocatePoints"
3578 /*@
3579   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3580 
3581   Not Collective
3582 
3583   Input Parameters:
3584 + dm - The DM
3585 - v - The Vec of points
3586 
3587   Output Parameter:
3588 . cells - The local cell numbers for cells which contain the points
3589 
3590   Level: developer
3591 
3592 .keywords: point location, mesh
3593 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3594 @*/
3595 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3596 {
3597   PetscErrorCode ierr;
3598 
3599   PetscFunctionBegin;
3600   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3601   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3602   PetscValidPointer(cells,3);
3603   if (dm->ops->locatepoints) {
3604     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3605   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3606   PetscFunctionReturn(0);
3607 }
3608