xref: /petsc/src/dm/interface/dm.c (revision a7b5fb5f7cb42c18903bd3d6423cc78f00b39ba3)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 const char *const DMBoundaryTypes[] = {"NONE","GHOSTED","MIRROR","PERIODIC","TWIST","DM_BOUNDARY_",0};
8 
9 #undef __FUNCT__
10 #define __FUNCT__ "DMCreate"
11 /*@
12   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
13 
14    If you never  call DMSetType()  it will generate an
15    error when you try to use the vector.
16 
17   Collective on MPI_Comm
18 
19   Input Parameter:
20 . comm - The communicator for the DM object
21 
22   Output Parameter:
23 . dm - The DM object
24 
25   Level: beginner
26 
27 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
28 @*/
29 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
30 {
31   DM             v;
32   PetscErrorCode ierr;
33 
34   PetscFunctionBegin;
35   PetscValidPointer(dm,2);
36   *dm = NULL;
37   ierr = PetscSysInitializePackage();CHKERRQ(ierr);
38   ierr = VecInitializePackage();CHKERRQ(ierr);
39   ierr = MatInitializePackage();CHKERRQ(ierr);
40   ierr = DMInitializePackage();CHKERRQ(ierr);
41 
42   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
43   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
44 
45 
46   v->ltogmap              = NULL;
47   v->ltogmapb             = NULL;
48   v->bs                   = 1;
49   v->coloringtype         = IS_COLORING_GLOBAL;
50   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
51   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
52   v->defaultSection       = NULL;
53   v->defaultGlobalSection = NULL;
54   {
55     PetscInt i;
56     for (i = 0; i < 10; ++i) {
57       v->nullspaceConstructors[i] = NULL;
58     }
59   }
60   v->numFields = 0;
61   v->fields    = NULL;
62   ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
63   ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
64   *dm = v;
65   PetscFunctionReturn(0);
66 }
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "DMClone"
70 /*@
71   DMClone - Creates a DM object with the same topology as the original.
72 
73   Collective on MPI_Comm
74 
75   Input Parameter:
76 . dm - The original DM object
77 
78   Output Parameter:
79 . newdm  - The new DM object
80 
81   Level: beginner
82 
83 .keywords: DM, topology, create
84 @*/
85 PetscErrorCode DMClone(DM dm, DM *newdm)
86 {
87   PetscSF        sf;
88   Vec            coords;
89   void          *ctx;
90   PetscErrorCode ierr;
91 
92   PetscFunctionBegin;
93   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
94   PetscValidPointer(newdm,2);
95   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
96   if (dm->ops->clone) {
97     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
98   }
99   (*newdm)->setupcalled = PETSC_TRUE;
100   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
101   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
102   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
103   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
104   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
105   if (coords) {
106     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
107   } else {
108     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
109     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
110   }
111   PetscFunctionReturn(0);
112 }
113 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Notes:
   Any previously set vector type string is freed and replaced by a copy of ctype.

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
142 
#undef __FUNCT__
#define __FUNCT__ "DMGetVecType"
/*@C
       DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Not Collective

   Input Parameter:
.  da - initial distributed array

   Output Parameter:
.  ctype - the vector type (borrowed reference to the DM's internal string; do not free)

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  *ctype = da->vectype;
  PetscFunctionReturn(0);
}
167 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (NULL if no DM has been associated with the vector via VecSetDM())

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored as a composed PetscObject under the key "__PETSc_dm" (see VecSetDM()) */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
195 
#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
  VecSetDM - Sets the DM defining the data layout of the vector

  Not collective

  Input Parameters:
+ v - The Vec
- dm - The DM; may be NULL to remove a previously set association

  Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Compose under the same key that VecGetDM() queries */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
221 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Notes:
   Any previously set matrix type string is freed and replaced by a copy of ctype.

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
250 
#undef __FUNCT__
#define __FUNCT__ "DMGetMatType"
/*@C
       DMGetMatType - Gets the type of matrix created with DMCreateMatrix()

   Not Collective

   Input Parameter:
.  dm - the DM context

   Output Parameter:
.  ctype - the matrix type (borrowed reference to the DM's internal string; do not free)

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
@*/
PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *ctype = dm->mattype;
  PetscFunctionReturn(0);
}
278 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (NULL if no DM has been associated with the matrix via MatSetDM())

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored as a composed PetscObject under the key "__PETSc_dm" (see MatSetDM()) */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
306 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM; may be NULL to remove a previously set association

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Compose under the same key that MatGetDM() queries */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
332 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
364 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Notes:
    The DM holds references to vectors that in turn hold a reference back to the DM, so the
    reference count is adjusted for those circular references before deciding whether the DM
    is really ready to be freed.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only self-references remain: if external holders still exist, just drop our count and return */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = NULL;

  for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedlocal = NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Type-specific teardown (DMDA, DMPlex, ...) runs last, before the header itself goes */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
521 
522 #undef __FUNCT__
523 #define __FUNCT__ "DMSetUp"
524 /*@
525     DMSetUp - sets up the data structures inside a DM object
526 
527     Collective on DM
528 
529     Input Parameter:
530 .   dm - the DM object to setup
531 
532     Level: developer
533 
534 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
535 
536 @*/
537 PetscErrorCode  DMSetUp(DM dm)
538 {
539   PetscErrorCode ierr;
540 
541   PetscFunctionBegin;
542   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
543   if (dm->setupcalled) PetscFunctionReturn(0);
544   if (dm->ops->setup) {
545     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
546   }
547   dm->setupcalled = PETSC_TRUE;
548   PetscFunctionReturn(0);
549 }
550 
551 #undef __FUNCT__
552 #define __FUNCT__ "DMSetFromOptions"
553 /*@
554     DMSetFromOptions - sets parameters in a DM from the options database
555 
556     Collective on DM
557 
558     Input Parameter:
559 .   dm - the DM object to set options for
560 
561     Options Database:
562 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
563 .   -dm_vec_type <type>  type of vector to create inside DM
564 .   -dm_mat_type <type>  type of matrix to create inside DM
565 -   -dm_coloring_type <global or ghosted>
566 
567     Level: developer
568 
569 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
570 
571 @*/
572 PetscErrorCode  DMSetFromOptions(DM dm)
573 {
574   char           typeName[256];
575   PetscBool      flg;
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
580   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
581   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
582   ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
583   if (flg) {
584     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
585   }
586   ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
587   if (flg) {
588     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
589   }
590   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
591   if (dm->ops->setfromoptions) {
592     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
593   }
594   /* process any options handlers added with PetscObjectAddOptionsHandler() */
595   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
596   ierr = PetscOptionsEnd();CHKERRQ(ierr);
597   PetscFunctionReturn(0);
598 }
599 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; if NULL, the standard-out ASCII viewer on the DM's communicator is used

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: the DM file classid followed by a fixed 256-byte type name */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Delegate the body of the output to the implementation, if it has a view op */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
640 
641 #undef __FUNCT__
642 #define __FUNCT__ "DMCreateGlobalVector"
643 /*@
644     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
645 
646     Collective on DM
647 
648     Input Parameter:
649 .   dm - the DM object
650 
651     Output Parameter:
652 .   vec - the global vector
653 
654     Level: beginner
655 
656 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
657 
658 @*/
659 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
660 {
661   PetscErrorCode ierr;
662 
663   PetscFunctionBegin;
664   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
665   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
666   PetscFunctionReturn(0);
667 }
668 
669 #undef __FUNCT__
670 #define __FUNCT__ "DMCreateLocalVector"
671 /*@
672     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
673 
674     Not Collective
675 
676     Input Parameter:
677 .   dm - the DM object
678 
679     Output Parameter:
680 .   vec - the local vector
681 
682     Level: beginner
683 
684 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
685 
686 @*/
687 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
688 {
689   PetscErrorCode ierr;
690 
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
693   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
694   PetscFunctionReturn(0);
695 }
696 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first call: from the default PetscSection if one
   has been set, otherwise by the type-specific getlocaltoglobalmapping op.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the mapping from the local and global sections: for each chart point,
         map its local dofs (in chart order) to consecutive global offsets */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc1(size, &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
757 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

   When the block size is 1 the blocked mapping is just the point mapping from
   DMGetLocalToGlobalMapping(), shared via an extra reference.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
800 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Notes:
   Errors if the block size has not yet been determined (dm->bs < 1).

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
827 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* The coarse DM's implementation builds the interpolation toward the finer DM */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
865 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* The coarse DM's implementation builds the injection from the finer DM */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
898 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@
    DMCreateColoring - Gets coloring for a DM

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Not every DM implementation provides a coloring; fail with a clear message */
  if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
928 
929 #undef __FUNCT__
930 #define __FUNCT__ "DMCreateMatrix"
931 /*@
932     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
933 
934     Collective on DM
935 
936     Input Parameter:
937 .   dm - the DM object
938 
939     Output Parameter:
940 .   mat - the empty Jacobian
941 
942     Level: beginner
943 
944     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
945        do not need to do it yourself.
946 
947        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
948        the nonzero pattern call DMDASetMatPreallocateOnly()
949 
950        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
951        internally by PETSc.
952 
953        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
954        the indices for the global numbering for DMDAs which is complicated.
955 
956 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
957 
958 @*/
959 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
960 {
961   PetscErrorCode ierr;
962 
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
965   ierr = MatInitializePackage();CHKERRQ(ierr);
966   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
967   PetscValidPointer(mat,3);
968   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
969   PetscFunctionReturn(0);
970 }
971 
972 #undef __FUNCT__
973 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
974 /*@
975   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
976     preallocated but the nonzero structure and zero values will not be set.
977 
978   Logically Collective on DMDA
979 
980   Input Parameter:
981 + dm - the DM
982 - only - PETSC_TRUE if only want preallocation
983 
984   Level: developer
985 .seealso DMCreateMatrix()
986 @*/
987 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
988 {
989   PetscFunctionBegin;
990   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
991   dm->prealloc_only = only;
992   PetscFunctionReturn(0);
993 }
994 
995 #undef __FUNCT__
996 #define __FUNCT__ "DMGetWorkArray"
997 /*@C
998   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
999 
1000   Not Collective
1001 
1002   Input Parameters:
1003 + dm - the DM object
1004 . count - The minium size
1005 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1006 
1007   Output Parameter:
1008 . array - the work array
1009 
1010   Level: developer
1011 
1012 .seealso DMDestroy(), DMCreate()
1013 @*/
1014 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1015 {
1016   PetscErrorCode ierr;
1017   DMWorkLink     link;
1018   size_t         size;
1019 
1020   PetscFunctionBegin;
1021   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1022   PetscValidPointer(mem,4);
1023   if (dm->workin) {
1024     link       = dm->workin;
1025     dm->workin = dm->workin->next;
1026   } else {
1027     ierr = PetscNewLog(dm,&link);CHKERRQ(ierr);
1028   }
1029   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1030   if (size*count > link->bytes) {
1031     ierr        = PetscFree(link->mem);CHKERRQ(ierr);
1032     ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1033     link->bytes = size*count;
1034   }
1035   link->next   = dm->workout;
1036   dm->workout  = link;
1037   *(void**)mem = link->mem;
1038   PetscFunctionReturn(0);
1039 }
1040 
1041 #undef __FUNCT__
1042 #define __FUNCT__ "DMRestoreWorkArray"
1043 /*@C
1044   DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
1045 
1046   Not Collective
1047 
1048   Input Parameters:
1049 + dm - the DM object
1050 . count - The minium size
1051 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1052 
1053   Output Parameter:
1054 . array - the work array
1055 
1056   Level: developer
1057 
1058 .seealso DMDestroy(), DMCreate()
1059 @*/
1060 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1061 {
1062   DMWorkLink *p,link;
1063 
1064   PetscFunctionBegin;
1065   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1066   PetscValidPointer(mem,4);
1067   for (p=&dm->workout; (link=*p); p=&link->next) {
1068     if (link->mem == *(void**)mem) {
1069       *p           = link->next;
1070       link->next   = dm->workin;
1071       dm->workin   = link;
1072       *(void**)mem = NULL;
1073       PetscFunctionReturn(0);
1074     }
1075   }
1076   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1077   PetscFunctionReturn(0);
1078 }
1079 
1080 #undef __FUNCT__
1081 #define __FUNCT__ "DMSetNullSpaceConstructor"
1082 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1083 {
1084   PetscFunctionBegin;
1085   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1086   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1087   dm->nullspaceConstructors[field] = nullsp;
1088   PetscFunctionReturn(0);
1089 }
1090 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize only those the caller requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  /* Prefer the PetscSection description of the layout; fall back to the DM implementation */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    /* Pass 1: count owned, unconstrained dofs per field */
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) { /* gdof <= 0 marks points this process does not own */
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof; /* constrained dofs are excluded from the IS */
        }
      }
    }
    /* Pass 2: allocate index arrays and fill them, reusing fieldSizes[] as a write cursor */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc1(fieldSizes[f], &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          /* global offsets for a point's fields are laid out consecutively starting at goff */
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc1(nF, fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    /* NOTE(review): if fields is NULL, the fieldIndices[f] arrays allocated above are never
       freed (PetscFree2 releases only the array of pointers) -- possible leak; confirm callers
       always request fields when a section is present */
    if (fields) {
      ierr = PetscMalloc1(nF, fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f] */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1203 
1204 
1205 #undef __FUNCT__
1206 #define __FUNCT__ "DMCreateFieldDecomposition"
1207 /*@C
1208   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1209                           corresponding to different fields: each IS contains the global indices of the dofs of the
1210                           corresponding field. The optional list of DMs define the DM for each subproblem.
1211                           Generalizes DMCreateFieldIS().
1212 
1213   Not collective
1214 
1215   Input Parameter:
1216 . dm - the DM object
1217 
1218   Output Parameters:
1219 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1220 . namelist  - The name for each field (or NULL if not requested)
1221 . islist    - The global indices for each field (or NULL if not requested)
1222 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1223 
1224   Level: intermediate
1225 
1226   Notes:
1227   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1228   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1229   and all of the arrays should be freed with PetscFree().
1230 
1231 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1232 @*/
1233 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1234 {
1235   PetscErrorCode ierr;
1236 
1237   PetscFunctionBegin;
1238   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1239   if (len) {
1240     PetscValidPointer(len,2);
1241     *len = 0;
1242   }
1243   if (namelist) {
1244     PetscValidPointer(namelist,3);
1245     *namelist = 0;
1246   }
1247   if (islist) {
1248     PetscValidPointer(islist,4);
1249     *islist = 0;
1250   }
1251   if (dmlist) {
1252     PetscValidPointer(dmlist,5);
1253     *dmlist = 0;
1254   }
1255   /*
1256    Is it a good idea to apply the following check across all impls?
1257    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1258    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1259    */
1260   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1261   if (!dm->ops->createfielddecomposition) {
1262     PetscSection section;
1263     PetscInt     numFields, f;
1264 
1265     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1266     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1267     if (section && numFields && dm->ops->createsubdm) {
1268       *len = numFields;
1269       ierr = PetscMalloc3(numFields,namelist,numFields,islist,numFields,dmlist);CHKERRQ(ierr);
1270       for (f = 0; f < numFields; ++f) {
1271         const char *fieldName;
1272 
1273         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1274         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1275         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1276       }
1277     } else {
1278       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1279       /* By default there are no DMs associated with subproblems. */
1280       if (dmlist) *dmlist = NULL;
1281     }
1282   } else {
1283     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1284   }
1285   PetscFunctionReturn(0);
1286 }
1287 
1288 #undef __FUNCT__
1289 #define __FUNCT__ "DMCreateSubDM"
1290 /*@C
1291   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1292                   The fields are defined by DMCreateFieldIS().
1293 
1294   Not collective
1295 
1296   Input Parameters:
1297 + dm - the DM object
1298 . numFields - number of fields in this subproblem
1299 - len       - The number of subproblems in the decomposition (or NULL if not requested)
1300 
1301   Output Parameters:
1302 . is - The global indices for the subproblem
1303 - dm - The DM for the subproblem
1304 
1305   Level: intermediate
1306 
1307 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1308 @*/
1309 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1310 {
1311   PetscErrorCode ierr;
1312 
1313   PetscFunctionBegin;
1314   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1315   PetscValidPointer(fields,3);
1316   if (is) PetscValidPointer(is,4);
1317   if (subdm) PetscValidPointer(subdm,5);
1318   if (dm->ops->createsubdm) {
1319     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1320   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1321   PetscFunctionReturn(0);
1322 }
1323 
1324 
1325 #undef __FUNCT__
1326 #define __FUNCT__ "DMCreateDomainDecomposition"
1327 /*@C
1328   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1329                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1330                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1331                           define a nonoverlapping covering, while outer subdomains can overlap.
1332                           The optional list of DMs define the DM for each subproblem.
1333 
1334   Not collective
1335 
1336   Input Parameter:
1337 . dm - the DM object
1338 
1339   Output Parameters:
1340 + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1341 . namelist    - The name for each subdomain (or NULL if not requested)
1342 . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1343 . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1344 - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1345 
1346   Level: intermediate
1347 
1348   Notes:
1349   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1350   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1351   and all of the arrays should be freed with PetscFree().
1352 
1353 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1354 @*/
1355 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1356 {
1357   PetscErrorCode      ierr;
1358   DMSubDomainHookLink link;
1359   PetscInt            i,l;
1360 
1361   PetscFunctionBegin;
1362   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1363   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1364   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
1365   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
1366   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
1367   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
1368   /*
1369    Is it a good idea to apply the following check across all impls?
1370    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1371    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1372    */
1373   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1374   if (dm->ops->createdomaindecomposition) {
1375     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1376     /* copy subdomain hooks and context over to the subdomain DMs */
1377     if (dmlist) {
1378       for (i = 0; i < l; i++) {
1379         for (link=dm->subdomainhook; link; link=link->next) {
1380           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1381         }
1382         (*dmlist)[i]->ctx = dm->ctx;
1383       }
1384     }
1385     if (len) *len = l;
1386   }
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 
1391 #undef __FUNCT__
1392 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1393 /*@C
1394   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1395 
1396   Not collective
1397 
1398   Input Parameters:
1399 + dm - the DM object
1400 . n  - the number of subdomain scatters
1401 - subdms - the local subdomains
1402 
1403   Output Parameters:
1404 + n     - the number of scatters returned
1405 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1406 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1407 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1408 
1409   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1410   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1411   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1412   solution and residual data.
1413 
1414   Level: developer
1415 
1416 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1417 @*/
1418 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1419 {
1420   PetscErrorCode ierr;
1421 
1422   PetscFunctionBegin;
1423   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1424   PetscValidPointer(subdms,3);
1425   if (dm->ops->createddscatters) {
1426     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1427   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1428   PetscFunctionReturn(0);
1429 }
1430 
1431 #undef __FUNCT__
1432 #define __FUNCT__ "DMRefine"
1433 /*@
1434   DMRefine - Refines a DM object
1435 
1436   Collective on DM
1437 
1438   Input Parameter:
1439 + dm   - the DM object
1440 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1441 
1442   Output Parameter:
1443 . dmf - the refined DM, or NULL
1444 
1445   Note: If no refinement was done, the return value is NULL
1446 
1447   Level: developer
1448 
1449 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1450 @*/
1451 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1452 {
1453   PetscErrorCode   ierr;
1454   DMRefineHookLink link;
1455 
1456   PetscFunctionBegin;
1457   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1458   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1459   if (*dmf) {
1460     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1461 
1462     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1463 
1464     (*dmf)->ctx       = dm->ctx;
1465     (*dmf)->leveldown = dm->leveldown;
1466     (*dmf)->levelup   = dm->levelup + 1;
1467 
1468     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1469     for (link=dm->refinehook; link; link=link->next) {
1470       if (link->refinehook) {
1471         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1472       }
1473     }
1474   }
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 #undef __FUNCT__
1479 #define __FUNCT__ "DMRefineHookAdd"
1480 /*@C
1481    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1482 
1483    Logically Collective
1484 
1485    Input Arguments:
1486 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1487 .  refinehook - function to run when setting up a coarser level
1488 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1489 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1490 
1491    Calling sequence of refinehook:
1492 $    refinehook(DM coarse,DM fine,void *ctx);
1493 
1494 +  coarse - coarse level DM
1495 .  fine - fine level DM to interpolate problem to
1496 -  ctx - optional user-defined function context
1497 
1498    Calling sequence for interphook:
1499 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1500 
1501 +  coarse - coarse level DM
1502 .  interp - matrix interpolating a coarse-level solution to the finer grid
1503 .  fine - fine level DM to update
1504 -  ctx - optional user-defined function context
1505 
1506    Level: advanced
1507 
1508    Notes:
1509    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1510 
1511    If this function is called multiple times, the hooks will be run in the order they are added.
1512 
1513    This function is currently not available from Fortran.
1514 
1515 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1516 @*/
1517 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1518 {
1519   PetscErrorCode   ierr;
1520   DMRefineHookLink link,*p;
1521 
1522   PetscFunctionBegin;
1523   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1524   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1525   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1526   link->refinehook = refinehook;
1527   link->interphook = interphook;
1528   link->ctx        = ctx;
1529   link->next       = NULL;
1530   *p               = link;
1531   PetscFunctionReturn(0);
1532 }
1533 
1534 #undef __FUNCT__
1535 #define __FUNCT__ "DMInterpolate"
1536 /*@
1537    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1538 
1539    Collective if any hooks are
1540 
1541    Input Arguments:
1542 +  coarse - coarser DM to use as a base
1543 .  restrct - interpolation matrix, apply using MatInterpolate()
1544 -  fine - finer DM to update
1545 
1546    Level: developer
1547 
1548 .seealso: DMRefineHookAdd(), MatInterpolate()
1549 @*/
1550 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1551 {
1552   PetscErrorCode   ierr;
1553   DMRefineHookLink link;
1554 
1555   PetscFunctionBegin;
1556   for (link=fine->refinehook; link; link=link->next) {
1557     if (link->interphook) {
1558       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1559     }
1560   }
1561   PetscFunctionReturn(0);
1562 }
1563 
1564 #undef __FUNCT__
1565 #define __FUNCT__ "DMGetRefineLevel"
1566 /*@
1567     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1568 
1569     Not Collective
1570 
1571     Input Parameter:
1572 .   dm - the DM object
1573 
1574     Output Parameter:
1575 .   level - number of refinements
1576 
1577     Level: developer
1578 
1579 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1580 
1581 @*/
1582 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1583 {
1584   PetscFunctionBegin;
1585   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1586   *level = dm->levelup;
1587   PetscFunctionReturn(0);
1588 }
1589 
1590 #undef __FUNCT__
1591 #define __FUNCT__ "DMGlobalToLocalHookAdd"
1592 /*@C
1593    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1594 
1595    Logically Collective
1596 
1597    Input Arguments:
1598 +  dm - the DM
1599 .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1600 .  endhook - function to run after DMGlobalToLocalEnd() has completed
1601 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1602 
1603    Calling sequence for beginhook:
1604 $    beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1605 
1606 +  dm - global DM
1607 .  g - global vector
1608 .  mode - mode
1609 .  l - local vector
1610 -  ctx - optional user-defined function context
1611 
1612 
1613    Calling sequence for endhook:
1614 $    endhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1615 
1616 +  global - global DM
1617 -  ctx - optional user-defined function context
1618 
1619    Level: advanced
1620 
1621 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1622 @*/
1623 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1624 {
1625   PetscErrorCode          ierr;
1626   DMGlobalToLocalHookLink link,*p;
1627 
1628   PetscFunctionBegin;
1629   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1630   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1631   ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
1632   link->beginhook = beginhook;
1633   link->endhook   = endhook;
1634   link->ctx       = ctx;
1635   link->next      = NULL;
1636   *p              = link;
1637   PetscFunctionReturn(0);
1638 }
1639 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (registered with DMGlobalToLocalHookAdd()) before any communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* If a default SF exists, use it for the scatter; otherwise defer to the DM implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    /* Start an asynchronous broadcast from global to local entries; completed in DMGlobalToLocalEnd().
       NOTE(review): the arrays are restored before PetscSFBcastEnd() runs -- assumes the Vec arrays
       remain valid/unmoved between Begin and End; confirm against the Vec implementation in use */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The *_ALL_VALUES modes are a section-based concept; map them to plain modes for the impl */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1687 
1688 #undef __FUNCT__
1689 #define __FUNCT__ "DMGlobalToLocalEnd"
1690 /*@
1691     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1692 
1693     Neighbor-wise Collective on DM
1694 
1695     Input Parameters:
1696 +   dm - the DM object
1697 .   g - the global vector
1698 .   mode - INSERT_VALUES or ADD_VALUES
1699 -   l - the local vector
1700 
1701 
1702     Level: beginner
1703 
1704 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1705 
1706 @*/
1707 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1708 {
1709   PetscSF                 sf;
1710   PetscErrorCode          ierr;
1711   PetscScalar             *lArray, *gArray;
1712   DMGlobalToLocalHookLink link;
1713 
1714   PetscFunctionBegin;
1715   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1716   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1717   if (sf) {
1718     if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1719 
1720     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1721     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1722     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1723     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1724     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1725   } else {
1726     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1727   }
1728   for (link=dm->gtolhook; link; link=link->next) {
1729     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1730   }
1731   PetscFunctionReturn(0);
1732 }
1733 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors; must be followed by DMLocalToGlobalEnd()

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* A default SF exists: perform the communication directly with a star-forest reduction */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the MPI reduction operation */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): the arrays are restored while the reduction may still be in flight;
       this assumes PetscSFReduceEnd() is given the same array addresses -- confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No SF: defer to the implementation; the *_ALL_VALUES modes are collapsed to their basic counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1789 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors; must be preceded by DMLocalToGlobalBegin()

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Finish the star-forest reduction started in DMLocalToGlobalBegin() */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Recompute the same reduction op chosen in the Begin call */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No SF: defer to the implementation; *_ALL_VALUES modes are collapsed to their basic counterparts */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1841 
1842 #undef __FUNCT__
1843 #define __FUNCT__ "DMLocalToLocalBegin"
1844 /*@
1845    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1846    that contain irrelevant values) to another local vector where the ghost
1847    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1848 
1849    Neighbor-wise Collective on DM and Vec
1850 
1851    Input Parameters:
1852 +  dm - the DM object
1853 .  g - the original local vector
1854 -  mode - one of INSERT_VALUES or ADD_VALUES
1855 
1856    Output Parameter:
1857 .  l  - the local vector with correct ghost values
1858 
1859    Level: intermediate
1860 
1861    Notes:
1862    The local vectors used here need not be the same as those
1863    obtained from DMCreateLocalVector(), BUT they
1864    must have the same parallel data layout; they could, for example, be
1865    obtained with VecDuplicate() from the DM originating vectors.
1866 
1867 .keywords: DM, local-to-local, begin
1868 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1869 
1870 @*/
1871 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1872 {
1873   PetscErrorCode          ierr;
1874 
1875   PetscFunctionBegin;
1876   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1877   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1878   PetscFunctionReturn(0);
1879 }
1880 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToLocalEnd"
/*@
   DMLocalToLocalEnd - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().

   Neighbor-wise Collective on DM and Vec

   Input Parameters:
+  dm - the DM object
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the local vector with correct ghost values

   Level: intermediate

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DM originating vectors.

.keywords: DM, local-to-local, end
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Defer to the implementation; *_ALL_VALUES modes are collapsed to their basic counterparts */
  ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1919 
1920 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsen"
/*@
    DMCoarsen - Coarsens a DM object

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

    Output Parameter:
.   dmc - the coarsened DM

    Level: developer

.seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Let the implementation build the coarse DM, then propagate settings from the fine DM */
  ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
  (*dmc)->ops->creatematrix = dm->ops->creatematrix;
  ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
  (*dmc)->ctx               = dm->ctx;              /* the user context is shared, not copied */
  (*dmc)->levelup           = dm->levelup;
  (*dmc)->leveldown         = dm->leveldown + 1;    /* record one more coarsening from the original DM */
  ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
  /* Run user hooks registered with DMCoarsenHookAdd(), in registration order,
     after the coarse DM is fully configured */
  for (link=dm->coarsenhook; link; link=link->next) {
    if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1959 
1960 #undef __FUNCT__
1961 #define __FUNCT__ "DMCoarsenHookAdd"
1962 /*@C
1963    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1964 
1965    Logically Collective
1966 
1967    Input Arguments:
1968 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1969 .  coarsenhook - function to run when setting up a coarser level
1970 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1971 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1972 
1973    Calling sequence of coarsenhook:
1974 $    coarsenhook(DM fine,DM coarse,void *ctx);
1975 
1976 +  fine - fine level DM
1977 .  coarse - coarse level DM to restrict problem to
1978 -  ctx - optional user-defined function context
1979 
1980    Calling sequence for restricthook:
1981 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1982 
1983 +  fine - fine level DM
1984 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1985 .  rscale - scaling vector for restriction
1986 .  inject - matrix restricting by injection
1987 .  coarse - coarse level DM to update
1988 -  ctx - optional user-defined function context
1989 
1990    Level: advanced
1991 
1992    Notes:
1993    This function is only needed if auxiliary data needs to be set up on coarse grids.
1994 
1995    If this function is called multiple times, the hooks will be run in the order they are added.
1996 
1997    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1998    extract the finest level information from its context (instead of from the SNES).
1999 
2000    This function is currently not available from Fortran.
2001 
2002 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2003 @*/
2004 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2005 {
2006   PetscErrorCode    ierr;
2007   DMCoarsenHookLink link,*p;
2008 
2009   PetscFunctionBegin;
2010   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2011   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2012   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2013   link->coarsenhook  = coarsenhook;
2014   link->restricthook = restricthook;
2015   link->ctx          = ctx;
2016   link->next         = NULL;
2017   *p                 = link;
2018   PetscFunctionReturn(0);
2019 }
2020 
#undef __FUNCT__
#define __FUNCT__ "DMRestrict"
/*@
   DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()

   Collective if any hooks are

   Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
{
  PetscErrorCode    ierr;
  DMCoarsenHookLink link;

  PetscFunctionBegin;
  /* Run every restriction hook, in registration order; entries without a
     restricthook (coarsenhook-only registrations) are skipped */
  for (link=fine->coarsenhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
2051 
2052 #undef __FUNCT__
2053 #define __FUNCT__ "DMSubDomainHookAdd"
2054 /*@C
2055    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2056 
2057    Logically Collective
2058 
2059    Input Arguments:
2060 +  global - global DM
2061 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2062 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2063 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2064 
2065 
2066    Calling sequence for ddhook:
2067 $    ddhook(DM global,DM block,void *ctx)
2068 
2069 +  global - global DM
2070 .  block  - block DM
2071 -  ctx - optional user-defined function context
2072 
2073    Calling sequence for restricthook:
2074 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2075 
2076 +  global - global DM
2077 .  out    - scatter to the outer (with ghost and overlap points) block vector
2078 .  in     - scatter to block vector values only owned locally
2079 .  block  - block DM
2080 -  ctx - optional user-defined function context
2081 
2082    Level: advanced
2083 
2084    Notes:
2085    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2086 
2087    If this function is called multiple times, the hooks will be run in the order they are added.
2088 
2089    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2090    extract the global information from its context (instead of from the SNES).
2091 
2092    This function is currently not available from Fortran.
2093 
2094 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2095 @*/
2096 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2097 {
2098   PetscErrorCode      ierr;
2099   DMSubDomainHookLink link,*p;
2100 
2101   PetscFunctionBegin;
2102   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2103   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2104   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2105   link->restricthook = restricthook;
2106   link->ddhook       = ddhook;
2107   link->ctx          = ctx;
2108   link->next         = NULL;
2109   *p                 = link;
2110   PetscFunctionReturn(0);
2111 }
2112 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update

   Level: developer

.seealso: DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;

  PetscFunctionBegin;
  /* Run every subdomain restriction hook, in registration order; entries
     without a restricthook (ddhook-only registrations) are skipped */
  for (link=global->subdomainhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
2143 
2144 #undef __FUNCT__
2145 #define __FUNCT__ "DMGetCoarsenLevel"
2146 /*@
2147     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2148 
2149     Not Collective
2150 
2151     Input Parameter:
2152 .   dm - the DM object
2153 
2154     Output Parameter:
2155 .   level - number of coarsenings
2156 
2157     Level: developer
2158 
2159 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2160 
2161 @*/
2162 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2163 {
2164   PetscFunctionBegin;
2165   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2166   *level = dm->leveldown;
2167   PetscFunctionReturn(0);
2168 }
2169 
2170 
2171 
2172 #undef __FUNCT__
2173 #define __FUNCT__ "DMRefineHierarchy"
2174 /*@C
2175     DMRefineHierarchy - Refines a DM object, all levels at once
2176 
2177     Collective on DM
2178 
2179     Input Parameter:
2180 +   dm - the DM object
2181 -   nlevels - the number of levels of refinement
2182 
2183     Output Parameter:
2184 .   dmf - the refined DM hierarchy
2185 
2186     Level: developer
2187 
2188 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2189 
2190 @*/
2191 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2192 {
2193   PetscErrorCode ierr;
2194 
2195   PetscFunctionBegin;
2196   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2197   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2198   if (nlevels == 0) PetscFunctionReturn(0);
2199   if (dm->ops->refinehierarchy) {
2200     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2201   } else if (dm->ops->refine) {
2202     PetscInt i;
2203 
2204     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2205     for (i=1; i<nlevels; i++) {
2206       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2207     }
2208   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2209   PetscFunctionReturn(0);
2210 }
2211 
2212 #undef __FUNCT__
2213 #define __FUNCT__ "DMCoarsenHierarchy"
2214 /*@C
2215     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2216 
2217     Collective on DM
2218 
2219     Input Parameter:
2220 +   dm - the DM object
2221 -   nlevels - the number of levels of coarsening
2222 
2223     Output Parameter:
2224 .   dmc - the coarsened DM hierarchy
2225 
2226     Level: developer
2227 
2228 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2229 
2230 @*/
2231 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2232 {
2233   PetscErrorCode ierr;
2234 
2235   PetscFunctionBegin;
2236   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2237   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2238   if (nlevels == 0) PetscFunctionReturn(0);
2239   PetscValidPointer(dmc,3);
2240   if (dm->ops->coarsenhierarchy) {
2241     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2242   } else if (dm->ops->coarsen) {
2243     PetscInt i;
2244 
2245     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2246     for (i=1; i<nlevels; i++) {
2247       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2248     }
2249   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2250   PetscFunctionReturn(0);
2251 }
2252 
#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.  rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* NOTE(review): getaggregates is dispatched without a NULL check; a DM type
     that does not implement it would crash here rather than raise PETSC_ERR_SUP -- confirm intent */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2284 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function (may be NULL to clear a previously set function)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked when the DM itself is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2308 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Notes: the DM does not copy or take ownership of the context; the caller is
           responsible for its lifetime unless a destroy function is registered
           with DMSetApplicationContextDestroy().

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2332 
2333 #undef __FUNCT__
2334 #define __FUNCT__ "DMGetApplicationContext"
2335 /*@
2336     DMGetApplicationContext - Gets a user context from a DM object
2337 
2338     Not Collective
2339 
2340     Input Parameter:
2341 .   dm - the DM object
2342 
2343     Output Parameter:
2344 .   ctx - the user context
2345 
2346     Level: intermediate
2347 
2348 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2349 
2350 @*/
2351 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2352 {
2353   PetscFunctionBegin;
2354   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2355   *(void**)ctx = dm->ctx;
2356   PetscFunctionReturn(0);
2357 }
2358 
2359 #undef __FUNCT__
2360 #define __FUNCT__ "DMSetVariableBounds"
2361 /*@C
2362     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2363 
2364     Logically Collective on DM
2365 
2366     Input Parameter:
2367 +   dm - the DM object
2368 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2369 
2370     Level: intermediate
2371 
2372 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2373          DMSetJacobian()
2374 
2375 @*/
2376 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2377 {
2378   PetscFunctionBegin;
2379   dm->ops->computevariablebounds = f;
2380   PetscFunctionReturn(0);
2381 }
2382 
2383 #undef __FUNCT__
2384 #define __FUNCT__ "DMHasVariableBounds"
2385 /*@
2386     DMHasVariableBounds - does the DM object have a variable bounds function?
2387 
2388     Not Collective
2389 
2390     Input Parameter:
2391 .   dm - the DM object to destroy
2392 
2393     Output Parameter:
2394 .   flg - PETSC_TRUE if the variable bounds function exists
2395 
2396     Level: developer
2397 
2398 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2399 
2400 @*/
2401 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2402 {
2403   PetscFunctionBegin;
2404   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2405   PetscFunctionReturn(0);
2406 }
2407 
2408 #undef __FUNCT__
2409 #define __FUNCT__ "DMComputeVariableBounds"
2410 /*@C
2411     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2412 
2413     Logically Collective on DM
2414 
2415     Input Parameters:
2416 +   dm - the DM object to destroy
2417 -   x  - current solution at which the bounds are computed
2418 
2419     Output parameters:
2420 +   xl - lower bound
2421 -   xu - upper bound
2422 
2423     Level: intermediate
2424 
2425 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2426 
2427 @*/
2428 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2429 {
2430   PetscErrorCode ierr;
2431 
2432   PetscFunctionBegin;
2433   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2434   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2435   if (dm->ops->computevariablebounds) {
2436     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2437   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2438   PetscFunctionReturn(0);
2439 }
2440 
2441 #undef __FUNCT__
2442 #define __FUNCT__ "DMHasColoring"
2443 /*@
2444     DMHasColoring - does the DM object have a method of providing a coloring?
2445 
2446     Not Collective
2447 
2448     Input Parameter:
2449 .   dm - the DM object
2450 
2451     Output Parameter:
2452 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2453 
2454     Level: developer
2455 
2456 .seealso DMHasFunction(), DMCreateColoring()
2457 
2458 @*/
2459 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2460 {
2461   PetscFunctionBegin;
2462   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2463   PetscFunctionReturn(0);
2464 }
2465 
2466 #undef  __FUNCT__
2467 #define __FUNCT__ "DMSetVec"
2468 /*@C
2469     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2470 
2471     Collective on DM
2472 
2473     Input Parameter:
2474 +   dm - the DM object
2475 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2476 
2477     Level: developer
2478 
2479 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2480 
2481 @*/
2482 PetscErrorCode  DMSetVec(DM dm,Vec x)
2483 {
2484   PetscErrorCode ierr;
2485 
2486   PetscFunctionBegin;
2487   if (x) {
2488     if (!dm->x) {
2489       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2490     }
2491     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2492   } else if (dm->x) {
2493     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2494   }
2495   PetscFunctionReturn(0);
2496 }
2497 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFunctionList DMList              = NULL;
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2500 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMCOMPOSITE, or DMSLICED).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM is already of the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation before running the new type's constructor */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2546 
2547 #undef __FUNCT__
2548 #define __FUNCT__ "DMGetType"
2549 /*@C
2550   DMGetType - Gets the DM type name (as a string) from the DM.
2551 
2552   Not Collective
2553 
2554   Input Parameter:
2555 . dm  - The DM
2556 
2557   Output Parameter:
2558 . type - The DM type name
2559 
2560   Level: intermediate
2561 
2562 .keywords: DM, get, type, name
2563 .seealso: DMSetType(), DMCreate()
2564 @*/
2565 PetscErrorCode  DMGetType(DM dm, DMType *type)
2566 {
2567   PetscErrorCode ierr;
2568 
2569   PetscFunctionBegin;
2570   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2571   PetscValidCharPointer(type,2);
2572   if (!DMRegisterAllCalled) {
2573     ierr = DMRegisterAll();CHKERRQ(ierr);
2574   }
2575   *type = ((PetscObject)dm)->type_name;
2576   PetscFunctionReturn(0);
2577 }
2578 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below;
     presumably intended to short-circuit same-type conversion -- confirm */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<fromtype>_<totype>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A temporary DM of the target type is created just to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No basic converter exists yet; report failure instead */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2674 
2675 /*--------------------------------------------------------------------------------------------------------------------*/
2676 
2677 #undef __FUNCT__
2678 #define __FUNCT__ "DMRegister"
2679 /*@C
2680   DMRegister -  Adds a new DM component implementation
2681 
2682   Not Collective
2683 
2684   Input Parameters:
2685 + name        - The name of a new user-defined creation routine
2686 - create_func - The creation routine itself
2687 
2688   Notes:
2689   DMRegister() may be called multiple times to add several user-defined DMs
2690 
2691 
2692   Sample usage:
2693 .vb
2694     DMRegister("my_da", MyDMCreate);
2695 .ve
2696 
2697   Then, your DM type can be chosen with the procedural interface via
2698 .vb
2699     DMCreate(MPI_Comm, DM *);
2700     DMSetType(DM,"my_da");
2701 .ve
2702    or at runtime via the option
2703 .vb
2704     -da_type my_da
2705 .ve
2706 
2707   Level: advanced
2708 
2709 .keywords: DM, register
2710 .seealso: DMRegisterAll(), DMRegisterDestroy()
2711 
2712 @*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add (sname -> function) to the global list of DM implementations consulted by DMSetType() */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2721 
2722 #undef __FUNCT__
2723 #define __FUNCT__ "DMLoad"
2724 /*@C
2725   DMLoad - Loads a DM that has been stored in binary  with DMView().
2726 
2727   Collective on PetscViewer
2728 
2729   Input Parameters:
2730 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2731            some related function before a call to DMLoad().
2732 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2733            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2734 
2735    Level: intermediate
2736 
2737   Notes:
2738    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2739 
2740   Notes for advanced users:
2741   Most users should not need to know the details of the binary storage
2742   format, since DMLoad() and DMView() completely hide these details.
2743   But for anyone who's interested, the standard binary matrix storage
2744   format is
2745 .vb
2746      has not yet been determined
2747 .ve
2748 
2749 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2750 @*/
2751 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2752 {
2753   PetscErrorCode ierr;
2754   PetscBool      isbinary;
2755   PetscInt       classid;
2756   char           type[256];
2757 
2758   PetscFunctionBegin;
2759   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2760   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2761   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2762   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2763 
2764   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2765   if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
2766   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2767   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2768   if (newdm->ops->load) {
2769     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2770   }
2771   PetscFunctionReturn(0);
2772 }
2773 
2774 /******************************** FEM Support **********************************/
2775 
2776 #undef __FUNCT__
2777 #define __FUNCT__ "DMPrintCellVector"
2778 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2779 {
2780   PetscInt       f;
2781   PetscErrorCode ierr;
2782 
2783   PetscFunctionBegin;
2784   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2785   for (f = 0; f < len; ++f) {
2786     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %g |\n", (double)PetscRealPart(x[f]));CHKERRQ(ierr);
2787   }
2788   PetscFunctionReturn(0);
2789 }
2790 
2791 #undef __FUNCT__
2792 #define __FUNCT__ "DMPrintCellMatrix"
2793 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2794 {
2795   PetscInt       f, g;
2796   PetscErrorCode ierr;
2797 
2798   PetscFunctionBegin;
2799   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2800   for (f = 0; f < rows; ++f) {
2801     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2802     for (g = 0; g < cols; ++g) {
2803       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2804     }
2805     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2806   }
2807   PetscFunctionReturn(0);
2808 }
2809 
2810 #undef __FUNCT__
2811 #define __FUNCT__ "DMPrintLocalVec"
2812 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2813 {
2814   PetscMPIInt    rank, numProcs;
2815   PetscInt       p;
2816   PetscErrorCode ierr;
2817 
2818   PetscFunctionBegin;
2819   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2820   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
2821   ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
2822   for (p = 0; p < numProcs; ++p) {
2823     if (p == rank) {
2824       Vec x;
2825 
2826       ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
2827       ierr = VecCopy(X, x);CHKERRQ(ierr);
2828       ierr = VecChop(x, tol);CHKERRQ(ierr);
2829       ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2830       ierr = VecDestroy(&x);CHKERRQ(ierr);
2831       ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2832     }
2833     ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
2834   }
2835   PetscFunctionReturn(0);
2836 }
2837 
2838 #undef __FUNCT__
2839 #define __FUNCT__ "DMGetDefaultSection"
2840 /*@
2841   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2842 
2843   Input Parameter:
2844 . dm - The DM
2845 
2846   Output Parameter:
2847 . section - The PetscSection
2848 
2849   Level: intermediate
2850 
2851   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2852 
2853 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2854 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference; NULL if no section has been set on the DM */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2863 
2864 #undef __FUNCT__
2865 #define __FUNCT__ "DMSetDefaultSection"
2866 /*@
2867   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2868 
2869   Input Parameters:
2870 + dm - The DM
2871 - section - The PetscSection
2872 
2873   Level: intermediate
2874 
2875   Note: Any existing Section will be destroyed
2876 
2877 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2878 @*/
2879 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2880 {
2881   PetscInt       numFields;
2882   PetscInt       f;
2883   PetscErrorCode ierr;
2884 
2885   PetscFunctionBegin;
2886   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2887   PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2888   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2889   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2890   dm->defaultSection = section;
2891   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2892   if (numFields) {
2893     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2894     for (f = 0; f < numFields; ++f) {
2895       const char *name;
2896 
2897       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2898       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2899     }
2900   }
2901   /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
2902   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2903   PetscFunctionReturn(0);
2904 }
2905 
2906 #undef __FUNCT__
2907 #define __FUNCT__ "DMGetDefaultGlobalSection"
2908 /*@
2909   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2910 
2911   Collective on DM
2912 
2913   Input Parameter:
2914 . dm - The DM
2915 
2916   Output Parameter:
2917 . section - The PetscSection
2918 
2919   Level: intermediate
2920 
2921   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2922 
2923 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2924 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Build the global section lazily from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the cached dof layout so it matches the freshly created global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2941 
2942 #undef __FUNCT__
2943 #define __FUNCT__ "DMSetDefaultGlobalSection"
2944 /*@
2945   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2946 
2947   Input Parameters:
2948 + dm - The DM
2949 - section - The PetscSection, or NULL
2950 
2951   Level: intermediate
2952 
2953   Note: Any existing Section will be destroyed
2954 
2955 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2956 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* section may be NULL, which clears the stored global section */
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so re-setting the currently stored section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2969 
2970 #undef __FUNCT__
2971 #define __FUNCT__ "DMGetDefaultSF"
2972 /*@
2973   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2974   it is created from the default PetscSection layouts in the DM.
2975 
2976   Input Parameter:
2977 . dm - The DM
2978 
2979   Output Parameter:
2980 . sf - The PetscSF
2981 
2982   Level: intermediate
2983 
2984   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2985 
2986 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2987 @*/
2988 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2989 {
2990   PetscInt       nroots;
2991   PetscErrorCode ierr;
2992 
2993   PetscFunctionBegin;
2994   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2995   PetscValidPointer(sf, 2);
2996   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2997   if (nroots < 0) {
2998     PetscSection section, gSection;
2999 
3000     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
3001     if (section) {
3002       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
3003       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
3004     } else {
3005       *sf = NULL;
3006       PetscFunctionReturn(0);
3007     }
3008   }
3009   *sf = dm->defaultSF;
3010   PetscFunctionReturn(0);
3011 }
3012 
3013 #undef __FUNCT__
3014 #define __FUNCT__ "DMSetDefaultSF"
3015 /*@
3016   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3017 
3018   Input Parameters:
3019 + dm - The DM
3020 - sf - The PetscSF
3021 
3022   Level: intermediate
3023 
3024   Note: Any previous SF is destroyed
3025 
3026 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3027 @*/
3028 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3029 {
3030   PetscErrorCode ierr;
3031 
3032   PetscFunctionBegin;
3033   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3034   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3035   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3036   dm->defaultSF = sf;
3037   PetscFunctionReturn(0);
3038 }
3039 
3040 #undef __FUNCT__
3041 #define __FUNCT__ "DMCreateDefaultSF"
3042 /*@C
3043   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3044   describing the data layout.
3045 
3046   Input Parameters:
3047 + dm - The DM
3048 . localSection - PetscSection describing the local data layout
3049 - globalSection - PetscSection describing the global data layout
3050 
3051   Level: intermediate
3052 
3053 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3054 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;   /* per-rank ownership ranges of global dofs */
  PetscInt       *local;    /* leaf: local dof indices (unconstrained) */
  PetscSFNode    *remote;   /* root: (rank, index) each leaf maps to */
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots are the dofs this rank owns (constrained dofs excluded) */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Layout of owned dofs across ranks, used below to turn a global offset into an owner rank */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes an off-process point;
     its true dof count is -(gdof+1). Constrained dofs are excluded. */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc1(nleaves, &local);CHKERRQ(ierr);
  ierr = PetscMalloc1(nleaves, &remote);CHKERRQ(ierr);
  /* Second pass: fill leaf (local) and root (remote) entries; l walks the leaf arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    /* Effective dof count for this point after removing constraints */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local dof indices, skipping constrained dofs (cind is sorted by construction here) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Off-process point: decode the true global offset from -(goff+1) and
         binary-search the layout ranges for the owner rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size+1,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact range boundary: convert insert location to owning interval */
        if ((r < 0) || (r >= size)) SETERRQ4(PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "Point %d mapped to invalid process %d (%d, %d)", p, r, gdof, goff);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: roots live on this rank at goff relative to our range start */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  /* Sanity check: both passes must agree on the leaf count */
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3130 
3131 #undef __FUNCT__
3132 #define __FUNCT__ "DMGetPointSF"
3133 /*@
3134   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3135 
3136   Input Parameter:
3137 . dm - The DM
3138 
3139   Output Parameter:
3140 . sf - The PetscSF
3141 
3142   Level: intermediate
3143 
3144   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3145 
3146 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3147 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference to the point-overlap SF */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3156 
3157 #undef __FUNCT__
3158 #define __FUNCT__ "DMSetPointSF"
3159 /*@
3160   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3161 
3162   Input Parameters:
3163 + dm - The DM
3164 - sf - The PetscSF
3165 
3166   Level: intermediate
3167 
3168 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3169 @*/
3170 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3171 {
3172   PetscErrorCode ierr;
3173 
3174   PetscFunctionBegin;
3175   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3176   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3177   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3178   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3179   dm->sf = sf;
3180   PetscFunctionReturn(0);
3181 }
3182 
3183 #undef __FUNCT__
3184 #define __FUNCT__ "DMGetNumFields"
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  /* Return the number of field objects stored on the DM */
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3193 
3194 #undef __FUNCT__
3195 #define __FUNCT__ "DMSetNumFields"
3196 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3197 {
3198   PetscInt       f;
3199   PetscErrorCode ierr;
3200 
3201   PetscFunctionBegin;
3202   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3203   for (f = 0; f < dm->numFields; ++f) {
3204     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3205   }
3206   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3207   dm->numFields = numFields;
3208   ierr          = PetscMalloc1(dm->numFields, &dm->fields);CHKERRQ(ierr);
3209   for (f = 0; f < dm->numFields; ++f) {
3210     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3211   }
3212   PetscFunctionReturn(0);
3213 }
3214 
3215 #undef __FUNCT__
3216 #define __FUNCT__ "DMGetField"
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  /* Guard clauses: the field array must have been allocated and f must be in range */
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  /* Borrowed reference */
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3227 
3228 #undef __FUNCT__
3229 #define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook (registered by DMSetCoordinates): when the coarse DM dmc has no
   coordinates yet, build them by injecting the fine DM's coordinates. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  /* Only act when the fine DM has coordinates and the coarse one does not */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    /* Injection from the fine coordinate DM into the coarse one */
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates takes its own reference, so ccoords can be destroyed below */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3252 
3253 #undef __FUNCT__
3254 #define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook (registered by DMSetCoordinates): when subdm lacks coordinates,
   scatter both global and local coordinate vectors from dm onto the subdomain. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  /* Only act when dm has coordinates and the subdomain does not */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* scat_i fills the subdomain's global vector, scat_g its local (ghosted) vector */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* The setters take their own references; release our local copies afterwards */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    /* The scatter arrays themselves were allocated by DMCreateDomainDecompositionScatters */
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3285 
3286 #undef __FUNCT__
3287 #define __FUNCT__ "DMSetCoordinates"
3288 /*@
3289   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3290 
3291   Collective on DM
3292 
3293   Input Parameters:
3294 + dm - the DM
3295 - c - coordinate vector
3296 
3297   Note:
3298   The coordinates do include those for ghost points, which are in the local vector
3299 
3300   Level: intermediate
3301 
3302 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3304 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so re-setting the current coordinate vector is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; DMGetCoordinatesLocal() will rebuild it */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Ensure coordinates propagate automatically to coarsened and subdomain DMs */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3320 
3321 #undef __FUNCT__
3322 #define __FUNCT__ "DMSetCoordinatesLocal"
3323 /*@
3324   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3325 
3326   Collective on DM
3327 
3328    Input Parameters:
3329 +  dm - the DM
3330 -  c - coordinate vector
3331 
3332   Note:
3333   The coordinates of ghost points can be set using DMSetCoordinates()
3334   followed by DMGetCoordinatesLocal(). This is intended to enable the
3335   setting of ghost coordinates outside of the domain.
3336 
3337   Level: intermediate
3338 
3339 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3340 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3341 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so re-setting the current local coordinates is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global form; DMGetCoordinates() will rebuild it */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3357 
3358 #undef __FUNCT__
3359 #define __FUNCT__ "DMGetCoordinates"
3360 /*@
3361   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3362 
3363   Not Collective
3364 
3365   Input Parameter:
3366 . dm - the DM
3367 
3368   Output Parameter:
3369 . c - global coordinate vector
3370 
3371   Note:
3372   This is a borrowed reference, so the user should NOT destroy this vector
3373 
3374   Each process has only the local coordinates (does NOT have the ghost coordinates).
3375 
3376   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3377   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3378 
3379   Level: intermediate
3380 
3381 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3382 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3383 @*/
3384 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3385 {
3386   PetscErrorCode ierr;
3387 
3388   PetscFunctionBegin;
3389   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3390   PetscValidPointer(c,2);
3391   if (!dm->coordinates && dm->coordinatesLocal) {
3392     DM cdm = NULL;
3393 
3394     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3395     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3396     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3397     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3398     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3399   }
3400   *c = dm->coordinates;
3401   PetscFunctionReturn(0);
3402 }
3403 
3404 #undef __FUNCT__
3405 #define __FUNCT__ "DMGetCoordinatesLocal"
3406 /*@
3407   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3408 
3409   Collective on DM
3410 
3411   Input Parameter:
3412 . dm - the DM
3413 
3414   Output Parameter:
3415 . c - coordinate vector
3416 
3417   Note:
3418   This is a borrowed reference, so the user should NOT destroy this vector
3419 
3420   Each process has the local and ghost coordinates
3421 
3422   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3423   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3424 
3425   Level: intermediate
3426 
3427 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3428 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3429 @*/
3430 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3431 {
3432   PetscErrorCode ierr;
3433 
3434   PetscFunctionBegin;
3435   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3436   PetscValidPointer(c,2);
3437   if (!dm->coordinatesLocal && dm->coordinates) {
3438     DM cdm = NULL;
3439 
3440     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3441     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3442     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3443     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3444     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3445   }
3446   *c = dm->coordinatesLocal;
3447   PetscFunctionReturn(0);
3448 }
3449 
3450 #undef __FUNCT__
3451 #define __FUNCT__ "DMGetCoordinateDM"
3452 /*@
3453   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3454 
3455   Collective on DM
3456 
3457   Input Parameter:
3458 . dm - the DM
3459 
3460   Output Parameter:
3461 . cdm - coordinate DM
3462 
3463   Level: intermediate
3464 
3465 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3466 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3467 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Create the coordinate DM lazily via the implementation's hook */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3482 
3483 #undef __FUNCT__
3484 #define __FUNCT__ "DMSetCoordinateDM"
3485 /*@
3486   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3487 
3488   Logically Collective on DM
3489 
3490   Input Parameters:
3491 + dm - the DM
3492 - cdm - coordinate DM
3493 
3494   Level: intermediate
3495 
3496 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3497 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3498 @*/
3499 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3500 {
3501   PetscErrorCode ierr;
3502 
3503   PetscFunctionBegin;
3504   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3505   PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
3506   ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
3507   dm->coordinateDM = cdm;
3508   ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
3509   PetscFunctionReturn(0);
3510 }
3511 
3512 #undef __FUNCT__
3513 #define __FUNCT__ "DMGetCoordinateSection"
3514 /*@
3515   DMGetCoordinateSection - Retrieve the layout of coordinate values over the mesh.
3516 
3517   Not Collective
3518 
3519   Input Parameter:
3520 . dm - The DM object
3521 
3522   Output Parameter:
3523 . section - The PetscSection object
3524 
3525   Level: intermediate
3526 
3527 .keywords: mesh, coordinates
3528 .seealso: DMGetCoordinateDM(), DMGetDefaultSection(), DMSetDefaultSection()
3529 @*/
3530 PetscErrorCode DMGetCoordinateSection(DM dm, PetscSection *section)
3531 {
3532   DM             cdm;
3533   PetscErrorCode ierr;
3534 
3535   PetscFunctionBegin;
3536   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3537   PetscValidPointer(section, 2);
3538   ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3539   ierr = DMGetDefaultSection(cdm, section);CHKERRQ(ierr);
3540   PetscFunctionReturn(0);
3541 }
3542 
3543 #undef __FUNCT__
3544 #define __FUNCT__ "DMSetCoordinateSection"
3545 /*@
3546   DMSetCoordinateSection - Set the layout of coordinate values over the mesh.
3547 
3548   Not Collective
3549 
3550   Input Parameters:
3551 + dm      - The DM object
3552 - section - The PetscSection object
3553 
3554   Level: intermediate
3555 
3556 .keywords: mesh, coordinates
3557 .seealso: DMGetCoordinateSection(), DMGetDefaultSection(), DMSetDefaultSection()
3558 @*/
3559 PetscErrorCode DMSetCoordinateSection(DM dm, PetscSection section)
3560 {
3561   DM             cdm;
3562   PetscErrorCode ierr;
3563 
3564   PetscFunctionBegin;
3565   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3566   PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
3567   ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3568   ierr = DMSetDefaultSection(cdm, section);CHKERRQ(ierr);
3569   PetscFunctionReturn(0);
3570 }
3571 
3572 #undef __FUNCT__
3573 #define __FUNCT__ "DMLocatePoints"
3574 /*@
3575   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3576 
3577   Not collective
3578 
3579   Input Parameters:
3580 + dm - The DM
3581 - v - The Vec of points
3582 
3583   Output Parameter:
3584 . cells - The local cell numbers for cells which contain the points
3585 
3586   Level: developer
3587 
3588 .keywords: point location, mesh
3589 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3590 @*/
3591 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3592 {
3593   PetscErrorCode ierr;
3594 
3595   PetscFunctionBegin;
3596   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3597   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3598   PetscValidPointer(cells,3);
3599   if (dm->ops->locatepoints) {
3600     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3601   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3602   PetscFunctionReturn(0);
3603 }
3604