xref: /petsc/src/dm/interface/dm.c (revision b558a815d762ccc3bbeec496fd4da6cb2f6f9fd0)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
11 
  Collective on DM
13 
14   Input Parameters:
15 + dm   - the DM
16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17 - optionname - option to activate viewing
18 
19   Level: intermediate
20 
21 .keywords: DM, view, options, database
22 .seealso: VecViewFromOptions(), MatViewFromOptions()
23 */
24 PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25 {
26   PetscErrorCode    ierr;
27   PetscBool         flg;
28   PetscViewer       viewer;
29   PetscViewerFormat format;
30 
31   PetscFunctionBegin;
32   if (prefix) {
33     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
34   } else {
35     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
36   }
37   if (flg) {
38     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
39     ierr = DMView(dm,viewer);CHKERRQ(ierr);
40     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
41     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 
47 #undef __FUNCT__
48 #define __FUNCT__ "DMCreate"
49 /*@
50   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
51 
   If you never call DMSetType() it will generate an
   error when you try to use the DM.
54 
55   Collective on MPI_Comm
56 
57   Input Parameter:
58 . comm - The communicator for the DM object
59 
60   Output Parameter:
61 . dm - The DM object
62 
63   Level: beginner
64 
65 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
66 @*/
67 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
68 {
69   DM             v;
70   PetscErrorCode ierr;
71 
72   PetscFunctionBegin;
73   PetscValidPointer(dm,2);
74   *dm = NULL;
75 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
76   ierr = VecInitializePackage();CHKERRQ(ierr);
77   ierr = MatInitializePackage();CHKERRQ(ierr);
78   ierr = DMInitializePackage();CHKERRQ(ierr);
79 #endif
80 
81   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
82   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
83 
84 
85   v->ltogmap              = NULL;
86   v->ltogmapb             = NULL;
87   v->bs                   = 1;
88   v->coloringtype         = IS_COLORING_GLOBAL;
89   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
90   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
91   v->defaultSection       = NULL;
92   v->defaultGlobalSection = NULL;
93   {
94     PetscInt i;
95     for (i = 0; i < 10; ++i) {
96       v->nullspaceConstructors[i] = NULL;
97     }
98   }
99   v->numFields = 0;
100   v->fields    = NULL;
101   ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
102   ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
103   *dm = v;
104   PetscFunctionReturn(0);
105 }
106 
107 #undef __FUNCT__
108 #define __FUNCT__ "DMClone"
109 /*@
110   DMClone - Creates a DM object with the same topology as the original.
111 
112   Collective on MPI_Comm
113 
114   Input Parameter:
115 . dm - The original DM object
116 
117   Output Parameter:
118 . newdm  - The new DM object
119 
120   Level: beginner
121 
122 .keywords: DM, topology, create
123 @*/
124 PetscErrorCode DMClone(DM dm, DM *newdm)
125 {
126   PetscSF        sf;
127   Vec            coords;
128   void          *ctx;
129   PetscErrorCode ierr;
130 
131   PetscFunctionBegin;
132   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
133   PetscValidPointer(newdm,2);
134   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
135   if (dm->ops->clone) {
136     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
137   }
138   (*newdm)->setupcalled = PETSC_TRUE;
139   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
140   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
141   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
142   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
143   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
144   if (coords) {
145     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
146   } else {
147     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
148     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
149   }
150   PetscFunctionReturn(0);
151 }
152 
153 #undef __FUNCT__
154 #define __FUNCT__ "DMSetVecType"
155 /*@C
156        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
157 
158    Logically Collective on DMDA
159 
   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP
163 
164    Options Database:
165 .   -dm_vec_type ctype
166 
167    Level: intermediate
168 
169 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
170 @*/
171 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
172 {
173   PetscErrorCode ierr;
174 
175   PetscFunctionBegin;
176   PetscValidHeaderSpecific(da,DM_CLASSID,1);
177   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
178   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
179   PetscFunctionReturn(0);
180 }
181 
182 #undef __FUNCT__
183 #define __FUNCT__ "DMGetVecType"
184 /*@C
185        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
186 
187    Logically Collective on DMDA
188 
189    Input Parameter:
190 .  da - initial distributed array
191 
192    Output Parameter:
193 .  ctype - the vector type
194 
195    Level: intermediate
196 
197 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
198 @*/
199 PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
200 {
201   PetscFunctionBegin;
202   PetscValidHeaderSpecific(da,DM_CLASSID,1);
203   *ctype = da->vectype;
204   PetscFunctionReturn(0);
205 }
206 
207 #undef __FUNCT__
208 #define __FUNCT__ "VecGetDM"
209 /*@
210   VecGetDM - Gets the DM defining the data layout of the vector
211 
212   Not collective
213 
214   Input Parameter:
215 . v - The Vec
216 
217   Output Parameter:
218 . dm - The DM
219 
220   Level: intermediate
221 
222 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
223 @*/
224 PetscErrorCode VecGetDM(Vec v, DM *dm)
225 {
226   PetscErrorCode ierr;
227 
228   PetscFunctionBegin;
229   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
230   PetscValidPointer(dm,2);
231   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
232   PetscFunctionReturn(0);
233 }
234 
235 #undef __FUNCT__
236 #define __FUNCT__ "VecSetDM"
237 /*@
238   VecSetDM - Sets the DM defining the data layout of the vector
239 
240   Not collective
241 
242   Input Parameters:
243 + v - The Vec
244 - dm - The DM
245 
246   Level: intermediate
247 
248 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
249 @*/
250 PetscErrorCode VecSetDM(Vec v, DM dm)
251 {
252   PetscErrorCode ierr;
253 
254   PetscFunctionBegin;
255   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
256   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
257   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
258   PetscFunctionReturn(0);
259 }
260 
261 #undef __FUNCT__
262 #define __FUNCT__ "DMSetMatType"
263 /*@C
264        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
265 
266    Logically Collective on DM
267 
   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type
271 
272    Options Database:
273 .   -dm_mat_type ctype
274 
275    Level: intermediate
276 
277 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
278 @*/
279 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
280 {
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
285   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
286   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
287   PetscFunctionReturn(0);
288 }
289 
290 #undef __FUNCT__
291 #define __FUNCT__ "DMGetMatType"
292 /*@C
293        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
294 
295    Logically Collective on DM
296 
297    Input Parameter:
298 .  dm - the DM context
299 
300    Output Parameter:
301 .  ctype - the matrix type
302 
303    Options Database:
304 .   -dm_mat_type ctype
305 
306    Level: intermediate
307 
308 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
309 @*/
310 PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
311 {
312   PetscFunctionBegin;
313   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
314   *ctype = dm->mattype;
315   PetscFunctionReturn(0);
316 }
317 
318 #undef __FUNCT__
319 #define __FUNCT__ "MatGetDM"
320 /*@
321   MatGetDM - Gets the DM defining the data layout of the matrix
322 
323   Not collective
324 
325   Input Parameter:
326 . A - The Mat
327 
328   Output Parameter:
329 . dm - The DM
330 
331   Level: intermediate
332 
333 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
334 @*/
335 PetscErrorCode MatGetDM(Mat A, DM *dm)
336 {
337   PetscErrorCode ierr;
338 
339   PetscFunctionBegin;
340   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
341   PetscValidPointer(dm,2);
342   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
343   PetscFunctionReturn(0);
344 }
345 
346 #undef __FUNCT__
347 #define __FUNCT__ "MatSetDM"
348 /*@
349   MatSetDM - Sets the DM defining the data layout of the matrix
350 
351   Not collective
352 
353   Input Parameters:
354 + A - The Mat
355 - dm - The DM
356 
357   Level: intermediate
358 
359 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
360 @*/
361 PetscErrorCode MatSetDM(Mat A, DM dm)
362 {
363   PetscErrorCode ierr;
364 
365   PetscFunctionBegin;
366   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
367   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
368   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
369   PetscFunctionReturn(0);
370 }
371 
372 #undef __FUNCT__
373 #define __FUNCT__ "DMSetOptionsPrefix"
374 /*@C
375    DMSetOptionsPrefix - Sets the prefix used for searching for all
376    DMDA options in the database.
377 
378    Logically Collective on DMDA
379 
   Input Parameters:
381 +  da - the DMDA context
382 -  prefix - the prefix to prepend to all option names
383 
384    Notes:
385    A hyphen (-) must NOT be given at the beginning of the prefix name.
386    The first character of all runtime options is AUTOMATICALLY the hyphen.
387 
388    Level: advanced
389 
390 .keywords: DMDA, set, options, prefix, database
391 
392 .seealso: DMSetFromOptions()
393 @*/
394 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
395 {
396   PetscErrorCode ierr;
397 
398   PetscFunctionBegin;
399   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
400   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
401   PetscFunctionReturn(0);
402 }
403 
404 #undef __FUNCT__
405 #define __FUNCT__ "DMDestroy"
406 /*@
407     DMDestroy - Destroys a vector packer or DMDA.
408 
409     Collective on DM
410 
411     Input Parameter:
412 .   dm - the DM object to destroy
413 
414     Level: developer
415 
416 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
417 
418 @*/
419 PetscErrorCode  DMDestroy(DM *dm)
420 {
421   PetscInt       i, cnt = 0, f;
422   DMNamedVecLink nlink,nnext;
423   PetscErrorCode ierr;
424 
425   PetscFunctionBegin;
426   if (!*dm) PetscFunctionReturn(0);
427   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
428 
429   /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
430   for (f = 0; f < (*dm)->numFields; ++f) {
431     ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
432     ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
433     ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
434   }
435   /* count all the circular references of DM and its contained Vecs */
436   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
437     if ((*dm)->localin[i])  cnt++;
438     if ((*dm)->globalin[i]) cnt++;
439   }
440   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
441   if ((*dm)->x) {
442     DM obj;
443     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
444     if (obj == *dm) cnt++;
445   }
446   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
447   if ((*dm)->x) {
448     DM obj;
449     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
450     if (obj == *dm) cnt++;
451   }
452 
453   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
454   /*
455      Need this test because the dm references the vectors that
456      reference the dm, so destroying the dm calls destroy on the
457      vectors that cause another destroy on the dm
458   */
459   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
460   ((PetscObject) (*dm))->refct = 0;
461   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
462     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
463     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
464   }
465   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
466     nnext = nlink->next;
467     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
468     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
469     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
470     ierr = PetscFree(nlink);CHKERRQ(ierr);
471   }
472   (*dm)->namedglobal = NULL;
473 
474   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
475     nnext = nlink->next;
476     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
477     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
478     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
479     ierr = PetscFree(nlink);CHKERRQ(ierr);
480   }
481   (*dm)->namedlocal = NULL;
482 
483   /* Destroy the list of hooks */
484   {
485     DMCoarsenHookLink link,next;
486     for (link=(*dm)->coarsenhook; link; link=next) {
487       next = link->next;
488       ierr = PetscFree(link);CHKERRQ(ierr);
489     }
490     (*dm)->coarsenhook = NULL;
491   }
492   {
493     DMRefineHookLink link,next;
494     for (link=(*dm)->refinehook; link; link=next) {
495       next = link->next;
496       ierr = PetscFree(link);CHKERRQ(ierr);
497     }
498     (*dm)->refinehook = NULL;
499   }
500   {
501     DMSubDomainHookLink link,next;
502     for (link=(*dm)->subdomainhook; link; link=next) {
503       next = link->next;
504       ierr = PetscFree(link);CHKERRQ(ierr);
505     }
506     (*dm)->subdomainhook = NULL;
507   }
508   {
509     DMGlobalToLocalHookLink link,next;
510     for (link=(*dm)->gtolhook; link; link=next) {
511       next = link->next;
512       ierr = PetscFree(link);CHKERRQ(ierr);
513     }
514     (*dm)->gtolhook = NULL;
515   }
516   /* Destroy the work arrays */
517   {
518     DMWorkLink link,next;
519     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
520     for (link=(*dm)->workin; link; link=next) {
521       next = link->next;
522       ierr = PetscFree(link->mem);CHKERRQ(ierr);
523       ierr = PetscFree(link);CHKERRQ(ierr);
524     }
525     (*dm)->workin = NULL;
526   }
527 
528   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
529   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
530   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
531 
532   if ((*dm)->ctx && (*dm)->ctxdestroy) {
533     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
534   }
535   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
536   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
537   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
538   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
539   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
540   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
541   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
542 
543   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
544   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
545   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
546   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
547   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
548 
549   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
550   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
551   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
552 
553   for (f = 0; f < (*dm)->numFields; ++f) {
554     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
555   }
556   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
557   /* if memory was published with AMS then destroy it */
558   ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);
559 
560   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
561   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
562   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
563   PetscFunctionReturn(0);
564 }
565 
566 #undef __FUNCT__
567 #define __FUNCT__ "DMSetUp"
568 /*@
569     DMSetUp - sets up the data structures inside a DM object
570 
571     Collective on DM
572 
573     Input Parameter:
574 .   dm - the DM object to setup
575 
576     Level: developer
577 
578 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
579 
580 @*/
581 PetscErrorCode  DMSetUp(DM dm)
582 {
583   PetscErrorCode ierr;
584 
585   PetscFunctionBegin;
586   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
587   if (dm->setupcalled) PetscFunctionReturn(0);
588   if (dm->ops->setup) {
589     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
590   }
591   dm->setupcalled = PETSC_TRUE;
592   PetscFunctionReturn(0);
593 }
594 
595 #undef __FUNCT__
596 #define __FUNCT__ "DMSetFromOptions"
597 /*@
598     DMSetFromOptions - sets parameters in a DM from the options database
599 
600     Collective on DM
601 
602     Input Parameter:
603 .   dm - the DM object to set options for
604 
605     Options Database:
606 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
607 .   -dm_vec_type <type>  type of vector to create inside DM
608 .   -dm_mat_type <type>  type of matrix to create inside DM
609 -   -dm_coloring_type <global or ghosted>
610 
611     Level: developer
612 
613 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
614 
615 @*/
616 PetscErrorCode  DMSetFromOptions(DM dm)
617 {
618   char           typeName[256] = MATAIJ;
619   PetscBool      flg;
620   PetscErrorCode ierr;
621 
622   PetscFunctionBegin;
623   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
624   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
625   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
626   ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
627   if (flg) {
628     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
629   }
630   ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
631   if (flg) {
632     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
633   }
634   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
635   if (dm->ops->setfromoptions) {
636     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
637   }
638   /* process any options handlers added with PetscObjectAddOptionsHandler() */
639   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
640   ierr = PetscOptionsEnd();CHKERRQ(ierr);
641   ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
642   PetscFunctionReturn(0);
643 }
644 
645 #undef __FUNCT__
646 #define __FUNCT__ "DMView"
647 /*@C
648     DMView - Views a vector packer or DMDA.
649 
650     Collective on DM
651 
652     Input Parameter:
653 +   dm - the DM object to view
654 -   v - the viewer
655 
656     Level: developer
657 
658 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
659 
660 @*/
661 PetscErrorCode  DMView(DM dm,PetscViewer v)
662 {
663   PetscErrorCode ierr;
664   PetscBool      isbinary;
665 
666   PetscFunctionBegin;
667   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
668   if (!v) {
669     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
670   }
671   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
672   if (isbinary) {
673     PetscInt classid = DM_FILE_CLASSID;
674     char     type[256];
675 
676     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
677     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
678     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
679   }
680   if (dm->ops->view) {
681     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
682   }
683   PetscFunctionReturn(0);
684 }
685 
686 #undef __FUNCT__
687 #define __FUNCT__ "DMCreateGlobalVector"
688 /*@
689     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
690 
691     Collective on DM
692 
693     Input Parameter:
694 .   dm - the DM object
695 
696     Output Parameter:
697 .   vec - the global vector
698 
699     Level: beginner
700 
701 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
702 
703 @*/
704 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
705 {
706   PetscErrorCode ierr;
707 
708   PetscFunctionBegin;
709   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
710   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
711   PetscFunctionReturn(0);
712 }
713 
714 #undef __FUNCT__
715 #define __FUNCT__ "DMCreateLocalVector"
716 /*@
717     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
718 
719     Not Collective
720 
721     Input Parameter:
722 .   dm - the DM object
723 
724     Output Parameter:
725 .   vec - the local vector
726 
727     Level: beginner
728 
729 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
730 
731 @*/
732 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
733 {
734   PetscErrorCode ierr;
735 
736   PetscFunctionBegin;
737   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
738   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
739   PetscFunctionReturn(0);
740 }
741 
742 #undef __FUNCT__
743 #define __FUNCT__ "DMGetLocalToGlobalMapping"
744 /*@
745    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
746 
747    Collective on DM
748 
749    Input Parameter:
750 .  dm - the DM that provides the mapping
751 
752    Output Parameter:
753 .  ltog - the mapping
754 
755    Level: intermediate
756 
757    Notes:
758    This mapping can then be used by VecSetLocalToGlobalMapping() or
759    MatSetLocalToGlobalMapping().
760 
761 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
762 @*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the mapping from the default (local) and global sections:
         for every dof of every point in the local chart, record its global offset.
         NOTE(review): this inner 'ltog' array shadows the output parameter of the
         same name; consider renaming for clarity. */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the new mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No section: fall back to the type-specific construction, which must set dm->ltogmap */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  /* Borrowed reference: the DM retains ownership of the mapping */
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
802 
803 #undef __FUNCT__
804 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
805 /*@
806    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
807 
808    Collective on DM
809 
810    Input Parameter:
811 .  da - the distributed array that provides the mapping
812 
813    Output Parameter:
814 .  ltog - the block mapping
815 
816    Level: intermediate
817 
818    Notes:
819    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
820    MatSetLocalToGlobalMappingBlock().
821 
822 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
823 @*/
824 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
825 {
826   PetscErrorCode ierr;
827 
828   PetscFunctionBegin;
829   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
830   PetscValidPointer(ltog,2);
831   if (!dm->ltogmapb) {
832     PetscInt bs;
833     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
834     if (bs > 1) {
835       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
836       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
837     } else {
838       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
839       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
840     }
841   }
842   *ltog = dm->ltogmapb;
843   PetscFunctionReturn(0);
844 }
845 
846 #undef __FUNCT__
847 #define __FUNCT__ "DMGetBlockSize"
848 /*@
849    DMGetBlockSize - Gets the inherent block size associated with a DM
850 
851    Not Collective
852 
853    Input Parameter:
854 .  dm - the DM with block structure
855 
856    Output Parameter:
857 .  bs - the block size, 1 implies no exploitable block structure
858 
859    Level: intermediate
860 
861 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
862 @*/
863 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
864 {
865   PetscFunctionBegin;
866   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
867   PetscValidPointer(bs,2);
868   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
869   *bs = dm->bs;
870   PetscFunctionReturn(0);
871 }
872 
873 #undef __FUNCT__
874 #define __FUNCT__ "DMCreateInterpolation"
875 /*@
876     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
877 
878     Collective on DM
879 
    Input Parameters:
881 +   dm1 - the DM object
882 -   dm2 - the second, finer DM object
883 
884     Output Parameter:
885 +  mat - the interpolation
886 -  vec - the scaling (optional)
887 
888     Level: developer
889 
890     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
891         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
892 
893         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
894         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
895 
896 
897 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
898 
899 @*/
900 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
901 {
902   PetscErrorCode ierr;
903 
904   PetscFunctionBegin;
905   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
906   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
907   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
908   PetscFunctionReturn(0);
909 }
910 
911 #undef __FUNCT__
912 #define __FUNCT__ "DMCreateInjection"
913 /*@
914     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
915 
916     Collective on DM
917 
918     Input Parameter:
919 +   dm1 - the DM object
920 -   dm2 - the second, finer DM object
921 
922     Output Parameter:
923 .   ctx - the injection
924 
925     Level: developer
926 
927    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
928         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
929 
930 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
931 
932 @*/
933 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
934 {
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
939   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
940   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
941   PetscFunctionReturn(0);
942 }
943 
944 #undef __FUNCT__
945 #define __FUNCT__ "DMCreateColoring"
946 /*@
947     DMCreateColoring - Gets coloring for a DM
948 
949     Collective on DM
950 
951     Input Parameter:
952 +   dm - the DM object
953 -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
954 
955     Output Parameter:
956 .   coloring - the coloring
957 
958     Level: developer
959 
960 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
961 
962 @*/
963 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
964 {
965   PetscErrorCode ierr;
966 
967   PetscFunctionBegin;
968   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
969   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
970   ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
971   PetscFunctionReturn(0);
972 }
973 
974 #undef __FUNCT__
975 #define __FUNCT__ "DMCreateMatrix"
976 /*@
977     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
978 
979     Collective on DM
980 
981     Input Parameter:
982 .   dm - the DM object
983 
984     Output Parameter:
985 .   mat - the empty Jacobian
986 
987     Level: beginner
988 
989     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
990        do not need to do it yourself.
991 
992        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
993        the nonzero pattern call DMDASetMatPreallocateOnly()
994 
995        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
996        internally by PETSc.
997 
998        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
999        the indices for the global numbering for DMDAs which is complicated.
1000 
1001 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
1002 
1003 @*/
1004 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
1005 {
1006   PetscErrorCode ierr;
1007 
1008   PetscFunctionBegin;
1009   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1010 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
1011   ierr = MatInitializePackage();CHKERRQ(ierr);
1012 #endif
1013   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1014   PetscValidPointer(mat,3);
1015   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
1016   PetscFunctionReturn(0);
1017 }
1018 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->prealloc_only = only;  /* record the preference on the DM for later matrix creation */
  PetscFunctionReturn(0);
}
1041 
1042 #undef __FUNCT__
1043 #define __FUNCT__ "DMGetWorkArray"
1044 /*@C
1045   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
1046 
1047   Not Collective
1048 
1049   Input Parameters:
1050 + dm - the DM object
1051 . count - The minium size
1052 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1053 
1054   Output Parameter:
1055 . array - the work array
1056 
1057   Level: developer
1058 
1059 .seealso DMDestroy(), DMCreate()
1060 @*/
1061 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1062 {
1063   PetscErrorCode ierr;
1064   DMWorkLink     link;
1065   size_t         size;
1066 
1067   PetscFunctionBegin;
1068   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1069   PetscValidPointer(mem,4);
1070   if (dm->workin) {
1071     link       = dm->workin;
1072     dm->workin = dm->workin->next;
1073   } else {
1074     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1075   }
1076   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1077   if (size*count > link->bytes) {
1078     ierr        = PetscFree(link->mem);CHKERRQ(ierr);
1079     ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1080     link->bytes = size*count;
1081   }
1082   link->next   = dm->workout;
1083   dm->workout  = link;
1084   *(void**)mem = link->mem;
1085   PetscFunctionReturn(0);
1086 }
1087 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (not used by this routine; kept for symmetry with DMGetWorkArray())
. dtype - data type (not used by this routine; kept for symmetry with DMGetWorkArray())
- mem - address of the work array pointer obtained from DMGetWorkArray(); set to NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the matching link on the checked-out list and move it back to the
     checked-in list so the buffer can be reused by a later DMGetWorkArray(). */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was not obtained from (or already returned to) this DM */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1126 
1127 #undef __FUNCT__
1128 #define __FUNCT__ "DMSetNullSpaceConstructor"
1129 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1130 {
1131   PetscFunctionBegin;
1132   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1133   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1134   dm->nullspaceConstructors[field] = nullsp;
1135   PetscFunctionReturn(0);
1136 }
1137 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so it is well-defined on all return paths */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    /* First pass: count unconstrained dofs per field over points with gdof > 0
       (points with gdof <= 0 are skipped -- presumably not owned locally; verify) */
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the per-field index arrays; counters are reset and reused as
       fill positions in the second pass */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: record consecutive global offsets for each field's dofs */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* Each IS takes ownership of fieldIndices[f] (PETSC_OWN_POINTER).
         NOTE(review): when fields is NULL, the fieldIndices[f] buffers are
         never freed below -- looks like a leak; verify. */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    /* No default section: defer to the implementation-specific routine, if any */
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1250 
1251 
1252 #undef __FUNCT__
1253 #define __FUNCT__ "DMCreateFieldDecomposition"
1254 /*@C
1255   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1256                           corresponding to different fields: each IS contains the global indices of the dofs of the
1257                           corresponding field. The optional list of DMs define the DM for each subproblem.
1258                           Generalizes DMCreateFieldIS().
1259 
1260   Not collective
1261 
1262   Input Parameter:
1263 . dm - the DM object
1264 
1265   Output Parameters:
1266 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1267 . namelist  - The name for each field (or NULL if not requested)
1268 . islist    - The global indices for each field (or NULL if not requested)
1269 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1270 
1271   Level: intermediate
1272 
1273   Notes:
1274   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1275   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1276   and all of the arrays should be freed with PetscFree().
1277 
1278 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1279 @*/
1280 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1281 {
1282   PetscErrorCode ierr;
1283 
1284   PetscFunctionBegin;
1285   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1286   if (len) {
1287     PetscValidPointer(len,2);
1288     *len = 0;
1289   }
1290   if (namelist) {
1291     PetscValidPointer(namelist,3);
1292     *namelist = 0;
1293   }
1294   if (islist) {
1295     PetscValidPointer(islist,4);
1296     *islist = 0;
1297   }
1298   if (dmlist) {
1299     PetscValidPointer(dmlist,5);
1300     *dmlist = 0;
1301   }
1302   /*
1303    Is it a good idea to apply the following check across all impls?
1304    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1305    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1306    */
1307   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1308   if (!dm->ops->createfielddecomposition) {
1309     PetscSection section;
1310     PetscInt     numFields, f;
1311 
1312     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1313     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1314     if (section && numFields && dm->ops->createsubdm) {
1315       *len = numFields;
1316       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1317       for (f = 0; f < numFields; ++f) {
1318         const char *fieldName;
1319 
1320         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1321         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1322         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1323       }
1324     } else {
1325       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1326       /* By default there are no DMs associated with subproblems. */
1327       if (dmlist) *dmlist = NULL;
1328     }
1329   } else {
1330     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1331   }
1332   PetscFunctionReturn(0);
1333 }
1334 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the indices of the fields defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  /* Delegate to the type-specific implementation; there is no generic fallback */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1370 
1371 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so it is well-defined even when the DM
     implementation defines no domain decomposition */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    /* l receives the subdomain count from the implementation; it is only read
       inside this branch, so it is never used uninitialized */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1436 
1437 
1438 #undef __FUNCT__
1439 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1440 /*@C
1441   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1442 
1443   Not collective
1444 
1445   Input Parameters:
1446 + dm - the DM object
1447 . n  - the number of subdomain scatters
1448 - subdms - the local subdomains
1449 
1450   Output Parameters:
1451 + n     - the number of scatters returned
1452 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1453 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1454 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1455 
1456   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1457   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1458   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1459   solution and residual data.
1460 
1461   Level: developer
1462 
1463 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1464 @*/
1465 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1466 {
1467   PetscErrorCode ierr;
1468 
1469   PetscFunctionBegin;
1470   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1471   PetscValidPointer(subdms,3);
1472   if (dm->ops->createddscatters) {
1473     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1474   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 #undef __FUNCT__
1479 #define __FUNCT__ "DMRefine"
1480 /*@
1481   DMRefine - Refines a DM object
1482 
1483   Collective on DM
1484 
1485   Input Parameter:
1486 + dm   - the DM object
1487 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1488 
1489   Output Parameter:
1490 . dmf - the refined DM, or NULL
1491 
1492   Note: If no refinement was done, the return value is NULL
1493 
1494   Level: developer
1495 
1496 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1497 @*/
1498 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1499 {
1500   PetscErrorCode   ierr;
1501   DMRefineHookLink link;
1502 
1503   PetscFunctionBegin;
1504   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1505   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1506   if (*dmf) {
1507     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1508 
1509     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1510 
1511     (*dmf)->ctx       = dm->ctx;
1512     (*dmf)->leveldown = dm->leveldown;
1513     (*dmf)->levelup   = dm->levelup + 1;
1514 
1515     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1516     for (link=dm->refinehook; link; link=link->next) {
1517       if (link->refinehook) {
1518         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1519       }
1520     }
1521   }
1522   PetscFunctionReturn(0);
1523 }
1524 
1525 #undef __FUNCT__
1526 #define __FUNCT__ "DMRefineHookAdd"
1527 /*@C
1528    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1529 
1530    Logically Collective
1531 
1532    Input Arguments:
1533 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1534 .  refinehook - function to run when setting up a coarser level
1535 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1536 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1537 
1538    Calling sequence of refinehook:
1539 $    refinehook(DM coarse,DM fine,void *ctx);
1540 
1541 +  coarse - coarse level DM
1542 .  fine - fine level DM to interpolate problem to
1543 -  ctx - optional user-defined function context
1544 
1545    Calling sequence for interphook:
1546 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1547 
1548 +  coarse - coarse level DM
1549 .  interp - matrix interpolating a coarse-level solution to the finer grid
1550 .  fine - fine level DM to update
1551 -  ctx - optional user-defined function context
1552 
1553    Level: advanced
1554 
1555    Notes:
1556    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1557 
1558    If this function is called multiple times, the hooks will be run in the order they are added.
1559 
1560    This function is currently not available from Fortran.
1561 
1562 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1563 @*/
1564 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1565 {
1566   PetscErrorCode   ierr;
1567   DMRefineHookLink link,*p;
1568 
1569   PetscFunctionBegin;
1570   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1571   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1572   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1573   link->refinehook = refinehook;
1574   link->interphook = interphook;
1575   link->ctx        = ctx;
1576   link->next       = NULL;
1577   *p               = link;
1578   PetscFunctionReturn(0);
1579 }
1580 
1581 #undef __FUNCT__
1582 #define __FUNCT__ "DMInterpolate"
1583 /*@
1584    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1585 
1586    Collective if any hooks are
1587 
1588    Input Arguments:
1589 +  coarse - coarser DM to use as a base
1590 .  restrct - interpolation matrix, apply using MatInterpolate()
1591 -  fine - finer DM to update
1592 
1593    Level: developer
1594 
1595 .seealso: DMRefineHookAdd(), MatInterpolate()
1596 @*/
1597 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1598 {
1599   PetscErrorCode   ierr;
1600   DMRefineHookLink link;
1601 
1602   PetscFunctionBegin;
1603   for (link=fine->refinehook; link; link=link->next) {
1604     if (link->interphook) {
1605       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1606     }
1607   }
1608   PetscFunctionReturn(0);
1609 }
1610 
#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   level - number of refinements

    Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *level = dm->levelup;  /* incremented by DMRefine() each time a finer DM is created */
  PetscFunctionReturn(0);
}
1636 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link; /* appended at the tail, so hooks run in registration order */
  PetscFunctionReturn(0);
}
1686 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (DMGlobalToLocalHookAdd) before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF path: start broadcasting global values into the local vector; insertion only */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): the arrays are restored before PetscSFBcastEnd() runs in
       DMGlobalToLocalEnd(); presumably the SF holds the raw pointers until
       then -- verify */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; *_ALL_VALUES modes are downgraded to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1734 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF path: complete the broadcast started in DMGlobalToLocalBegin(); insertion only */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; *_ALL_VALUES modes are downgraded to their plain counterparts */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks (DMGlobalToLocalHookAdd) after the data has arrived */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1780 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Translate the insertion mode into the MPI reduction applied by the star forest */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* The matching PetscSFReduceEnd() is issued in DMLocalToGlobalEnd() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementation-provided communication; map the *_ALL_VALUES variants
       onto the plain modes the type-specific routine understands */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1836 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Recompute the same MPI reduction selected in DMLocalToGlobalBegin() */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* Completes the PetscSFReduce started in DMLocalToGlobalBegin(); the
       array accesses must mirror the ones made there */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementation-provided communication; map the *_ALL_VALUES variants
       onto the plain modes the type-specific routine understands */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1888 
1889 #undef __FUNCT__
1890 #define __FUNCT__ "DMLocalToLocalBegin"
1891 /*@
1892    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1893    that contain irrelevant values) to another local vector where the ghost
1894    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1895 
1896    Neighbor-wise Collective on DM and Vec
1897 
1898    Input Parameters:
1899 +  dm - the DM object
1900 .  g - the original local vector
1901 -  mode - one of INSERT_VALUES or ADD_VALUES
1902 
1903    Output Parameter:
1904 .  l  - the local vector with correct ghost values
1905 
1906    Level: intermediate
1907 
1908    Notes:
1909    The local vectors used here need not be the same as those
1910    obtained from DMCreateLocalVector(), BUT they
1911    must have the same parallel data layout; they could, for example, be
1912    obtained with VecDuplicate() from the DM originating vectors.
1913 
1914 .keywords: DM, local-to-local, begin
1915 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1916 
1917 @*/
1918 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1919 {
1920   PetscErrorCode          ierr;
1921 
1922   PetscFunctionBegin;
1923   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1924   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1925   PetscFunctionReturn(0);
1926 }
1927 
1928 #undef __FUNCT__
1929 #define __FUNCT__ "DMLocalToLocalEnd"
1930 /*@
1931    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1932    that contain irrelevant values) to another local vector where the ghost
1933    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1934 
1935    Neighbor-wise Collective on DM and Vec
1936 
1937    Input Parameters:
1938 +  da - the DM object
1939 .  g - the original local vector
1940 -  mode - one of INSERT_VALUES or ADD_VALUES
1941 
1942    Output Parameter:
1943 .  l  - the local vector with correct ghost values
1944 
1945    Level: intermediate
1946 
1947    Notes:
1948    The local vectors used here need not be the same as those
1949    obtained from DMCreateLocalVector(), BUT they
1950    must have the same parallel data layout; they could, for example, be
1951    obtained with VecDuplicate() from the DM originating vectors.
1952 
1953 .keywords: DM, local-to-local, end
1954 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1955 
1956 @*/
1957 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1958 {
1959   PetscErrorCode          ierr;
1960 
1961   PetscFunctionBegin;
1962   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1963   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1964   PetscFunctionReturn(0);
1965 }
1966 
1967 
1968 #undef __FUNCT__
1969 #define __FUNCT__ "DMCoarsen"
1970 /*@
1971     DMCoarsen - Coarsens a DM object
1972 
1973     Collective on DM
1974 
1975     Input Parameter:
1976 +   dm - the DM object
1977 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1978 
1979     Output Parameter:
1980 .   dmc - the coarsened DM
1981 
1982     Level: developer
1983 
1984 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1985 
1986 @*/
1987 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1988 {
1989   PetscErrorCode    ierr;
1990   DMCoarsenHookLink link;
1991 
1992   PetscFunctionBegin;
1993   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1994   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1995   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1996   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1997   (*dmc)->ctx               = dm->ctx;
1998   (*dmc)->levelup           = dm->levelup;
1999   (*dmc)->leveldown         = dm->leveldown + 1;
2000   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
2001   for (link=dm->coarsenhook; link; link=link->next) {
2002     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
2003   }
2004   PetscFunctionReturn(0);
2005 }
2006 
2007 #undef __FUNCT__
2008 #define __FUNCT__ "DMCoarsenHookAdd"
2009 /*@C
2010    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
2011 
2012    Logically Collective
2013 
2014    Input Arguments:
2015 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
2016 .  coarsenhook - function to run when setting up a coarser level
2017 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
2018 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2019 
2020    Calling sequence of coarsenhook:
2021 $    coarsenhook(DM fine,DM coarse,void *ctx);
2022 
2023 +  fine - fine level DM
2024 .  coarse - coarse level DM to restrict problem to
2025 -  ctx - optional user-defined function context
2026 
2027    Calling sequence for restricthook:
2028 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
2029 
2030 +  fine - fine level DM
2031 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
2032 .  rscale - scaling vector for restriction
2033 .  inject - matrix restricting by injection
2034 .  coarse - coarse level DM to update
2035 -  ctx - optional user-defined function context
2036 
2037    Level: advanced
2038 
2039    Notes:
2040    This function is only needed if auxiliary data needs to be set up on coarse grids.
2041 
2042    If this function is called multiple times, the hooks will be run in the order they are added.
2043 
2044    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2045    extract the finest level information from its context (instead of from the SNES).
2046 
2047    This function is currently not available from Fortran.
2048 
2049 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2050 @*/
2051 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2052 {
2053   PetscErrorCode    ierr;
2054   DMCoarsenHookLink link,*p;
2055 
2056   PetscFunctionBegin;
2057   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2058   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2059   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2060   link->coarsenhook  = coarsenhook;
2061   link->restricthook = restricthook;
2062   link->ctx          = ctx;
2063   link->next         = NULL;
2064   *p                 = link;
2065   PetscFunctionReturn(0);
2066 }
2067 
2068 #undef __FUNCT__
2069 #define __FUNCT__ "DMRestrict"
2070 /*@
2071    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2072 
2073    Collective if any hooks are
2074 
2075    Input Arguments:
2076 +  fine - finer DM to use as a base
2077 .  restrct - restriction matrix, apply using MatRestrict()
2078 .  inject - injection matrix, also use MatRestrict()
2079 -  coarse - coarer DM to update
2080 
2081    Level: developer
2082 
2083 .seealso: DMCoarsenHookAdd(), MatRestrict()
2084 @*/
2085 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2086 {
2087   PetscErrorCode    ierr;
2088   DMCoarsenHookLink link;
2089 
2090   PetscFunctionBegin;
2091   for (link=fine->coarsenhook; link; link=link->next) {
2092     if (link->restricthook) {
2093       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2094     }
2095   }
2096   PetscFunctionReturn(0);
2097 }
2098 
2099 #undef __FUNCT__
2100 #define __FUNCT__ "DMSubDomainHookAdd"
2101 /*@C
2102    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2103 
2104    Logically Collective
2105 
2106    Input Arguments:
2107 +  global - global DM
2108 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2109 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2110 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2111 
2112 
2113    Calling sequence for ddhook:
2114 $    ddhook(DM global,DM block,void *ctx)
2115 
2116 +  global - global DM
2117 .  block  - block DM
2118 -  ctx - optional user-defined function context
2119 
2120    Calling sequence for restricthook:
2121 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2122 
2123 +  global - global DM
2124 .  out    - scatter to the outer (with ghost and overlap points) block vector
2125 .  in     - scatter to block vector values only owned locally
2126 .  block  - block DM
2127 -  ctx - optional user-defined function context
2128 
2129    Level: advanced
2130 
2131    Notes:
2132    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2133 
2134    If this function is called multiple times, the hooks will be run in the order they are added.
2135 
2136    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2137    extract the global information from its context (instead of from the SNES).
2138 
2139    This function is currently not available from Fortran.
2140 
2141 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2142 @*/
2143 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2144 {
2145   PetscErrorCode      ierr;
2146   DMSubDomainHookLink link,*p;
2147 
2148   PetscFunctionBegin;
2149   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2150   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2151   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2152   link->restricthook = restricthook;
2153   link->ddhook       = ddhook;
2154   link->ctx          = ctx;
2155   link->next         = NULL;
2156   *p                 = link;
2157   PetscFunctionReturn(0);
2158 }
2159 
2160 #undef __FUNCT__
2161 #define __FUNCT__ "DMSubDomainRestrict"
2162 /*@
2163    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2164 
2165    Collective if any hooks are
2166 
2167    Input Arguments:
2168 +  fine - finer DM to use as a base
2169 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2170 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2171 -  coarse - coarer DM to update
2172 
2173    Level: developer
2174 
2175 .seealso: DMCoarsenHookAdd(), MatRestrict()
2176 @*/
2177 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2178 {
2179   PetscErrorCode      ierr;
2180   DMSubDomainHookLink link;
2181 
2182   PetscFunctionBegin;
2183   for (link=global->subdomainhook; link; link=link->next) {
2184     if (link->restricthook) {
2185       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2186     }
2187   }
2188   PetscFunctionReturn(0);
2189 }
2190 
2191 #undef __FUNCT__
2192 #define __FUNCT__ "DMGetCoarsenLevel"
2193 /*@
2194     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2195 
2196     Not Collective
2197 
2198     Input Parameter:
2199 .   dm - the DM object
2200 
2201     Output Parameter:
2202 .   level - number of coarsenings
2203 
2204     Level: developer
2205 
2206 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2207 
2208 @*/
2209 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2210 {
2211   PetscFunctionBegin;
2212   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2213   *level = dm->leveldown;
2214   PetscFunctionReturn(0);
2215 }
2216 
2217 
2218 
2219 #undef __FUNCT__
2220 #define __FUNCT__ "DMRefineHierarchy"
2221 /*@C
2222     DMRefineHierarchy - Refines a DM object, all levels at once
2223 
2224     Collective on DM
2225 
2226     Input Parameter:
2227 +   dm - the DM object
2228 -   nlevels - the number of levels of refinement
2229 
2230     Output Parameter:
2231 .   dmf - the refined DM hierarchy
2232 
2233     Level: developer
2234 
2235 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2236 
2237 @*/
2238 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2239 {
2240   PetscErrorCode ierr;
2241 
2242   PetscFunctionBegin;
2243   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2244   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2245   if (nlevels == 0) PetscFunctionReturn(0);
2246   if (dm->ops->refinehierarchy) {
2247     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2248   } else if (dm->ops->refine) {
2249     PetscInt i;
2250 
2251     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2252     for (i=1; i<nlevels; i++) {
2253       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2254     }
2255   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2256   PetscFunctionReturn(0);
2257 }
2258 
2259 #undef __FUNCT__
2260 #define __FUNCT__ "DMCoarsenHierarchy"
2261 /*@C
2262     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2263 
2264     Collective on DM
2265 
2266     Input Parameter:
2267 +   dm - the DM object
2268 -   nlevels - the number of levels of coarsening
2269 
2270     Output Parameter:
2271 .   dmc - the coarsened DM hierarchy
2272 
2273     Level: developer
2274 
2275 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2276 
2277 @*/
2278 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2279 {
2280   PetscErrorCode ierr;
2281 
2282   PetscFunctionBegin;
2283   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2284   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2285   if (nlevels == 0) PetscFunctionReturn(0);
2286   PetscValidPointer(dmc,3);
2287   if (dm->ops->coarsenhierarchy) {
2288     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2289   } else if (dm->ops->coarsen) {
2290     PetscInt i;
2291 
2292     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2293     for (i=1; i<nlevels; i++) {
2294       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2295     }
2296   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2297   PetscFunctionReturn(0);
2298 }
2299 
2300 #undef __FUNCT__
2301 #define __FUNCT__ "DMCreateAggregates"
2302 /*@
2303    DMCreateAggregates - Gets the aggregates that map between
2304    grids associated with two DMs.
2305 
2306    Collective on DM
2307 
2308    Input Parameters:
2309 +  dmc - the coarse grid DM
2310 -  dmf - the fine grid DM
2311 
2312    Output Parameters:
2313 .  rest - the restriction matrix (transpose of the projection matrix)
2314 
2315    Level: intermediate
2316 
2317 .keywords: interpolation, restriction, multigrid
2318 
2319 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2320 @*/
2321 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2322 {
2323   PetscErrorCode ierr;
2324 
2325   PetscFunctionBegin;
2326   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2327   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2328   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2329   PetscFunctionReturn(0);
2330 }
2331 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked when the DM is destroyed (destruction code not visible here) */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2355 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not copy or take ownership of ctx; the caller manages its lifetime
     unless a destroy routine is set with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2379 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; pass the ADDRESS of the user's pointer (it is
          declared void* so any pointer type may be passed without a cast)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really a void** in disguise; write the stored context through it */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2405 
2406 #undef __FUNCT__
2407 #define __FUNCT__ "DMSetVariableBounds"
2408 /*@C
2409     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2410 
2411     Logically Collective on DM
2412 
2413     Input Parameter:
2414 +   dm - the DM object
2415 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2416 
2417     Level: intermediate
2418 
2419 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2420          DMSetJacobian()
2421 
2422 @*/
2423 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2424 {
2425   PetscFunctionBegin;
2426   dm->ops->computevariablebounds = f;
2427   PetscFunctionReturn(0);
2428 }
2429 
2430 #undef __FUNCT__
2431 #define __FUNCT__ "DMHasVariableBounds"
2432 /*@
2433     DMHasVariableBounds - does the DM object have a variable bounds function?
2434 
2435     Not Collective
2436 
2437     Input Parameter:
2438 .   dm - the DM object to destroy
2439 
2440     Output Parameter:
2441 .   flg - PETSC_TRUE if the variable bounds function exists
2442 
2443     Level: developer
2444 
2445 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2446 
2447 @*/
2448 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2449 {
2450   PetscFunctionBegin;
2451   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2452   PetscFunctionReturn(0);
2453 }
2454 
2455 #undef __FUNCT__
2456 #define __FUNCT__ "DMComputeVariableBounds"
2457 /*@C
2458     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2459 
2460     Logically Collective on DM
2461 
2462     Input Parameters:
2463 +   dm - the DM object to destroy
2464 -   x  - current solution at which the bounds are computed
2465 
2466     Output parameters:
2467 +   xl - lower bound
2468 -   xu - upper bound
2469 
2470     Level: intermediate
2471 
2472 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2473 
2474 @*/
2475 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2476 {
2477   PetscErrorCode ierr;
2478 
2479   PetscFunctionBegin;
2480   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2481   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2482   if (dm->ops->computevariablebounds) {
2483     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2484   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2485   PetscFunctionReturn(0);
2486 }
2487 
2488 #undef __FUNCT__
2489 #define __FUNCT__ "DMHasColoring"
2490 /*@
2491     DMHasColoring - does the DM object have a method of providing a coloring?
2492 
2493     Not Collective
2494 
2495     Input Parameter:
2496 .   dm - the DM object
2497 
2498     Output Parameter:
2499 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2500 
2501     Level: developer
2502 
2503 .seealso DMHasFunction(), DMCreateColoring()
2504 
2505 @*/
2506 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2507 {
2508   PetscFunctionBegin;
2509   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2510   PetscFunctionReturn(0);
2511 }
2512 
2513 #undef  __FUNCT__
2514 #define __FUNCT__ "DMSetVec"
2515 /*@C
2516     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2517 
2518     Collective on DM
2519 
2520     Input Parameter:
2521 +   dm - the DM object
2522 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2523 
2524     Level: developer
2525 
2526 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2527 
2528 @*/
2529 PetscErrorCode  DMSetVec(DM dm,Vec x)
2530 {
2531   PetscErrorCode ierr;
2532 
2533   PetscFunctionBegin;
2534   if (x) {
2535     if (!dm->x) {
2536       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2537     }
2538     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2539   } else if (dm->x) {
2540     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2541   }
2542   PetscFunctionReturn(0);
2543 }
2544 
/* Registry of available DM implementations, searched by DMSetType() */
PetscFunctionList DMList              = NULL;
/* Set once DMRegisterAll() has populated DMList, so it runs only once */
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2547 
2548 #undef __FUNCT__
2549 #define __FUNCT__ "DMSetType"
2550 /*@C
2551   DMSetType - Builds a DM, for a particular DM implementation.
2552 
2553   Collective on DM
2554 
2555   Input Parameters:
2556 + dm     - The DM object
2557 - method - The name of the DM type
2558 
2559   Options Database Key:
2560 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2561 
2562   Notes:
2563   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2564 
2565   Level: intermediate
2566 
2567 .keywords: DM, set, type
2568 .seealso: DMGetType(), DMCreate()
2569 @*/
2570 PetscErrorCode  DMSetType(DM dm, DMType method)
2571 {
2572   PetscErrorCode (*r)(DM);
2573   PetscBool      match;
2574   PetscErrorCode ierr;
2575 
2576   PetscFunctionBegin;
2577   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2578   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2579   if (match) PetscFunctionReturn(0);
2580 
2581   if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
2582   ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
2583   if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2584 
2585   if (dm->ops->destroy) {
2586     ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2587     dm->ops->destroy = NULL;
2588   }
2589   ierr = (*r)(dm);CHKERRQ(ierr);
2590   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2591   PetscFunctionReturn(0);
2592 }
2593 
2594 #undef __FUNCT__
2595 #define __FUNCT__ "DMGetType"
2596 /*@C
2597   DMGetType - Gets the DM type name (as a string) from the DM.
2598 
2599   Not Collective
2600 
2601   Input Parameter:
2602 . dm  - The DM
2603 
2604   Output Parameter:
2605 . type - The DM type name
2606 
2607   Level: intermediate
2608 
2609 .keywords: DM, get, type, name
2610 .seealso: DMSetType(), DMCreate()
2611 @*/
2612 PetscErrorCode  DMGetType(DM dm, DMType *type)
2613 {
2614   PetscErrorCode ierr;
2615 
2616   PetscFunctionBegin;
2617   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2618   PetscValidCharPointer(type,2);
2619   if (!DMRegisterAllCalled) {
2620     ierr = DMRegisterAll();CHKERRQ(ierr);
2621   }
2622   *type = ((PetscObject)dm)->type_name;
2623   PetscFunctionReturn(0);
2624 }
2625 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but not consulted below; even a
     same-type conversion goes through the registered converter lookup */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class,
          composed under the name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class.
           A temporary DM of the target type is created just to query it. */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      /* The temporary DM is no longer needed once the converter is found */
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): no basic converter is implemented; this path errors out
       (and the temporary B created in step 2 is not destroyed before erroring) */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  /* Mark the new DM as changed so cached data depending on it is invalidated */
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2721 
2722 /*--------------------------------------------------------------------------------------------------------------------*/
2723 
2724 #undef __FUNCT__
2725 #define __FUNCT__ "DMRegister"
2726 /*@C
2727   DMRegister -  Adds a new DM component implementation
2728 
2729   Not Collective
2730 
2731   Input Parameters:
2732 + name        - The name of a new user-defined creation routine
2733 - create_func - The creation routine itself
2734 
2735   Notes:
2736   DMRegister() may be called multiple times to add several user-defined DMs
2737 
2738 
2739   Sample usage:
2740 .vb
2741     DMRegister("my_da", MyDMCreate);
2742 .ve
2743 
2744   Then, your DM type can be chosen with the procedural interface via
2745 .vb
2746     DMCreate(MPI_Comm, DM *);
2747     DMSetType(DM,"my_da");
2748 .ve
2749    or at runtime via the option
2750 .vb
2751     -da_type my_da
2752 .ve
2753 
2754   Level: advanced
2755 
2756 .keywords: DM, register
2757 .seealso: DMRegisterAll(), DMRegisterDestroy()
2758 
2759 @*/
2760 PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2761 {
2762   PetscErrorCode ierr;
2763 
2764   PetscFunctionBegin;
2765   ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
2766   PetscFunctionReturn(0);
2767 }
2768 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

   NOTE(review): HDF5 viewers were previously mentioned here, but the current
   implementation rejects any viewer that is not PETSCVIEWERBINARY -- confirm
   before advertising HDF5 support.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only binary viewers are supported by this implementation */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header: classid sentinel followed by the type name in a fixed 256-byte field */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the remainder of the load to the type-specific implementation, if any */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2820 
2821 /******************************** FEM Support **********************************/
2822 
2823 #undef __FUNCT__
2824 #define __FUNCT__ "DMPrintCellVector"
2825 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2826 {
2827   PetscInt       f;
2828   PetscErrorCode ierr;
2829 
2830   PetscFunctionBegin;
2831   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2832   for (f = 0; f < len; ++f) {
2833     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2834   }
2835   PetscFunctionReturn(0);
2836 }
2837 
2838 #undef __FUNCT__
2839 #define __FUNCT__ "DMPrintCellMatrix"
2840 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2841 {
2842   PetscInt       f, g;
2843   PetscErrorCode ierr;
2844 
2845   PetscFunctionBegin;
2846   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2847   for (f = 0; f < rows; ++f) {
2848     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2849     for (g = 0; g < cols; ++g) {
2850       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2851     }
2852     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2853   }
2854   PetscFunctionReturn(0);
2855 }
2856 
2857 #undef __FUNCT__
2858 #define __FUNCT__ "DMPrintLocalVec"
2859 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2860 {
2861   PetscMPIInt    rank, numProcs;
2862   PetscInt       p;
2863   PetscErrorCode ierr;
2864 
2865   PetscFunctionBegin;
2866   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2867   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
2868   ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
2869   for (p = 0; p < numProcs; ++p) {
2870     if (p == rank) {
2871       Vec x;
2872 
2873       ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
2874       ierr = VecCopy(X, x);CHKERRQ(ierr);
2875       ierr = VecChop(x, tol);CHKERRQ(ierr);
2876       ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2877       ierr = VecDestroy(&x);CHKERRQ(ierr);
2878       ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2879     }
2880     ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
2881   }
2882   PetscFunctionReturn(0);
2883 }
2884 
2885 #undef __FUNCT__
2886 #define __FUNCT__ "DMGetDefaultSection"
2887 /*@
2888   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2889 
2890   Input Parameter:
2891 . dm - The DM
2892 
2893   Output Parameter:
2894 . section - The PetscSection
2895 
2896   Level: intermediate
2897 
2898   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2899 
2900 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2901 @*/
2902 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2903 {
2904   PetscFunctionBegin;
2905   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2906   PetscValidPointer(section, 2);
2907   *section = dm->defaultSection;
2908   PetscFunctionReturn(0);
2909 }
2910 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Notes:
  Any existing Section will be destroyed. The DM takes its own reference to
  section. Field names from the section are copied onto the DM's field
  objects (created via DMSetNumFields() if necessary), and the cached global
  section is invalidated so it is rebuilt lazily.

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so section == dm->defaultSection is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      /* Mirror the section's field names onto the DM's field objects */
      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2952 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Notes:
  This gets a borrowed reference, so the user should not destroy this PetscSection.

  The global section is built lazily from the default (local) PetscSection and
  the point PetscSF; both must have been set on the DM beforehand or an error
  is raised. The DM's PetscLayout (dm->map) is rebuilt at the same time.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazy creation: requires both the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Keep the DM's value layout consistent with the new global section */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2988 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection, or NULL

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* NOTE(review): section may legitimately be NULL here (used to clear the
     cached section); this assumes PetscObjectReference() tolerates NULL --
     confirm, otherwise guard the call like the validation above. */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  /* Reference before destroy so section == dm->defaultGlobalSection is safe */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
3016 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or NULL if no default PetscSection is set on the DM

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 indicates the SF graph has not been set yet */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Lazily build the dof SF from the local and global sections */
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No layout information available: hand back NULL rather than an empty SF */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
3059 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Notes:
  Any previous SF is destroyed.

  NOTE(review): unlike DMSetPointSF(), this routine does NOT call
  PetscObjectReference() on sf, i.e. the DM steals the caller's reference.
  Confirm this asymmetry is intended before changing either routine.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3086 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Notes:
  The result is stored in dm->defaultSF (via PetscSFSetGraph with
  PETSC_OWN_POINTER, so the arrays built here are owned by the SF).

  The global section uses a sign convention: a negative dof count gdof means
  the point is owned remotely and encodes size -(gdof+1); similarly a negative
  offset goff encodes the remote global offset -(goff+1).

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned (root) dofs so remote offsets can be mapped to ranks */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves, i.e. unconstrained dofs of every charted point
     (negative gdof encodes a remotely-owned point of size -(gdof+1)) */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local dof indices and the matching (rank, offset) roots */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local indices, skipping constrained dofs listed in cind */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the global offset and look up the owner rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* convert "not found" insertion point to owning range */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: roots live on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3176 
3177 #undef __FUNCT__
3178 #define __FUNCT__ "DMGetPointSF"
3179 /*@
3180   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3181 
3182   Input Parameter:
3183 . dm - The DM
3184 
3185   Output Parameter:
3186 . sf - The PetscSF
3187 
3188   Level: intermediate
3189 
3190   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3191 
3192 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3193 @*/
3194 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3195 {
3196   PetscFunctionBegin;
3197   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3198   PetscValidPointer(sf, 2);
3199   *sf = dm->sf;
3200   PetscFunctionReturn(0);
3201 }
3202 
3203 #undef __FUNCT__
3204 #define __FUNCT__ "DMSetPointSF"
3205 /*@
3206   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3207 
3208   Input Parameters:
3209 + dm - The DM
3210 - sf - The PetscSF
3211 
3212   Level: intermediate
3213 
3214 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3215 @*/
3216 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3217 {
3218   PetscErrorCode ierr;
3219 
3220   PetscFunctionBegin;
3221   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3222   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3223   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3224   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3225   dm->sf = sf;
3226   PetscFunctionReturn(0);
3227 }
3228 
3229 #undef __FUNCT__
3230 #define __FUNCT__ "DMGetNumFields"
3231 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3232 {
3233   PetscFunctionBegin;
3234   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3235   PetscValidPointer(numFields, 2);
3236   *numFields = dm->numFields;
3237   PetscFunctionReturn(0);
3238 }
3239 
3240 #undef __FUNCT__
3241 #define __FUNCT__ "DMSetNumFields"
3242 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3243 {
3244   PetscInt       f;
3245   PetscErrorCode ierr;
3246 
3247   PetscFunctionBegin;
3248   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3249   for (f = 0; f < dm->numFields; ++f) {
3250     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3251   }
3252   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3253   dm->numFields = numFields;
3254   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3255   for (f = 0; f < dm->numFields; ++f) {
3256     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3257   }
3258   PetscFunctionReturn(0);
3259 }
3260 
3261 #undef __FUNCT__
3262 #define __FUNCT__ "DMGetField"
3263 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3264 {
3265   PetscFunctionBegin;
3266   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3267   PetscValidPointer(field, 2);
3268   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3269   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3270   *field = dm->fields[f];
3271   PetscFunctionReturn(0);
3272 }
3273 
#undef __FUNCT__
#define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has coordinates but the coarse DM does not,
   inject the fine coordinates onto the coarse coordinate DM and install them.
   Registered by DMSetCoordinates() via DMCoarsenHookAdd(); ctx is unused. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  /* Only act if the coarse DM has no coordinates of its own yet */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    /* Injection scatter maps fine coordinate values down to the coarse grid */
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    /* DMSetCoordinates took its own reference, so drop ours */
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3298 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has coordinates but the subdomain DM does
   not, scatter both global and local coordinate vectors onto the subdomain.
   Registered by DMSetCoordinates() via DMSubDomainHookAdd(); ctx is unused. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  /* Only act if the subdomain has no coordinates of its own yet */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* scat_i fills the subdomain's global vector, scat_g its local (ghosted) vector */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    /* The DM took its own references above, so drop ours */
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3331 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Notes:
  The coordinates do include those for ghost points, which are in the local vector.

  The cached local coordinate vector is destroyed; it will be rebuilt on the
  next call to DMGetCoordinatesLocal(). Hooks are registered so that coarsened
  and subdomain DMs derived from this one inherit coordinates automatically.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so c == dm->coordinates is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the local form; DMGetCoordinatesLocal() will rebuild it */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3366 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Notes:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  The cached global coordinate vector is destroyed; it will be rebuilt on the
  next call to DMGetCoordinates().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the global form; DMGetCoordinates() will rebuild it */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3403 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Notes:
  This is a borrowed reference, so the user should NOT destroy this vector.

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  If only a local coordinate vector has been set, the global vector is built
  lazily here by a local-to-global scatter on the coordinate DM.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local one when needed */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3449 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Notes:
  This is a borrowed reference, so the user should NOT destroy this vector.

  Each process has the local and ghost coordinates.

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  If only a global coordinate vector has been set, the local vector is built
  lazily here by a global-to-local scatter on the coordinate DM (hence this
  routine is collective).

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local (ghosted) vector from the global one when needed */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3495 
3496 #undef __FUNCT__
3497 #define __FUNCT__ "DMGetCoordinateDM"
3498 /*@
3499   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3500 
3501   Collective on DM
3502 
3503   Input Parameter:
3504 . dm - the DM
3505 
3506   Output Parameter:
3507 . cdm - coordinate DM
3508 
3509   Level: intermediate
3510 
3511 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3512 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3513 @*/
3514 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3515 {
3516   PetscErrorCode ierr;
3517 
3518   PetscFunctionBegin;
3519   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3520   PetscValidPointer(cdm,2);
3521   if (!dm->coordinateDM) {
3522     if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3523     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3524   }
3525   *cdm = dm->coordinateDM;
3526   PetscFunctionReturn(0);
3527 }
3528 
3529 #undef __FUNCT__
3530 #define __FUNCT__ "DMLocatePoints"
3531 /*@
3532   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3533 
3534   Not collective
3535 
3536   Input Parameters:
3537 + dm - The DM
3538 - v - The Vec of points
3539 
3540   Output Parameter:
3541 . cells - The local cell numbers for cells which contain the points
3542 
3543   Level: developer
3544 
3545 .keywords: point location, mesh
3546 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3547 @*/
3548 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3549 {
3550   PetscErrorCode ierr;
3551 
3552   PetscFunctionBegin;
3553   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3554   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3555   PetscValidPointer(cells,3);
3556   if (dm->ops->locatepoints) {
3557     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3558   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3559   PetscFunctionReturn(0);
3560 }
3561