xref: /petsc/src/dm/interface/dm.c (revision e30e807f52449ee56cb8e6e4bd3267d3371e2f11)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMCreate"
8 /*@
9   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
10 
11    If you never  call DMSetType()  it will generate an
12    error when you try to use the vector.
13 
14   Collective on MPI_Comm
15 
16   Input Parameter:
17 . comm - The communicator for the DM object
18 
19   Output Parameter:
20 . dm - The DM object
21 
22   Level: beginner
23 
24 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
25 @*/
26 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
27 {
28   DM             v;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   PetscValidPointer(dm,2);
33   *dm = PETSC_NULL;
34 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
35   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
36   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
37   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
38 #endif
39 
40   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
41   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
42 
43 
44   v->ltogmap      = PETSC_NULL;
45   v->ltogmapb     = PETSC_NULL;
46   v->bs           = 1;
47   v->coloringtype = IS_COLORING_GLOBAL;
48   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
49   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
50   v->defaultSection       = PETSC_NULL;
51   v->defaultGlobalSection = PETSC_NULL;
52   {
53     PetscInt i;
54     for (i = 0; i < 10; ++i) {
55       v->nullspaceConstructors[i] = PETSC_NULL;
56     }
57   }
58   v->numFields = 0;
59   v->fields    = PETSC_NULL;
60 
61   *dm = v;
62   PetscFunctionReturn(0);
63 }
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "DMSetVecType"
67 /*@C
68        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
69 
70    Logically Collective on DMDA
71 
72    Input Parameter:
73 +  da - initial distributed array
74 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
75 
76    Options Database:
77 .   -dm_vec_type ctype
78 
79    Level: intermediate
80 
81 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
82 @*/
83 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
84 {
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   PetscValidHeaderSpecific(da,DM_CLASSID,1);
89   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
90   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
91   PetscFunctionReturn(0);
92 }
93 
94 #undef __FUNCT__
95 #define __FUNCT__ "VecGetDM"
96 /*@
97   VecGetDM - Gets the DM defining the data layout of the vector
98 
99   Not collective
100 
101   Input Parameter:
102 . v - The Vec
103 
104   Output Parameter:
105 . dm - The DM
106 
107   Level: intermediate
108 
109 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
110 @*/
111 PetscErrorCode VecGetDM(Vec v, DM *dm)
112 {
113   PetscErrorCode ierr;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
117   PetscValidPointer(dm,2);
118   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "DMSetMatType"
150 /*@C
151        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
152 
153    Logically Collective on DM
154 
155    Input Parameter:
156 +  dm - the DM context
157 .  ctype - the matrix type
158 
159    Options Database:
160 .   -dm_mat_type ctype
161 
162    Level: intermediate
163 
164 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
165 @*/
166 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
167 {
168   PetscErrorCode ierr;
169   PetscFunctionBegin;
170   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
171   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
172   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatGetDM"
178 /*@
179   MatGetDM - Gets the DM defining the data layout of the matrix
180 
181   Not collective
182 
183   Input Parameter:
184 . A - The Mat
185 
186   Output Parameter:
187 . dm - The DM
188 
189   Level: intermediate
190 
191 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
192 @*/
193 PetscErrorCode MatGetDM(Mat A, DM *dm)
194 {
195   PetscErrorCode ierr;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
199   PetscValidPointer(dm,2);
200   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
201   PetscFunctionReturn(0);
202 }
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "MatSetDM"
206 /*@
207   MatSetDM - Sets the DM defining the data layout of the matrix
208 
209   Not collective
210 
211   Input Parameters:
212 + A - The Mat
213 - dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatSetDM(Mat A, DM dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
226   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "DMSetOptionsPrefix"
232 /*@C
233    DMSetOptionsPrefix - Sets the prefix used for searching for all
234    DMDA options in the database.
235 
236    Logically Collective on DMDA
237 
238    Input Parameter:
239 +  da - the DMDA context
240 -  prefix - the prefix to prepend to all option names
241 
242    Notes:
243    A hyphen (-) must NOT be given at the beginning of the prefix name.
244    The first character of all runtime options is AUTOMATICALLY the hyphen.
245 
246    Level: advanced
247 
248 .keywords: DMDA, set, options, prefix, database
249 
250 .seealso: DMSetFromOptions()
251 @*/
252 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
253 {
254   PetscErrorCode ierr;
255 
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
258   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
259   PetscFunctionReturn(0);
260 }
261 
262 #undef __FUNCT__
263 #define __FUNCT__ "DMDestroy"
264 /*@
265     DMDestroy - Destroys a vector packer or DMDA.
266 
267     Collective on DM
268 
269     Input Parameter:
270 .   dm - the DM object to destroy
271 
272     Level: developer
273 
274 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
275 
276 @*/
277 PetscErrorCode  DMDestroy(DM *dm)
278 {
279   PetscInt       i, cnt = 0, f;
280   DMNamedVecLink nlink,nnext;
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   if (!*dm) PetscFunctionReturn(0);
285   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
286 
287   /* count all the circular references of DM and its contained Vecs */
288   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
289     if ((*dm)->localin[i])  {cnt++;}
290     if ((*dm)->globalin[i]) {cnt++;}
291   }
292   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
293   if ((*dm)->x) {
294     DM obj;
295     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
296     if (obj == *dm) cnt++;
297   }
298 
299   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
300   /*
301      Need this test because the dm references the vectors that
302      reference the dm, so destroying the dm calls destroy on the
303      vectors that cause another destroy on the dm
304   */
305   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
306   ((PetscObject) (*dm))->refct = 0;
307   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
308     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
309     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
310   }
311   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
312     nnext = nlink->next;
313     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
314     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
315     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
316     ierr = PetscFree(nlink);CHKERRQ(ierr);
317   }
318   (*dm)->namedglobal = PETSC_NULL;
319 
320   /* Destroy the list of hooks */
321   {
322     DMCoarsenHookLink link,next;
323     for (link=(*dm)->coarsenhook; link; link=next) {
324       next = link->next;
325       ierr = PetscFree(link);CHKERRQ(ierr);
326     }
327     (*dm)->coarsenhook = PETSC_NULL;
328   }
329   {
330     DMRefineHookLink link,next;
331     for (link=(*dm)->refinehook; link; link=next) {
332       next = link->next;
333       ierr = PetscFree(link);CHKERRQ(ierr);
334     }
335     (*dm)->refinehook = PETSC_NULL;
336   }
337   /* Destroy the work arrays */
338   {
339     DMWorkLink link,next;
340     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
341     for (link=(*dm)->workin; link; link=next) {
342       next = link->next;
343       ierr = PetscFree(link->mem);CHKERRQ(ierr);
344       ierr = PetscFree(link);CHKERRQ(ierr);
345     }
346     (*dm)->workin = PETSC_NULL;
347   }
348 
349   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
350   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
351   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
352 
353   if ((*dm)->ctx && (*dm)->ctxdestroy) {
354     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
355   }
356   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
357   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
358   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
359   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
360   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
361   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
362   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
363 
364   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
365   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
366   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
367   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
368   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
369 
370   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
371   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
372   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
373 
374   for (f = 0; f < (*dm)->numFields; ++f) {
375     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
376   }
377   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
378   /* if memory was published with AMS then destroy it */
379   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
380 
381   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
382   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
383   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
384   PetscFunctionReturn(0);
385 }
386 
387 #undef __FUNCT__
388 #define __FUNCT__ "DMSetUp"
389 /*@
390     DMSetUp - sets up the data structures inside a DM object
391 
392     Collective on DM
393 
394     Input Parameter:
395 .   dm - the DM object to setup
396 
397     Level: developer
398 
399 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
400 
401 @*/
402 PetscErrorCode  DMSetUp(DM dm)
403 {
404   PetscErrorCode ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
408   if (dm->setupcalled) PetscFunctionReturn(0);
409   if (dm->ops->setup) {
410     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
411   }
412   dm->setupcalled = PETSC_TRUE;
413   PetscFunctionReturn(0);
414 }
415 
416 #undef __FUNCT__
417 #define __FUNCT__ "DMSetFromOptions"
418 /*@
419     DMSetFromOptions - sets parameters in a DM from the options database
420 
421     Collective on DM
422 
423     Input Parameter:
424 .   dm - the DM object to set options for
425 
426     Options Database:
427 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
428 .   -dm_vec_type <type>  type of vector to create inside DM
429 .   -dm_mat_type <type>  type of matrix to create inside DM
430 -   -dm_coloring_type <global or ghosted>
431 
432     Level: developer
433 
434 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
435 
436 @*/
437 PetscErrorCode  DMSetFromOptions(DM dm)
438 {
439   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
440   PetscErrorCode ierr;
441   char           typeName[256] = MATAIJ;
442 
443   PetscFunctionBegin;
444   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
445   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
448     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
449     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
450     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
451     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
452     if (flg) {
453       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
454     }
455     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
456     if (flg) {
457       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
458     }
459     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
460     if (dm->ops->setfromoptions) {
461       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
462     }
463     /* process any options handlers added with PetscObjectAddOptionsHandler() */
464     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
465   ierr = PetscOptionsEnd();CHKERRQ(ierr);
466   if (flg1) {
467     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
468   }
469   if (flg2) {
470     PetscViewer viewer;
471 
472     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
473     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
474     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
475     ierr = DMView(dm, viewer);CHKERRQ(ierr);
476     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
477   }
478   if (flg3) {
479     PetscViewer viewer;
480 
481     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
482     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
483     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
484     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
485     ierr = DMView(dm, viewer);CHKERRQ(ierr);
486     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
487   }
488   if (flg4) {
489     PetscViewer viewer;
490 
491     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
492     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
493     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
494     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
495     ierr = DMView(dm, viewer);CHKERRQ(ierr);
496     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
497   }
498   PetscFunctionReturn(0);
499 }
500 
501 #undef __FUNCT__
502 #define __FUNCT__ "DMView"
503 /*@C
504     DMView - Views a vector packer or DMDA.
505 
506     Collective on DM
507 
508     Input Parameter:
509 +   dm - the DM object to view
510 -   v - the viewer
511 
512     Level: developer
513 
514 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
515 
516 @*/
517 PetscErrorCode  DMView(DM dm,PetscViewer v)
518 {
519   PetscErrorCode ierr;
520   PetscBool      isbinary;
521 
522   PetscFunctionBegin;
523   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
524   if (!v) {
525     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
526   }
527   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
528   if (isbinary) {
529     PetscInt         classid = DM_FILE_CLASSID;
530     MPI_Comm         comm;
531     PetscMPIInt      rank;
532     char             type[256];
533 
534     ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
535     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
536     if (!rank) {
537       ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
538       ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
539       ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
540     }
541   }
542   if (dm->ops->view) {
543     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
544   }
545   PetscFunctionReturn(0);
546 }
547 
548 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
549 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
550 
551 #undef __FUNCT__
552 #define __FUNCT__ "DMCreateGlobalVector"
553 /*@
554     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
555 
556     Collective on DM
557 
558     Input Parameter:
559 .   dm - the DM object
560 
561     Output Parameter:
562 .   vec - the global vector
563 
564     Level: beginner
565 
566 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
567 
568 @*/
569 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
570 {
571   PetscErrorCode ierr;
572 
573   PetscFunctionBegin;
574   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
575   if (dm->defaultSection) {
576     PetscSection gSection;
577     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
578 
579     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
580     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
581     for(p = pStart; p < pEnd; ++p) {
582       PetscInt dof, cdof;
583 
584       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
585       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
586       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
587       if ((dof > 0) && (dof-cdof != blockSize)) {
588         blockSize = 1;
589         break;
590       }
591     }
592     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
593     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
594     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
595     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
596     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
597     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
598     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
599     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
600     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
601     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
602     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
603     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
604   } else {
605     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
606   }
607   PetscFunctionReturn(0);
608 }
609 
610 #undef __FUNCT__
611 #define __FUNCT__ "DMCreateLocalVector"
612 /*@
613     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
614 
615     Not Collective
616 
617     Input Parameter:
618 .   dm - the DM object
619 
620     Output Parameter:
621 .   vec - the local vector
622 
623     Level: beginner
624 
625 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
626 
627 @*/
628 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
629 {
630   PetscErrorCode ierr;
631 
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
634   if (dm->defaultSection) {
635     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
636 
637     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
638     for(p = pStart; p < pEnd; ++p) {
639       PetscInt dof;
640 
641       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
642       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
643       if ((dof > 0) && (dof != blockSize)) {
644         blockSize = 1;
645         break;
646       }
647     }
648     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
649     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
650     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
651     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
652     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
653     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
654     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
655   } else {
656     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
657   }
658   PetscFunctionReturn(0);
659 }
660 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first call and cached on the DM.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE: this local array shadows the ltog output parameter */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      /* For each chart point, map its local dofs (in section order) to the
         point's global offset plus the component index */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* Without a section, delegate to the type-specific builder */
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
724 /*@
725    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
726 
727    Collective on DM
728 
729    Input Parameter:
730 .  da - the distributed array that provides the mapping
731 
732    Output Parameter:
733 .  ltog - the block mapping
734 
735    Level: intermediate
736 
737    Notes:
738    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
739    MatSetLocalToGlobalMappingBlock().
740 
741 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
742 @*/
743 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
744 {
745   PetscErrorCode ierr;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
749   PetscValidPointer(ltog,2);
750   if (!dm->ltogmapb) {
751     PetscInt bs;
752     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
753     if (bs > 1) {
754       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
755       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
756     } else {
757       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
758       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
759     }
760   }
761   *ltog = dm->ltogmapb;
762   PetscFunctionReturn(0);
763 }
764 
765 #undef __FUNCT__
766 #define __FUNCT__ "DMGetBlockSize"
767 /*@
768    DMGetBlockSize - Gets the inherent block size associated with a DM
769 
770    Not Collective
771 
772    Input Parameter:
773 .  dm - the DM with block structure
774 
775    Output Parameter:
776 .  bs - the block size, 1 implies no exploitable block structure
777 
778    Level: intermediate
779 
780 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
781 @*/
782 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
783 {
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
786   PetscValidPointer(bs,2);
787   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
788   *bs = dm->bs;
789   PetscFunctionReturn(0);
790 }
791 
792 #undef __FUNCT__
793 #define __FUNCT__ "DMCreateInterpolation"
794 /*@
795     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
796 
797     Collective on DM
798 
799     Input Parameter:
800 +   dm1 - the DM object
801 -   dm2 - the second, finer DM object
802 
803     Output Parameter:
804 +  mat - the interpolation
805 -  vec - the scaling (optional)
806 
807     Level: developer
808 
809     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
810         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
811 
812         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
813         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
814 
815 
816 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
817 
818 @*/
819 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
820 {
821   PetscErrorCode ierr;
822 
823   PetscFunctionBegin;
824   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
825   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
826   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "DMCreateInjection"
832 /*@
833     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
834 
835     Collective on DM
836 
837     Input Parameter:
838 +   dm1 - the DM object
839 -   dm2 - the second, finer DM object
840 
841     Output Parameter:
842 .   ctx - the injection
843 
844     Level: developer
845 
846    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
847         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
848 
849 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
850 
851 @*/
852 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
853 {
854   PetscErrorCode ierr;
855 
856   PetscFunctionBegin;
857   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
858   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
859   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
863 #undef __FUNCT__
864 #define __FUNCT__ "DMCreateColoring"
865 /*@C
866     DMCreateColoring - Gets coloring for a DMDA or DMComposite
867 
868     Collective on DM
869 
870     Input Parameter:
871 +   dm - the DM object
872 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
873 -   matype - either MATAIJ or MATBAIJ
874 
875     Output Parameter:
876 .   coloring - the coloring
877 
878     Level: developer
879 
880 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
881 
882 @*/
883 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
884 {
885   PetscErrorCode ierr;
886 
887   PetscFunctionBegin;
888   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
889   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
890   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
891   PetscFunctionReturn(0);
892 }
893 
894 #undef __FUNCT__
895 #define __FUNCT__ "DMCreateMatrix"
896 /*@C
897     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
898 
899     Collective on DM
900 
901     Input Parameter:
902 +   dm - the DM object
903 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
904             any type which inherits from one of these (such as MATAIJ)
905 
906     Output Parameter:
907 .   mat - the empty Jacobian
908 
909     Level: beginner
910 
911     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
912        do not need to do it yourself.
913 
914        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
915        the nonzero pattern call DMDASetMatPreallocateOnly()
916 
917        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
918        internally by PETSc.
919 
920        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
921        the indices for the global numbering for DMDAs which is complicated.
922 
923 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
924 
925 @*/
926 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
927 {
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
932 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
933   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
934 #endif
935   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
936   PetscValidPointer(mat,3);
937   if (dm->mattype) {
938     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
939   } else {
940     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
941   }
942   PetscFunctionReturn(0);
943 }
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
947 /*@
948   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
949     preallocated but the nonzero structure and zero values will not be set.
950 
951   Logically Collective on DMDA
952 
953   Input Parameter:
954 + dm - the DM
955 - only - PETSC_TRUE if only want preallocation
956 
957   Level: developer
958 .seealso DMCreateMatrix()
959 @*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag consulted by the type-specific DMCreateMatrix() implementations:
     when PETSC_TRUE only preallocation is performed; the nonzero structure
     and zero entries are not inserted into the matrix. */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
967 
968 #undef __FUNCT__
969 #define __FUNCT__ "DMGetWorkArray"
970 /*@C
971   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
972 
973   Not Collective
974 
975   Input Parameters:
976 + dm - the DM object
. count - The minimum size
978 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
979 
980   Output Parameter:
981 . array - the work array
982 
983   Level: developer
984 
985 .seealso DMDestroy(), DMCreate()
986 @*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the checked-in (free) list when available; otherwise
     create a fresh link (PetscNewLog() zeroes it, so mem == NULL and bytes == 0). */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the cached buffer if it is too small; previous contents need not be preserved */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out list; DMRestoreWorkArray() moves it back */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1013 
1014 #undef __FUNCT__
1015 #define __FUNCT__ "DMRestoreWorkArray"
1016 /*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1018 
1019   Not Collective
1020 
1021   Input Parameters:
1022 + dm - the DM object
. count - The minimum size
1024 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1025 
1026   Output Parameter:
1027 . array - the work array
1028 
1029   Level: developer
1030 
1031 .seealso DMDestroy(), DMCreate()
1032 @*/
1033 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1034 {
1035   DMWorkLink *p,link;
1036 
1037   PetscFunctionBegin;
1038   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1039   PetscValidPointer(mem,4);
1040   for (p=&dm->workout; (link=*p); p=&link->next) {
1041     if (link->mem == *(void**)mem) {
1042       *p = link->next;
1043       link->next = dm->workin;
1044       dm->workin = link;
1045       *(void**)mem = PETSC_NULL;
1046       PetscFunctionReturn(0);
1047     }
1048   }
1049   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1050   PetscFunctionReturn(0);
1051 }
1052 
1053 #undef __FUNCT__
1054 #define __FUNCT__ "DMSetNullSpaceConstructor"
1055 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1056 {
1057   PetscFunctionBegin;
1058   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1059   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1060   dm->nullspaceConstructors[field] = nullsp;
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 #undef __FUNCT__
1065 #define __FUNCT__ "DMCreateFieldIS"
1066 /*@C
1067   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1068 
1069   Not collective
1070 
1071   Input Parameter:
1072 . dm - the DM object
1073 
1074   Output Parameters:
1075 + numFields  - The number of fields (or PETSC_NULL if not requested)
1076 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1077 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1078 
1079   Level: intermediate
1080 
1081   Notes:
1082   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1083   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1084   PetscFree().
1085 
1086 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1087 @*/
1088 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1089 {
1090   PetscSection   section, sectionGlobal;
1091   PetscErrorCode ierr;
1092 
1093   PetscFunctionBegin;
1094   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1095   if (numFields) {
1096     PetscValidPointer(numFields,2);
1097     *numFields = 0;
1098   }
1099   if (fieldNames) {
1100     PetscValidPointer(fieldNames,3);
1101     *fieldNames = PETSC_NULL;
1102   }
1103   if (fields) {
1104     PetscValidPointer(fields,4);
1105     *fields = PETSC_NULL;
1106   }
1107   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1108   if (section) {
1109     PetscInt *fieldSizes, **fieldIndices;
1110     PetscInt  nF, f, pStart, pEnd, p;
1111 
1112     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1113     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1114     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1115     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1116     for (f = 0; f < nF; ++f) {
1117       fieldSizes[f] = 0;
1118     }
1119     for (p = pStart; p < pEnd; ++p) {
1120       PetscInt gdof;
1121 
1122       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1123       if (gdof > 0) {
1124         for (f = 0; f < nF; ++f) {
1125           PetscInt fdof, fcdof;
1126 
1127           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1128           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1129           fieldSizes[f] += fdof-fcdof;
1130         }
1131       }
1132     }
1133     for (f = 0; f < nF; ++f) {
1134       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1135       fieldSizes[f] = 0;
1136     }
1137     for (p = pStart; p < pEnd; ++p) {
1138       PetscInt gdof, goff;
1139 
1140       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1141       if (gdof > 0) {
1142         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1143         for (f = 0; f < nF; ++f) {
1144           PetscInt fdof, fcdof, fc;
1145 
1146           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1147           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1148           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1149             fieldIndices[f][fieldSizes[f]] = goff++;
1150           }
1151         }
1152       }
1153     }
1154     if (numFields) {*numFields = nF;}
1155     if (fieldNames) {
1156       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1157       for (f = 0; f < nF; ++f) {
1158         const char *fieldName;
1159 
1160         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1161         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1162       }
1163     }
1164     if (fields) {
1165       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1166       for (f = 0; f < nF; ++f) {
1167         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1168       }
1169     }
1170     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1171   } else {
1172     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1173   }
1174   PetscFunctionReturn(0);
1175 }
1176 
1177 #undef __FUNCT__
1178 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1179 /*@C
1180   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1181 
1182   Not Collective
1183 
1184   Input Parameters:
1185 + dm   - the DM object
1186 - name - the name of the field decomposition
1187 
1188   Output Parameter:
1189 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1190 
1191   Level: advanced
1192 
1193 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1194 @*/
1195 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1196 {
1197   PetscErrorCode ierr;
1198 
1199   PetscFunctionBegin;
1200   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1201   PetscValidCharPointer(name,2);
1202   PetscValidPointer(ddm,3);
1203   *ddm = PETSC_NULL;
1204   if (dm->ops->createfielddecompositiondm) {
1205     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1206   }
1207   PetscFunctionReturn(0);
1208 }
1209 
1210 #undef __FUNCT__
1211 #define __FUNCT__ "DMCreateFieldDecomposition"
1212 /*@C
1213   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1214                           corresponding to different fields: each IS contains the global indices of the dofs of the
1215                           corresponding field. The optional list of DMs define the DM for each subproblem.
1216                           Generalizes DMCreateFieldIS().
1217 
1218   Not collective
1219 
1220   Input Parameter:
1221 . dm - the DM object
1222 
1223   Output Parameters:
1224 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1225 . namelist  - The name for each field (or PETSC_NULL if not requested)
1226 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1227 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1228 
1229   Level: intermediate
1230 
1231   Notes:
1232   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1233   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1234   and all of the arrays should be freed with PetscFree().
1235 
1236 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1237 @*/
1238 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1239 {
1240   PetscErrorCode ierr;
1241 
1242   PetscFunctionBegin;
1243   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1244   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1245   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1246   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1247   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1248   if (!dm->ops->createfielddecomposition) {
1249     PetscSection section;
1250     PetscInt     numFields, f;
1251 
1252     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1253     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1254     if (section && numFields && dm->ops->createsubdm) {
1255       *len = numFields;
1256       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1257       for (f = 0; f < numFields; ++f) {
1258         const char *fieldName;
1259 
1260         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1261         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1262         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1263       }
1264     } else {
1265       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1266       /* By default there are no DMs associated with subproblems. */
1267       if (dmlist) *dmlist = PETSC_NULL;
1268     }
1269   }
1270   else {
1271     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1272   }
1273   PetscFunctionReturn(0);
1274 }
1275 
1276 #undef __FUNCT__
1277 #define __FUNCT__ "DMCreateSubDM"
1278 /*@C
1279   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1280                   The fields are defined by DMCreateFieldIS().
1281 
1282   Not collective
1283 
1284   Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the indices of the fields in this subproblem

  Output Parameters:
+ is - The global indices for the subproblem
- subdm - The DM for the subproblem
1292 
1293   Level: intermediate
1294 
1295 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1296 @*/
1297 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1298 {
1299   PetscErrorCode ierr;
1300 
1301   PetscFunctionBegin;
1302   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1303   PetscValidPointer(fields,3);
1304   if (is) {PetscValidPointer(is,4);}
1305   if (subdm) {PetscValidPointer(subdm,5);}
1306   if (dm->ops->createsubdm) {
1307     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1308   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1309   PetscFunctionReturn(0);
1310 }
1311 
1312 #undef __FUNCT__
1313 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1314 /*@C
1315   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1316 
1317   Not Collective
1318 
1319   Input Parameters:
1320 + dm   - the DM object
1321 - name - the name of the subdomain decomposition
1322 
1323   Output Parameter:
1324 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1325 
1326   Level: advanced
1327 
1328 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1329 @*/
1330 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1331 {
1332   PetscErrorCode ierr;
1333 
1334   PetscFunctionBegin;
1335   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1336   PetscValidCharPointer(name,2);
1337   PetscValidPointer(ddm,3);
1338   *ddm = PETSC_NULL;
1339   if (dm->ops->createdomaindecompositiondm) {
1340     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1341   }
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 #undef __FUNCT__
1346 #define __FUNCT__ "DMCreateDomainDecomposition"
1347 /*@C
1348   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1349                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1350                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1351                           define a nonoverlapping covering, while outer subdomains can overlap.
1352                           The optional list of DMs define the DM for each subproblem.
1353 
1354   Not collective
1355 
1356   Input Parameter:
1357 . dm - the DM object
1358 
1359   Output Parameters:
1360 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1361 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1362 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1363 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1364 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1365 
1366   Level: intermediate
1367 
1368   Notes:
1369   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1370   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1371   and all of the arrays should be freed with PetscFree().
1372 
1373 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1374 @*/
1375 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1376 {
1377   PetscErrorCode ierr;
1378 
1379   PetscFunctionBegin;
1380   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1381   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1382   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1383   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1384   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1385   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1386   if (dm->ops->createdomaindecomposition) {
1387     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1388   }
1389   PetscFunctionReturn(0);
1390 }
1391 
1392 
1393 #undef __FUNCT__
1394 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1395 /*@C
1396   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1397 
1398   Not collective
1399 
1400   Input Parameters:
1401 + dm - the DM object
1402 . n  - the number of subdomain scatters
1403 - subdms - the local subdomains
1404 
1405   Output Parameters:
1406 + n     - the number of scatters returned
1407 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1408 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1409 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1410 
1411   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1412   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1413   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1414   solution and residual data.
1415 
1416   Level: developer
1417 
1418 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1419 @*/
1420 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1421 {
1422   PetscErrorCode ierr;
1423 
1424   PetscFunctionBegin;
1425   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1426   PetscValidPointer(subdms,3);
1427   PetscValidPointer(iscat,4);
1428   PetscValidPointer(oscat,5);
1429   PetscValidPointer(gscat,6);
1430   if (dm->ops->createddscatters) {
1431     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1432   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1433   PetscFunctionReturn(0);
1434 }
1435 
1436 #undef __FUNCT__
1437 #define __FUNCT__ "DMRefine"
1438 /*@
1439   DMRefine - Refines a DM object
1440 
1441   Collective on DM
1442 
1443   Input Parameter:
1444 + dm   - the DM object
1445 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1446 
1447   Output Parameter:
1448 . dmf - the refined DM, or PETSC_NULL
1449 
1450   Note: If no refinement was done, the return value is PETSC_NULL
1451 
1452   Level: developer
1453 
1454 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1455 @*/
1456 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1457 {
1458   PetscErrorCode ierr;
1459   DMRefineHookLink link;
1460 
1461   PetscFunctionBegin;
1462   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1463   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1464   if (*dmf) {
1465     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1466     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1467     (*dmf)->ctx       = dm->ctx;
1468     (*dmf)->leveldown = dm->leveldown;
1469     (*dmf)->levelup   = dm->levelup + 1;
1470     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1471     for (link=dm->refinehook; link; link=link->next) {
1472       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1473     }
1474   }
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 #undef __FUNCT__
1479 #define __FUNCT__ "DMRefineHookAdd"
1480 /*@
1481    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1482 
1483    Logically Collective
1484 
1485    Input Arguments:
+  coarse - coarse DM on which to run a hook when interpolating to a finer level
1487 .  refinehook - function to run when setting up a coarser level
1488 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1489 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1490 
1491    Calling sequence of refinehook:
1492 $    refinehook(DM coarse,DM fine,void *ctx);
1493 
1494 +  coarse - coarse level DM
1495 .  fine - fine level DM to interpolate problem to
1496 -  ctx - optional user-defined function context
1497 
1498    Calling sequence for interphook:
1499 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1500 
1501 +  coarse - coarse level DM
1502 .  interp - matrix interpolating a coarse-level solution to the finer grid
1503 .  fine - fine level DM to update
1504 -  ctx - optional user-defined function context
1505 
1506    Level: advanced
1507 
1508    Notes:
1509    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1510 
1511    If this function is called multiple times, the hooks will be run in the order they are added.
1512 
1513 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1514 @*/
1515 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1516 {
1517   PetscErrorCode ierr;
1518   DMRefineHookLink link,*p;
1519 
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1522   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1523   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1524   link->refinehook = refinehook;
1525   link->interphook = interphook;
1526   link->ctx = ctx;
1527   link->next = PETSC_NULL;
1528   *p = link;
1529   PetscFunctionReturn(0);
1530 }
1531 
1532 #undef __FUNCT__
1533 #define __FUNCT__ "DMInterpolate"
1534 /*@
1535    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1536 
1537    Collective if any hooks are
1538 
1539    Input Arguments:
1540 +  coarse - coarser DM to use as a base
1541 .  restrct - interpolation matrix, apply using MatInterpolate()
1542 -  fine - finer DM to update
1543 
1544    Level: developer
1545 
1546 .seealso: DMRefineHookAdd(), MatInterpolate()
1547 @*/
1548 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1549 {
1550   PetscErrorCode ierr;
1551   DMRefineHookLink link;
1552 
1553   PetscFunctionBegin;
1554   for (link=fine->refinehook; link; link=link->next) {
1555     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1556   }
1557   PetscFunctionReturn(0);
1558 }
1559 
1560 #undef __FUNCT__
1561 #define __FUNCT__ "DMGetRefineLevel"
1562 /*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1564 
1565     Not Collective
1566 
1567     Input Parameter:
1568 .   dm - the DM object
1569 
1570     Output Parameter:
1571 .   level - number of refinements
1572 
1573     Level: developer
1574 
1575 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1576 
1577 @*/
1578 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1579 {
1580   PetscFunctionBegin;
1581   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1582   *level = dm->levelup;
1583   PetscFunctionReturn(0);
1584 }
1585 
1586 #undef __FUNCT__
1587 #define __FUNCT__ "DMGlobalToLocalBegin"
1588 /*@
1589     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1590 
1591     Neighbor-wise Collective on DM
1592 
1593     Input Parameters:
1594 +   dm - the DM object
1595 .   g - the global vector
1596 .   mode - INSERT_VALUES or ADD_VALUES
1597 -   l - the local vector
1598 
1599 
1600     Level: beginner
1601 
1602 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1603 
1604 @*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The star-forest broadcast path only supports insertion of values */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Communication is completed in DMGlobalToLocalEnd() */
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; the *_ALL_VALUES modes are mapped to the plain modes */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1627 
1628 #undef __FUNCT__
1629 #define __FUNCT__ "DMGlobalToLocalEnd"
1630 /*@
1631     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1632 
1633     Neighbor-wise Collective on DM
1634 
1635     Input Parameters:
1636 +   dm - the DM object
1637 .   g - the global vector
1638 .   mode - INSERT_VALUES or ADD_VALUES
1639 -   l - the local vector
1640 
1641 
1642     Level: beginner
1643 
1644 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1645 
1646 @*/
1647 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1648 {
1649   PetscSF        sf;
1650   PetscErrorCode ierr;
1651   PetscScalar    *lArray, *gArray;
1652 
1653   PetscFunctionBegin;
1654   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1655   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1656   if (sf) {
1657   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1658 
1659     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1660     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1661     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1662     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1663     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1664   } else {
1665     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1666   }
1667   PetscFunctionReturn(0);
1668 }
1669 
1670 #undef __FUNCT__
1671 #define __FUNCT__ "DMLocalToGlobalBegin"
1672 /*@
1673     DMLocalToGlobalBegin - updates global vectors from local vectors
1674 
1675     Neighbor-wise Collective on DM
1676 
1677     Input Parameters:
1678 +   dm - the DM object
1679 .   l - the local vector
1680 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1681            base point.
-   g - the global vector
1683 
1684     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1685            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1686            global array to the final global array with VecAXPY().
1687 
1688     Level: beginner
1689 
1690 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1691 
1692 @*/
1693 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1694 {
1695   PetscSF        sf;
1696   PetscErrorCode ierr;
1697 
1698   PetscFunctionBegin;
1699   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1700   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1701   if (sf) {
1702     MPI_Op       op;
1703     PetscScalar *lArray, *gArray;
1704 
1705     switch(mode) {
1706     case INSERT_VALUES:
1707     case INSERT_ALL_VALUES:
1708 #if defined(PETSC_HAVE_MPI_REPLACE)
1709       op = MPI_REPLACE; break;
1710 #else
1711       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1712 #endif
1713     case ADD_VALUES:
1714     case ADD_ALL_VALUES:
1715       op = MPI_SUM; break;
1716   default:
1717     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1718     }
1719     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1720     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1721     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1722     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1723     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1724   } else {
1725     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1726   }
1727   PetscFunctionReturn(0);
1728 }
1729 
1730 #undef __FUNCT__
1731 #define __FUNCT__ "DMLocalToGlobalEnd"
1732 /*@
1733     DMLocalToGlobalEnd - updates global vectors from local vectors
1734 
1735     Neighbor-wise Collective on DM
1736 
1737     Input Parameters:
1738 +   dm - the DM object
1739 .   l - the local vector
1740 .   mode - INSERT_VALUES or ADD_VALUES
1741 -   g - the global vector
1742 
1743 
1744     Level: beginner
1745 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1747 
1748 @*/
1749 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1750 {
1751   PetscSF        sf;
1752   PetscErrorCode ierr;
1753 
1754   PetscFunctionBegin;
1755   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1756   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1757   if (sf) {
1758     MPI_Op       op;
1759     PetscScalar *lArray, *gArray;
1760 
1761     switch(mode) {
1762     case INSERT_VALUES:
1763     case INSERT_ALL_VALUES:
1764 #if defined(PETSC_HAVE_MPI_REPLACE)
1765       op = MPI_REPLACE; break;
1766 #else
1767       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1768 #endif
1769     case ADD_VALUES:
1770     case ADD_ALL_VALUES:
1771       op = MPI_SUM; break;
1772     default:
1773       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1774     }
1775     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1776     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1777     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1778     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1779     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1780   } else {
1781     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1782   }
1783   PetscFunctionReturn(0);
1784 }
1785 
1786 #undef __FUNCT__
1787 #define __FUNCT__ "DMCoarsen"
1788 /*@
1789     DMCoarsen - Coarsens a DM object
1790 
1791     Collective on DM
1792 
1793     Input Parameter:
1794 +   dm - the DM object
1795 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1796 
1797     Output Parameter:
1798 .   dmc - the coarsened DM
1799 
1800     Level: developer
1801 
1802 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1803 
1804 @*/
1805 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1806 {
1807   PetscErrorCode ierr;
1808   DMCoarsenHookLink link;
1809 
1810   PetscFunctionBegin;
1811   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1812   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1813   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1814   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1815   (*dmc)->ctx       = dm->ctx;
1816   (*dmc)->levelup   = dm->levelup;
1817   (*dmc)->leveldown = dm->leveldown + 1;
1818   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1819   for (link=dm->coarsenhook; link; link=link->next) {
1820     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1821   }
1822   PetscFunctionReturn(0);
1823 }
1824 
1825 #undef __FUNCT__
1826 #define __FUNCT__ "DMCoarsenHookAdd"
1827 /*@
1828    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1829 
1830    Logically Collective
1831 
1832    Input Arguments:
1833 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1834 .  coarsenhook - function to run when setting up a coarser level
1835 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1836 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1837 
1838    Calling sequence of coarsenhook:
1839 $    coarsenhook(DM fine,DM coarse,void *ctx);
1840 
1841 +  fine - fine level DM
1842 .  coarse - coarse level DM to restrict problem to
1843 -  ctx - optional user-defined function context
1844 
1845    Calling sequence for restricthook:
1846 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1847 
1848 +  fine - fine level DM
1849 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1850 .  rscale - scaling vector for restriction
1851 .  inject - matrix restricting by injection
1852 .  coarse - coarse level DM to update
1853 -  ctx - optional user-defined function context
1854 
1855    Level: advanced
1856 
1857    Notes:
1858    This function is only needed if auxiliary data needs to be set up on coarse grids.
1859 
1860    If this function is called multiple times, the hooks will be run in the order they are added.
1861 
1862    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1863    extract the finest level information from its context (instead of from the SNES).
1864 
1865 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1866 @*/
1867 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1868 {
1869   PetscErrorCode ierr;
1870   DMCoarsenHookLink link,*p;
1871 
1872   PetscFunctionBegin;
1873   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1874   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1875   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1876   link->coarsenhook = coarsenhook;
1877   link->restricthook = restricthook;
1878   link->ctx = ctx;
1879   link->next = PETSC_NULL;
1880   *p = link;
1881   PetscFunctionReturn(0);
1882 }
1883 
1884 #undef __FUNCT__
1885 #define __FUNCT__ "DMRestrict"
1886 /*@
1887    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1888 
1889    Collective if any hooks are
1890 
1891    Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1896 
1897    Level: developer
1898 
1899 .seealso: DMCoarsenHookAdd(), MatRestrict()
1900 @*/
1901 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1902 {
1903   PetscErrorCode ierr;
1904   DMCoarsenHookLink link;
1905 
1906   PetscFunctionBegin;
1907   for (link=fine->coarsenhook; link; link=link->next) {
1908     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1909   }
1910   PetscFunctionReturn(0);
1911 }
1912 
1913 #undef __FUNCT__
1914 #define __FUNCT__ "DMBlockRestrictHookAdd"
1915 /*@
1916    DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1917 
1918    Logically Collective
1919 
1920    Input Arguments:
1921 +  global - global DM
1922 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1923 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1924 
1925    Calling sequence for restricthook:
1926 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1927 
1928 +  global - global DM
1929 .  out    - scatter to the outer (with ghost and overlap points) block vector
1930 .  in     - scatter to block vector values only owned locally
1931 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1932 -  ctx - optional user-defined function context
1933 
1934    Level: advanced
1935 
1936    Notes:
1937    This function is only needed if auxiliary data needs to be set up on coarse grids.
1938 
1939    If this function is called multiple times, the hooks will be run in the order they are added.
1940 
1941    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1942    extract the finest level information from its context (instead of from the SNES).
1943 
1944 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1945 @*/
1946 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1947 {
1948   PetscErrorCode ierr;
1949   DMBlockRestrictHookLink link,*p;
1950 
1951   PetscFunctionBegin;
1952   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1953   for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1954   ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
1955   link->restricthook = restricthook;
1956   link->ctx = ctx;
1957   link->next = PETSC_NULL;
1958   *p = link;
1959   PetscFunctionReturn(0);
1960 }
1961 
1962 #undef __FUNCT__
1963 #define __FUNCT__ "DMBlockRestrict"
1964 /*@
1965    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1966 
1967    Collective if any hooks are
1968 
   Input Arguments:
+  global - global DM
.  in     - VecScatter passed as the first scatter argument to each hook (see DMBlockRestrictHookAdd())
.  out    - VecScatter passed as the second scatter argument to each hook
-  block  - block DM to update
1974 
1975    Level: developer
1976 
1977 .seealso: DMCoarsenHookAdd(), MatRestrict()
1978 @*/
1979 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1980 {
1981   PetscErrorCode ierr;
1982   DMBlockRestrictHookLink link;
1983 
1984   PetscFunctionBegin;
1985   for (link=global->blockrestricthook; link; link=link->next) {
1986     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1987   }
1988   PetscFunctionReturn(0);
1989 }
1990 
1991 #undef __FUNCT__
1992 #define __FUNCT__ "DMGetCoarsenLevel"
1993 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
1995 
1996     Not Collective
1997 
1998     Input Parameter:
1999 .   dm - the DM object
2000 
2001     Output Parameter:
2002 .   level - number of coarsenings
2003 
2004     Level: developer
2005 
2006 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2007 
2008 @*/
2009 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2010 {
2011   PetscFunctionBegin;
2012   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2013   *level = dm->leveldown;
2014   PetscFunctionReturn(0);
2015 }
2016 
2017 
2018 
2019 #undef __FUNCT__
2020 #define __FUNCT__ "DMRefineHierarchy"
2021 /*@C
2022     DMRefineHierarchy - Refines a DM object, all levels at once
2023 
2024     Collective on DM
2025 
2026     Input Parameter:
2027 +   dm - the DM object
2028 -   nlevels - the number of levels of refinement
2029 
2030     Output Parameter:
2031 .   dmf - the refined DM hierarchy
2032 
2033     Level: developer
2034 
2035 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2036 
2037 @*/
2038 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2039 {
2040   PetscErrorCode ierr;
2041 
2042   PetscFunctionBegin;
2043   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2044   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2045   if (nlevels == 0) PetscFunctionReturn(0);
2046   if (dm->ops->refinehierarchy) {
2047     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2048   } else if (dm->ops->refine) {
2049     PetscInt i;
2050 
2051     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2052     for (i=1; i<nlevels; i++) {
2053       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2054     }
2055   } else {
2056     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2057   }
2058   PetscFunctionReturn(0);
2059 }
2060 
2061 #undef __FUNCT__
2062 #define __FUNCT__ "DMCoarsenHierarchy"
2063 /*@C
2064     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2065 
2066     Collective on DM
2067 
2068     Input Parameter:
2069 +   dm - the DM object
2070 -   nlevels - the number of levels of coarsening
2071 
2072     Output Parameter:
2073 .   dmc - the coarsened DM hierarchy
2074 
2075     Level: developer
2076 
2077 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2078 
2079 @*/
2080 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2081 {
2082   PetscErrorCode ierr;
2083 
2084   PetscFunctionBegin;
2085   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2086   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2087   if (nlevels == 0) PetscFunctionReturn(0);
2088   PetscValidPointer(dmc,3);
2089   if (dm->ops->coarsenhierarchy) {
2090     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2091   } else if (dm->ops->coarsen) {
2092     PetscInt i;
2093 
2094     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2095     for (i=1; i<nlevels; i++) {
2096       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2097     }
2098   } else {
2099     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2100   }
2101   PetscFunctionReturn(0);
2102 }
2103 
2104 #undef __FUNCT__
2105 #define __FUNCT__ "DMCreateAggregates"
2106 /*@
2107    DMCreateAggregates - Gets the aggregates that map between
2108    grids associated with two DMs.
2109 
2110    Collective on DM
2111 
2112    Input Parameters:
2113 +  dmc - the coarse grid DM
2114 -  dmf - the fine grid DM
2115 
2116    Output Parameters:
2117 .  rest - the restriction matrix (transpose of the projection matrix)
2118 
2119    Level: intermediate
2120 
2121 .keywords: interpolation, restriction, multigrid
2122 
2123 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2124 @*/
2125 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2126 {
2127   PetscErrorCode ierr;
2128 
2129   PetscFunctionBegin;
2130   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2131   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2132   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2133   PetscFunctionReturn(0);
2134 }
2135 
2136 #undef __FUNCT__
2137 #define __FUNCT__ "DMSetApplicationContextDestroy"
2138 /*@C
2139     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2140 
2141     Not Collective
2142 
2143     Input Parameters:
2144 +   dm - the DM object
2145 -   destroy - the destroy function
2146 
2147     Level: intermediate
2148 
2149 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2150 
2151 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Callback is invoked on the stored application context when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2159 
2160 #undef __FUNCT__
2161 #define __FUNCT__ "DMSetApplicationContext"
2162 /*@
2163     DMSetApplicationContext - Set a user context into a DM object
2164 
2165     Not Collective
2166 
2167     Input Parameters:
2168 +   dm - the DM object
2169 -   ctx - the user context
2170 
2171     Level: intermediate
2172 
2173 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2174 
2175 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not take ownership of ctx; the caller (or the destroy callback set
     with DMSetApplicationContextDestroy()) is responsible for its lifetime */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2183 
2184 #undef __FUNCT__
2185 #define __FUNCT__ "DMGetApplicationContext"
2186 /*@
2187     DMGetApplicationContext - Gets a user context from a DM object
2188 
2189     Not Collective
2190 
2191     Input Parameter:
2192 .   dm - the DM object
2193 
2194     Output Parameter:
2195 .   ctx - the user context
2196 
2197     Level: intermediate
2198 
2199 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2200 
2201 @*/
2202 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2203 {
2204   PetscFunctionBegin;
2205   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2206   *(void**)ctx = dm->ctx;
2207   PetscFunctionReturn(0);
2208 }
2209 
2210 #undef __FUNCT__
2211 #define __FUNCT__ "DMSetVariableBounds"
2212 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2214 
2215     Logically Collective on DM
2216 
2217     Input Parameter:
2218 +   dm - the DM object
2219 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2220 
2221     Level: intermediate
2222 
2223 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2224          DMSetJacobian()
2225 
2226 @*/
2227 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2228 {
2229   PetscFunctionBegin;
2230   dm->ops->computevariablebounds = f;
2231   PetscFunctionReturn(0);
2232 }
2233 
2234 #undef __FUNCT__
2235 #define __FUNCT__ "DMHasVariableBounds"
2236 /*@
2237     DMHasVariableBounds - does the DM object have a variable bounds function?
2238 
2239     Not Collective
2240 
2241     Input Parameter:
.   dm - the DM object
2243 
2244     Output Parameter:
2245 .   flg - PETSC_TRUE if the variable bounds function exists
2246 
2247     Level: developer
2248 
2249 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2250 
2251 @*/
2252 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2253 {
2254   PetscFunctionBegin;
2255   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2256   PetscFunctionReturn(0);
2257 }
2258 
2259 #undef __FUNCT__
2260 #define __FUNCT__ "DMComputeVariableBounds"
2261 /*@C
2262     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2263 
2264     Logically Collective on DM
2265 
2266     Input Parameters:
+   dm - the DM object
2268 -   x  - current solution at which the bounds are computed
2269 
2270     Output parameters:
2271 +   xl - lower bound
2272 -   xu - upper bound
2273 
2274     Level: intermediate
2275 
2276 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2277 
2278 @*/
2279 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2280 {
2281   PetscErrorCode ierr;
2282   PetscFunctionBegin;
2283   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2284   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2285   if (dm->ops->computevariablebounds) {
2286     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2287   }
2288   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2289   PetscFunctionReturn(0);
2290 }
2291 
2292 #undef __FUNCT__
2293 #define __FUNCT__ "DMHasColoring"
2294 /*@
2295     DMHasColoring - does the DM object have a method of providing a coloring?
2296 
2297     Not Collective
2298 
2299     Input Parameter:
2300 .   dm - the DM object
2301 
2302     Output Parameter:
2303 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2304 
2305     Level: developer
2306 
2307 .seealso DMHasFunction(), DMCreateColoring()
2308 
2309 @*/
2310 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2311 {
2312   PetscFunctionBegin;
2313   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2314   PetscFunctionReturn(0);
2315 }
2316 
2317 #undef  __FUNCT__
2318 #define __FUNCT__ "DMSetVec"
2319 /*@C
2320     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2321 
2322     Collective on DM
2323 
2324     Input Parameter:
2325 +   dm - the DM object
2326 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2327 
2328     Level: developer
2329 
2330 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2331 
2332 @*/
2333 PetscErrorCode  DMSetVec(DM dm,Vec x)
2334 {
2335   PetscErrorCode ierr;
2336   PetscFunctionBegin;
2337   if (x) {
2338     if (!dm->x) {
2339       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2340     }
2341     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2342   }
2343   else if (dm->x) {
2344     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2345   }
2346   PetscFunctionReturn(0);
2347 }
2348 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
/* Set once DMRegisterAll() has run; reset by DMRegisterDestroy() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2351 
2352 #undef __FUNCT__
2353 #define __FUNCT__ "DMSetType"
2354 /*@C
2355   DMSetType - Builds a DM, for a particular DM implementation.
2356 
2357   Collective on DM
2358 
2359   Input Parameters:
2360 + dm     - The DM object
2361 - method - The name of the DM type
2362 
2363   Options Database Key:
2364 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2365 
2366   Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).
2368 
2369   Level: intermediate
2370 
2371 .keywords: DM, set, type
2372 .seealso: DMGetType(), DMCreate()
2373 @*/
2374 PetscErrorCode  DMSetType(DM dm, DMType method)
2375 {
2376   PetscErrorCode (*r)(DM);
2377   PetscBool      match;
2378   PetscErrorCode ierr;
2379 
2380   PetscFunctionBegin;
2381   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2382   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2383   if (match) PetscFunctionReturn(0);
2384 
2385   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2386   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2387   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2388 
2389   if (dm->ops->destroy) {
2390     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2391     dm->ops->destroy = PETSC_NULL;
2392   }
2393   ierr = (*r)(dm);CHKERRQ(ierr);
2394   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2395   PetscFunctionReturn(0);
2396 }
2397 
2398 #undef __FUNCT__
2399 #define __FUNCT__ "DMGetType"
2400 /*@C
2401   DMGetType - Gets the DM type name (as a string) from the DM.
2402 
2403   Not Collective
2404 
2405   Input Parameter:
2406 . dm  - The DM
2407 
2408   Output Parameter:
2409 . type - The DM type name
2410 
2411   Level: intermediate
2412 
2413 .keywords: DM, get, type, name
2414 .seealso: DMSetType(), DMCreate()
2415 @*/
2416 PetscErrorCode  DMGetType(DM dm, DMType *type)
2417 {
2418   PetscErrorCode ierr;
2419 
2420   PetscFunctionBegin;
2421   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2422   PetscValidCharPointer(type,2);
2423   if (!DMRegisterAllCalled) {
2424     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2425   }
2426   *type = ((PetscObject)dm)->type_name;
2427   PetscFunctionReturn(0);
2428 }
2429 
2430 #undef __FUNCT__
2431 #define __FUNCT__ "DMConvert"
2432 /*@C
2433   DMConvert - Converts a DM to another DM, either of the same or different type.
2434 
2435   Collective on DM
2436 
2437   Input Parameters:
2438 + dm - the DM
2439 - newtype - new DM type (use "same" for the same type)
2440 
2441   Output Parameter:
2442 . M - pointer to new DM
2443 
2444   Notes:
2445   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2446   the MPI communicator of the generated DM is always the same as the communicator
2447   of the input DM.
2448 
2449   Level: intermediate
2450 
2451 .seealso: DMCreate()
2452 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below; conversion
     always proceeds through the registered-converter lookup -- confirm this is intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with step 3 disabled, B created in step 2 is not destroyed before
       erroring here, leaking the temporary DM -- confirm and fix on this error path */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2525 
2526 /*--------------------------------------------------------------------------------------------------------------------*/
2527 
2528 #undef __FUNCT__
2529 #define __FUNCT__ "DMRegister"
2530 /*@C
2531   DMRegister - See DMRegisterDynamic()
2532 
2533   Level: advanced
2534 @*/
2535 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2536 {
2537   char fullname[PETSC_MAX_PATH_LEN];
2538   PetscErrorCode ierr;
2539 
2540   PetscFunctionBegin;
2541   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2542   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2543   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2544   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2545   PetscFunctionReturn(0);
2546 }
2547 
2548 
2549 /*--------------------------------------------------------------------------------------------------------------------*/
2550 #undef __FUNCT__
2551 #define __FUNCT__ "DMRegisterDestroy"
2552 /*@C
2553    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2554 
2555    Not Collective
2556 
2557    Level: advanced
2558 
2559 .keywords: DM, register, destroy
2560 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2561 @*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow DMRegisterAll() to repopulate the type list later */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2571 
2572 #undef __FUNCT__
2573 #define __FUNCT__ "DMLoad"
2574 /*@C
2575   DMLoad - Loads a DM that has been stored in binary  with DMView().
2576 
2577   Collective on PetscViewer
2578 
2579   Input Parameters:
2580 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2581            some related function before a call to DMLoad().
2582 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2583            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2584 
2585    Level: intermediate
2586 
2587   Notes:
2588    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2589 
2590   Notes for advanced users:
2591   Most users should not need to know the details of the binary storage
2592   format, since DMLoad() and DMView() completely hide these details.
2593   But for anyone who's interested, the standard binary matrix storage
2594   format is
2595 .vb
2596      has not yet been determined
2597 .ve
2598 
2599 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2600 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer is supported here, despite the docstring mentioning HDF5 */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File layout: classid marker, then the type name, then type-specific data read by ops->load */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  /* The type stored in the file overrides any type previously set on newdm */
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2621 
2622 /******************************** FEM Support **********************************/
2623 
2624 #undef __FUNCT__
2625 #define __FUNCT__ "DMPrintCellVector"
2626 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2627   PetscInt       f;
2628   PetscErrorCode ierr;
2629 
2630   PetscFunctionBegin;
2631   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2632   for (f = 0; f < len; ++f) {
2633     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2634   }
2635   PetscFunctionReturn(0);
2636 }
2637 
2638 #undef __FUNCT__
2639 #define __FUNCT__ "DMPrintCellMatrix"
2640 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2641   PetscInt       f, g;
2642   PetscErrorCode ierr;
2643 
2644   PetscFunctionBegin;
2645   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2646   for (f = 0; f < rows; ++f) {
2647     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2648     for (g = 0; g < cols; ++g) {
2649       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2650     }
2651     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2652   }
2653   PetscFunctionReturn(0);
2654 }
2655 
2656 #undef __FUNCT__
2657 #define __FUNCT__ "DMGetDefaultSection"
2658 /*@
2659   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2660 
2661   Input Parameter:
2662 . dm - The DM
2663 
2664   Output Parameter:
2665 . section - The PetscSection
2666 
2667   Level: intermediate
2668 
2669   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2670 
2671 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2672 @*/
2673 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2674   PetscFunctionBegin;
2675   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2676   PetscValidPointer(section, 2);
2677   *section = dm->defaultSection;
2678   PetscFunctionReturn(0);
2679 }
2680 
2681 #undef __FUNCT__
2682 #define __FUNCT__ "DMSetDefaultSection"
2683 /*@
2684   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2685 
2686   Input Parameters:
2687 + dm - The DM
2688 - section - The PetscSection
2689 
2690   Level: intermediate
2691 
2692   Note: Any existing Section will be destroyed
2693 
2694 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2695 @*/
2696 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2697   PetscInt       numFields;
2698   PetscInt       f;
2699   PetscErrorCode ierr;
2700 
2701   PetscFunctionBegin;
2702   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2703   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2704   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2705   dm->defaultSection = section;
2706   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2707   if (numFields) {
2708     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2709     for (f = 0; f < numFields; ++f) {
2710       const char *name;
2711 
2712       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2713       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2714     }
2715   }
2716   PetscFunctionReturn(0);
2717 }
2718 
2719 #undef __FUNCT__
2720 #define __FUNCT__ "DMGetDefaultGlobalSection"
2721 /*@
2722   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2723 
2724   Collective on DM
2725 
2726   Input Parameter:
2727 . dm - The DM
2728 
2729   Output Parameter:
2730 . section - The PetscSection
2731 
2732   Level: intermediate
2733 
2734   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2735 
2736 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2737 @*/
2738 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
2739   PetscErrorCode ierr;
2740 
2741   PetscFunctionBegin;
2742   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2743   PetscValidPointer(section, 2);
2744   if (!dm->defaultGlobalSection) {
2745     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2746     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2747     ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2748   }
2749   *section = dm->defaultGlobalSection;
2750   PetscFunctionReturn(0);
2751 }
2752 
2753 #undef __FUNCT__
2754 #define __FUNCT__ "DMSetDefaultGlobalSection"
2755 /*@
2756   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2757 
2758   Input Parameters:
2759 + dm - The DM
2760 - section - The PetscSection
2761 
2762   Level: intermediate
2763 
2764   Note: Any existing Section will be destroyed
2765 
2766 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2767 @*/
2768 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2769   PetscErrorCode ierr;
2770 
2771   PetscFunctionBegin;
2772   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2773   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2774   dm->defaultGlobalSection = section;
2775   PetscFunctionReturn(0);
2776 }
2777 
2778 #undef __FUNCT__
2779 #define __FUNCT__ "DMGetDefaultSF"
2780 /*@
2781   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2782   it is created from the default PetscSection layouts in the DM.
2783 
2784   Input Parameter:
2785 . dm - The DM
2786 
2787   Output Parameter:
2788 . sf - The PetscSF
2789 
2790   Level: intermediate
2791 
2792   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2793 
2794 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2795 @*/
2796 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2797   PetscInt       nroots;
2798   PetscErrorCode ierr;
2799 
2800   PetscFunctionBegin;
2801   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2802   PetscValidPointer(sf, 2);
2803   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2804   if (nroots < 0) {
2805     PetscSection section, gSection;
2806 
2807     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2808     if (section) {
2809       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2810       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2811     } else {
2812       *sf = PETSC_NULL;
2813       PetscFunctionReturn(0);
2814     }
2815   }
2816   *sf = dm->defaultSF;
2817   PetscFunctionReturn(0);
2818 }
2819 
2820 #undef __FUNCT__
2821 #define __FUNCT__ "DMSetDefaultSF"
2822 /*@
2823   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2824 
2825   Input Parameters:
2826 + dm - The DM
2827 - sf - The PetscSF
2828 
2829   Level: intermediate
2830 
2831   Note: Any previous SF is destroyed
2832 
2833 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2834 @*/
2835 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2836   PetscErrorCode ierr;
2837 
2838   PetscFunctionBegin;
2839   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2840   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2841   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2842   dm->defaultSF = sf;
2843   PetscFunctionReturn(0);
2844 }
2845 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: One SF leaf is created for every unconstrained dof that appears in the
  global section; roots are the dofs owned by this process.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots: dofs stored locally according to the global section (constraints excluded) */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout over all processes so a global offset can be mapped to its owner rank */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a remotely owned point as -(dof+1) */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local[] (local dof offsets) and remote[] (owner rank and root index) */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the local offset of every unconstrained dof; c counts skipped constrained indices */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset and locate the owner in the layout */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact range start: owner is the rank preceding the insertion point */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: leaves point at this process's own roots */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2934 
2935 #undef __FUNCT__
2936 #define __FUNCT__ "DMGetPointSF"
2937 /*@
2938   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2939 
2940   Input Parameter:
2941 . dm - The DM
2942 
2943   Output Parameter:
2944 . sf - The PetscSF
2945 
2946   Level: intermediate
2947 
2948   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2949 
2950 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2951 @*/
2952 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
2953   PetscFunctionBegin;
2954   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2955   PetscValidPointer(sf, 2);
2956   *sf = dm->sf;
2957   PetscFunctionReturn(0);
2958 }
2959 
2960 #undef __FUNCT__
2961 #define __FUNCT__ "DMSetPointSF"
2962 /*@
2963   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2964 
2965   Input Parameters:
2966 + dm - The DM
2967 - sf - The PetscSF
2968 
2969   Level: intermediate
2970 
2971 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2972 @*/
2973 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2974   PetscErrorCode ierr;
2975 
2976   PetscFunctionBegin;
2977   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2978   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2979   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2980   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
2981   dm->sf = sf;
2982   PetscFunctionReturn(0);
2983 }
2984 
2985 #undef __FUNCT__
2986 #define __FUNCT__ "DMGetNumFields"
2987 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
2988 {
2989   PetscFunctionBegin;
2990   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2991   PetscValidPointer(numFields, 2);
2992   *numFields = dm->numFields;
2993   PetscFunctionReturn(0);
2994 }
2995 
2996 #undef __FUNCT__
2997 #define __FUNCT__ "DMSetNumFields"
2998 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
2999 {
3000   PetscInt       f;
3001   PetscErrorCode ierr;
3002 
3003   PetscFunctionBegin;
3004   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3005   for (f = 0; f < dm->numFields; ++f) {
3006     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3007   }
3008   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3009   dm->numFields = numFields;
3010   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3011   for (f = 0; f < dm->numFields; ++f) {
3012     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3013   }
3014   PetscFunctionReturn(0);
3015 }
3016 
3017 #undef __FUNCT__
3018 #define __FUNCT__ "DMGetField"
3019 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3020 {
3021   PetscFunctionBegin;
3022   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3023   PetscValidPointer(field, 2);
3024   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3025   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3026   *field = dm->fields[f];
3027   PetscFunctionReturn(0);
3028 }
3029 
3030 #undef __FUNCT__
3031 #define __FUNCT__ "DMSetCoordinates"
3032 /*@
3033   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3034 
3035   Collective on DM
3036 
3037   Input Parameters:
3038 + dm - the DM
3039 - c - coordinate vector
3040 
3041   Note:
3042   The coordinates do include those for ghost points, which are in the local vector
3043 
3044   Level: intermediate
3045 
3046 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3047 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3048 @*/
3049 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3050 {
3051   PetscErrorCode ierr;
3052 
3053   PetscFunctionBegin;
3054   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3055   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3056   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3057   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3058   dm->coordinates = c;
3059   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3060   PetscFunctionReturn(0);
3061 }
3062 
3063 #undef __FUNCT__
3064 #define __FUNCT__ "DMSetCoordinatesLocal"
3065 /*@
3066   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3067 
3068   Collective on DM
3069 
3070    Input Parameters:
3071 +  dm - the DM
3072 -  c - coordinate vector
3073 
3074   Note:
3075   The coordinates of ghost points can be set using DMSetCoordinates()
3076   followed by DMGetCoordinatesLocal(). This is intended to enable the
3077   setting of ghost coordinates outside of the domain.
3078 
3079   Level: intermediate
3080 
3081 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3082 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3083 @*/
3084 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3085 {
3086   PetscErrorCode ierr;
3087 
3088   PetscFunctionBegin;
3089   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3090   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3091   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3092   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3093   dm->coordinatesLocal = c;
3094   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3095   PetscFunctionReturn(0);
3096 }
3097 
3098 #undef __FUNCT__
3099 #define __FUNCT__ "DMGetCoordinates"
3100 /*@
3101   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3102 
3103   Not Collective
3104 
3105   Input Parameter:
3106 . dm - the DM
3107 
3108   Output Parameter:
3109 . c - global coordinate vector
3110 
3111   Note:
3112   This is a borrowed reference, so the user should NOT destroy this vector
3113 
3114   Each process has only the local coordinates (does NOT have the ghost coordinates).
3115 
3116   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3117   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3118 
3119   Level: intermediate
3120 
3121 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3122 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3123 @*/
3124 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3125 {
3126   PetscErrorCode ierr;
3127 
3128   PetscFunctionBegin;
3129   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3130   PetscValidPointer(c,2);
3131   if (!dm->coordinates && dm->coordinatesLocal) {
3132     DM cdm = PETSC_NULL;
3133 
3134     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3135     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3136     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3137     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3138     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3139   }
3140   *c = dm->coordinates;
3141   PetscFunctionReturn(0);
3142 }
3143 
3144 #undef __FUNCT__
3145 #define __FUNCT__ "DMGetCoordinatesLocal"
3146 /*@
3147   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3148 
3149   Collective on DM
3150 
3151   Input Parameter:
3152 . dm - the DM
3153 
3154   Output Parameter:
3155 . c - coordinate vector
3156 
3157   Note:
3158   This is a borrowed reference, so the user should NOT destroy this vector
3159 
3160   Each process has the local and ghost coordinates
3161 
3162   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3163   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3164 
3165   Level: intermediate
3166 
3167 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3168 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3169 @*/
3170 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3171 {
3172   PetscErrorCode ierr;
3173 
3174   PetscFunctionBegin;
3175   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3176   PetscValidPointer(c,2);
3177   if (!dm->coordinatesLocal && dm->coordinates) {
3178     DM cdm = PETSC_NULL;
3179 
3180     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3181     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3182     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3183     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3184     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3185   }
3186   *c = dm->coordinatesLocal;
3187   PetscFunctionReturn(0);
3188 }
3189 
3190 #undef __FUNCT__
3191 #define __FUNCT__ "DMGetCoordinateDM"
3192 /*@
3193   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3194 
3195   Collective on DM
3196 
3197   Input Parameter:
3198 . dm - the DM
3199 
3200   Output Parameter:
3201 . cdm - coordinate DM
3202 
3203   Level: intermediate
3204 
3205 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3206 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3207 @*/
3208 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3209 {
3210   PetscErrorCode ierr;
3211 
3212   PetscFunctionBegin;
3213   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3214   PetscValidPointer(cdm,2);
3215   if (!dm->coordinateDM) {
3216     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3217     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3218   }
3219   *cdm = dm->coordinateDM;
3220   PetscFunctionReturn(0);
3221 }
3222