xref: /petsc/src/dm/interface/dm.c (revision 5c6c1daec53e1d9ab0bec9db5309fd8fc7645b8d)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMCreate"
8 /*@
9   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
10 
   If you never call DMSetType() it will generate an
   error when you try to use the DM.
13 
14   Collective on MPI_Comm
15 
16   Input Parameter:
17 . comm - The communicator for the DM object
18 
19   Output Parameter:
20 . dm - The DM object
21 
22   Level: beginner
23 
24 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
25 @*/
26 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
27 {
28   DM             v;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   PetscValidPointer(dm,2);
33   *dm = PETSC_NULL;
34 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
35   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
36   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
37   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
38 #endif
39 
40   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
41   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
42 
43 
44   v->ltogmap      = PETSC_NULL;
45   v->ltogmapb     = PETSC_NULL;
46   v->bs           = 1;
47   v->coloringtype = IS_COLORING_GLOBAL;
48   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
49   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
50   v->defaultSection       = PETSC_NULL;
51   v->defaultGlobalSection = PETSC_NULL;
52   {
53     PetscInt i;
54     for (i = 0; i < 10; ++i) {
55       v->nullspaceConstructors[i] = PETSC_NULL;
56     }
57   }
58   v->numFields = 0;
59   v->fields    = PETSC_NULL;
60 
61   *dm = v;
62   PetscFunctionReturn(0);
63 }
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "DMSetVecType"
67 /*@C
68        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
69 
70    Logically Collective on DMDA
71 
   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP
75 
76    Options Database:
77 .   -dm_vec_type ctype
78 
79    Level: intermediate
80 
81 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
82 @*/
83 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
84 {
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   PetscValidHeaderSpecific(da,DM_CLASSID,1);
89   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
90   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
91   PetscFunctionReturn(0);
92 }
93 
94 #undef __FUNCT__
95 #define __FUNCT__ "VecGetDM"
96 /*@
97   VecGetDM - Gets the DM defining the data layout of the vector
98 
99   Not collective
100 
101   Input Parameter:
102 . v - The Vec
103 
104   Output Parameter:
105 . dm - The DM
106 
107   Level: intermediate
108 
109 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
110 @*/
111 PetscErrorCode VecGetDM(Vec v, DM *dm)
112 {
113   PetscErrorCode ierr;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
117   PetscValidPointer(dm,2);
118   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "DMSetMatType"
150 /*@C
151        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
152 
153    Logically Collective on DM
154 
   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type
158 
159    Options Database:
160 .   -dm_mat_type ctype
161 
162    Level: intermediate
163 
164 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
165 @*/
166 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
167 {
168   PetscErrorCode ierr;
169   PetscFunctionBegin;
170   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
171   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
172   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatGetDM"
178 /*@
179   MatGetDM - Gets the DM defining the data layout of the matrix
180 
181   Not collective
182 
183   Input Parameter:
184 . A - The Mat
185 
186   Output Parameter:
187 . dm - The DM
188 
189   Level: intermediate
190 
191 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
192 @*/
193 PetscErrorCode MatGetDM(Mat A, DM *dm)
194 {
195   PetscErrorCode ierr;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
199   PetscValidPointer(dm,2);
200   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
201   PetscFunctionReturn(0);
202 }
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "MatSetDM"
206 /*@
207   MatSetDM - Sets the DM defining the data layout of the matrix
208 
209   Not collective
210 
211   Input Parameters:
212 + A - The Mat
213 - dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatSetDM(Mat A, DM dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
226   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "DMSetOptionsPrefix"
232 /*@C
233    DMSetOptionsPrefix - Sets the prefix used for searching for all
234    DMDA options in the database.
235 
236    Logically Collective on DMDA
237 
   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names
241 
242    Notes:
243    A hyphen (-) must NOT be given at the beginning of the prefix name.
244    The first character of all runtime options is AUTOMATICALLY the hyphen.
245 
246    Level: advanced
247 
248 .keywords: DMDA, set, options, prefix, database
249 
250 .seealso: DMSetFromOptions()
251 @*/
252 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
253 {
254   PetscErrorCode ierr;
255 
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
258   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
259   PetscFunctionReturn(0);
260 }
261 
262 #undef __FUNCT__
263 #define __FUNCT__ "DMDestroy"
264 /*@
265     DMDestroy - Destroys a vector packer or DMDA.
266 
267     Collective on DM
268 
269     Input Parameter:
270 .   dm - the DM object to destroy
271 
272     Level: developer
273 
274 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
275 
276 @*/
277 PetscErrorCode  DMDestroy(DM *dm)
278 {
279   PetscInt       i, cnt = 0, f;
280   DMNamedVecLink nlink,nnext;
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   if (!*dm) PetscFunctionReturn(0);
285   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
286 
287   /* count all the circular references of DM and its contained Vecs */
288   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
289     if ((*dm)->localin[i])  {cnt++;}
290     if ((*dm)->globalin[i]) {cnt++;}
291   }
292   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
293   if ((*dm)->x) {
294     DM obj;
295     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
296     if (obj == *dm) cnt++;
297   }
298 
299   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
300   /*
301      Need this test because the dm references the vectors that
302      reference the dm, so destroying the dm calls destroy on the
303      vectors that cause another destroy on the dm
304   */
305   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
306   ((PetscObject) (*dm))->refct = 0;
307   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
308     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
309     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
310   }
311   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
312     nnext = nlink->next;
313     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
314     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
315     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
316     ierr = PetscFree(nlink);CHKERRQ(ierr);
317   }
318   (*dm)->namedglobal = PETSC_NULL;
319 
320   /* Destroy the list of hooks */
321   {
322     DMCoarsenHookLink link,next;
323     for (link=(*dm)->coarsenhook; link; link=next) {
324       next = link->next;
325       ierr = PetscFree(link);CHKERRQ(ierr);
326     }
327     (*dm)->coarsenhook = PETSC_NULL;
328   }
329   {
330     DMRefineHookLink link,next;
331     for (link=(*dm)->refinehook; link; link=next) {
332       next = link->next;
333       ierr = PetscFree(link);CHKERRQ(ierr);
334     }
335     (*dm)->refinehook = PETSC_NULL;
336   }
337   {
338     DMSubDomainHookLink link,next;
339     for (link=(*dm)->subdomainhook; link; link=next) {
340       next = link->next;
341       ierr = PetscFree(link);CHKERRQ(ierr);
342     }
343     (*dm)->subdomainhook = PETSC_NULL;
344   }
345   {
346     DMGlobalToLocalHookLink link,next;
347     for (link=(*dm)->gtolhook; link; link=next) {
348       next = link->next;
349       ierr = PetscFree(link);CHKERRQ(ierr);
350     }
351     (*dm)->gtolhook = PETSC_NULL;
352   }
353   /* Destroy the work arrays */
354   {
355     DMWorkLink link,next;
356     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
357     for (link=(*dm)->workin; link; link=next) {
358       next = link->next;
359       ierr = PetscFree(link->mem);CHKERRQ(ierr);
360       ierr = PetscFree(link);CHKERRQ(ierr);
361     }
362     (*dm)->workin = PETSC_NULL;
363   }
364 
365   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
366   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
367   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
368 
369   if ((*dm)->ctx && (*dm)->ctxdestroy) {
370     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
371   }
372   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
373   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
374   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
375   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
376   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
377   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
378   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
379 
380   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
381   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
382   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
383   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
384   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
385 
386   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
387   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
388   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
389 
390   for (f = 0; f < (*dm)->numFields; ++f) {
391     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
392   }
393   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
394   /* if memory was published with AMS then destroy it */
395   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
396 
397   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
398   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
399   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
400   PetscFunctionReturn(0);
401 }
402 
403 #undef __FUNCT__
404 #define __FUNCT__ "DMSetUp"
405 /*@
406     DMSetUp - sets up the data structures inside a DM object
407 
408     Collective on DM
409 
410     Input Parameter:
411 .   dm - the DM object to setup
412 
413     Level: developer
414 
415 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
416 
417 @*/
418 PetscErrorCode  DMSetUp(DM dm)
419 {
420   PetscErrorCode ierr;
421 
422   PetscFunctionBegin;
423   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
424   if (dm->setupcalled) PetscFunctionReturn(0);
425   if (dm->ops->setup) {
426     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
427   }
428   dm->setupcalled = PETSC_TRUE;
429   PetscFunctionReturn(0);
430 }
431 
432 #undef __FUNCT__
433 #define __FUNCT__ "DMSetFromOptions"
434 /*@
435     DMSetFromOptions - sets parameters in a DM from the options database
436 
437     Collective on DM
438 
439     Input Parameter:
440 .   dm - the DM object to set options for
441 
442     Options Database:
443 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
444 .   -dm_vec_type <type>  type of vector to create inside DM
445 .   -dm_mat_type <type>  type of matrix to create inside DM
446 -   -dm_coloring_type <global or ghosted>
447 
448     Level: developer
449 
450 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
451 
452 @*/
453 PetscErrorCode  DMSetFromOptions(DM dm)
454 {
455   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
456   PetscErrorCode ierr;
457   char           typeName[256] = MATAIJ;
458 
459   PetscFunctionBegin;
460   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
461   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
462     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
463     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
464     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
465     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
466     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
467     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
468     if (flg) {
469       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
470     }
471     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
472     if (flg) {
473       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
474     }
475     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
476     if (dm->ops->setfromoptions) {
477       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
478     }
479     /* process any options handlers added with PetscObjectAddOptionsHandler() */
480     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
481   ierr = PetscOptionsEnd();CHKERRQ(ierr);
482   if (flg1) {
483     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
484   }
485   if (flg2) {
486     PetscViewer viewer;
487 
488     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
489     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
490     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
491     ierr = DMView(dm, viewer);CHKERRQ(ierr);
492     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
493   }
494   if (flg3) {
495     PetscViewer viewer;
496 
497     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
498     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
499     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
500     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
501     ierr = DMView(dm, viewer);CHKERRQ(ierr);
502     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
503   }
504   if (flg4) {
505     PetscViewer viewer;
506 
507     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
508     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
509     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
510     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
511     ierr = DMView(dm, viewer);CHKERRQ(ierr);
512     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
513   }
514   PetscFunctionReturn(0);
515 }
516 
517 #undef __FUNCT__
518 #define __FUNCT__ "DMView"
519 /*@C
520     DMView - Views a vector packer or DMDA.
521 
522     Collective on DM
523 
524     Input Parameter:
525 +   dm - the DM object to view
526 -   v - the viewer
527 
528     Level: developer
529 
530 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
531 
532 @*/
533 PetscErrorCode  DMView(DM dm,PetscViewer v)
534 {
535   PetscErrorCode ierr;
536   PetscBool      isbinary;
537 
538   PetscFunctionBegin;
539   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
540   if (!v) {
541     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
542   }
543   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
544   if (isbinary) {
545     PetscInt         classid = DM_FILE_CLASSID;
546     char             type[256];
547 
548     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
549     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
550     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
551   }
552   if (dm->ops->view) {
553     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
554   }
555   PetscFunctionReturn(0);
556 }
557 
558 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
559 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
560 
561 #undef __FUNCT__
562 #define __FUNCT__ "DMCreateGlobalVector"
563 /*@
564     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
565 
566     Collective on DM
567 
568     Input Parameter:
569 .   dm - the DM object
570 
571     Output Parameter:
572 .   vec - the global vector
573 
574     Level: beginner
575 
576 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
577 
578 @*/
579 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
580 {
581   PetscErrorCode ierr;
582 
583   PetscFunctionBegin;
584   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
585   if (dm->defaultSection) {
586     PetscSection gSection;
587     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
588 
589     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
590     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
591     for(p = pStart; p < pEnd; ++p) {
592       PetscInt dof, cdof;
593 
594       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
595       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
596       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
597       if ((dof > 0) && (dof-cdof != blockSize)) {
598         blockSize = 1;
599         break;
600       }
601     }
602     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
603     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
604     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
605     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
606     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
607     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
608     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
609     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
610     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
611     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
612     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
613     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
614   } else {
615     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
616   }
617   PetscFunctionReturn(0);
618 }
619 
620 #undef __FUNCT__
621 #define __FUNCT__ "DMCreateLocalVector"
622 /*@
623     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
624 
625     Not Collective
626 
627     Input Parameter:
628 .   dm - the DM object
629 
630     Output Parameter:
631 .   vec - the local vector
632 
633     Level: beginner
634 
635 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
636 
637 @*/
638 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
639 {
640   PetscErrorCode ierr;
641 
642   PetscFunctionBegin;
643   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
644   if (dm->defaultSection) {
645     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
646 
647     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
648     for(p = pStart; p < pEnd; ++p) {
649       PetscInt dof;
650 
651       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
652       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
653       if ((dof > 0) && (dof != blockSize)) {
654         blockSize = 1;
655         break;
656       }
657     }
658     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
659     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
660     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
661     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
662     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
663     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
664     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
665   } else {
666     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
667   }
668   PetscFunctionReturn(0);
669 }
670 
671 #undef __FUNCT__
672 #define __FUNCT__ "DMGetLocalToGlobalMapping"
673 /*@
674    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
675 
676    Collective on DM
677 
678    Input Parameter:
679 .  dm - the DM that provides the mapping
680 
681    Output Parameter:
682 .  ltog - the mapping
683 
684    Level: intermediate
685 
686    Notes:
687    This mapping can then be used by VecSetLocalToGlobalMapping() or
688    MatSetLocalToGlobalMapping().
689 
690 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
691 @*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily and cached on the DM; subsequent calls return the cached object */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE(review): this local `ltog` array shadows the `ltog` output parameter;
         the parameter is only written at the end of the function, so the shadowing
         is harmless but easy to misread */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      /* For every point in the chart, map each local dof (local section) to its
         offset in the global section */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        /* NOTE(review): presumably the global offset is non-negative only for
           locally owned points -- confirm how unowned points are encoded here */
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* The mapping takes ownership of the ltog array (PETSC_OWN_POINTER) */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No section: defer to the type-specific constructor, which caches into dm->ltogmap */
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
731 
732 #undef __FUNCT__
733 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
734 /*@
735    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
736 
737    Collective on DM
738 
739    Input Parameter:
740 .  da - the distributed array that provides the mapping
741 
742    Output Parameter:
743 .  ltog - the block mapping
744 
745    Level: intermediate
746 
747    Notes:
748    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
749    MatSetLocalToGlobalMappingBlock().
750 
751 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
752 @*/
753 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
754 {
755   PetscErrorCode ierr;
756 
757   PetscFunctionBegin;
758   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
759   PetscValidPointer(ltog,2);
760   if (!dm->ltogmapb) {
761     PetscInt bs;
762     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
763     if (bs > 1) {
764       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
765       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
766     } else {
767       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
768       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
769     }
770   }
771   *ltog = dm->ltogmapb;
772   PetscFunctionReturn(0);
773 }
774 
775 #undef __FUNCT__
776 #define __FUNCT__ "DMGetBlockSize"
777 /*@
778    DMGetBlockSize - Gets the inherent block size associated with a DM
779 
780    Not Collective
781 
782    Input Parameter:
783 .  dm - the DM with block structure
784 
785    Output Parameter:
786 .  bs - the block size, 1 implies no exploitable block structure
787 
788    Level: intermediate
789 
790 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
791 @*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* dm->bs starts at 1 in DMCreate(); a value < 1 means the implementation has
     not determined the block structure yet, so report that as an error */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
801 
802 #undef __FUNCT__
803 #define __FUNCT__ "DMCreateInterpolation"
804 /*@
805     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
806 
807     Collective on DM
808 
809     Input Parameter:
810 +   dm1 - the DM object
811 -   dm2 - the second, finer DM object
812 
813     Output Parameter:
814 +  mat - the interpolation
815 -  vec - the scaling (optional)
816 
817     Level: developer
818 
819     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
820         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
821 
822         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
823         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
824 
825 
826 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
827 
828 @*/
829 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
830 {
831   PetscErrorCode ierr;
832 
833   PetscFunctionBegin;
834   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
835   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
836   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "DMCreateInjection"
842 /*@
843     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
844 
845     Collective on DM
846 
847     Input Parameter:
848 +   dm1 - the DM object
849 -   dm2 - the second, finer DM object
850 
851     Output Parameter:
852 .   ctx - the injection
853 
854     Level: developer
855 
856    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
857         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
858 
859 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
860 
861 @*/
862 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
863 {
864   PetscErrorCode ierr;
865 
866   PetscFunctionBegin;
867   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
868   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
869   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
870   PetscFunctionReturn(0);
871 }
872 
873 #undef __FUNCT__
874 #define __FUNCT__ "DMCreateColoring"
875 /*@C
876     DMCreateColoring - Gets coloring for a DMDA or DMComposite
877 
878     Collective on DM
879 
880     Input Parameter:
881 +   dm - the DM object
882 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
883 -   matype - either MATAIJ or MATBAIJ
884 
885     Output Parameter:
886 .   coloring - the coloring
887 
888     Level: developer
889 
890 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
891 
892 @*/
893 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
894 {
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
899   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
900   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
901   PetscFunctionReturn(0);
902 }
903 
904 #undef __FUNCT__
905 #define __FUNCT__ "DMCreateMatrix"
906 /*@C
907     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
908 
909     Collective on DM
910 
911     Input Parameter:
912 +   dm - the DM object
913 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
914             any type which inherits from one of these (such as MATAIJ)
915 
916     Output Parameter:
917 .   mat - the empty Jacobian
918 
919     Level: beginner
920 
921     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
922        do not need to do it yourself.
923 
924        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
925        the nonzero pattern call DMDASetMatPreallocateOnly()
926 
927        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
928        internally by PETSc.
929 
930        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
931        the indices for the global numbering for DMDAs which is complicated.
932 
933 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
934 
935 @*/
936 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
937 {
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
942 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
943   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
944 #endif
945   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
946   PetscValidPointer(mat,3);
947   if (dm->mattype) {
948     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
949   } else {
950     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
951   }
952   PetscFunctionReturn(0);
953 }
954 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag consulted by the type-specific creatematrix implementations */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
977 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

  Notes: The returned memory is not zeroed; its previous contents are undefined.

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Pop a link off the "checked in" free list if one exists, otherwise allocate
     a fresh (zeroed) link; PetscNewLog charges the memory to dm */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  /* Grow the buffer when the cached allocation is too small for count items of dtype */
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the "checked out" list until DMRestoreWorkArray() returns it */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1023 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (currently unused; accepted for symmetry with DMGetWorkArray())
- dtype - data type (currently unused; accepted for symmetry with DMGetWorkArray())

  Input/Output Parameter:
. array - the work array; set to PETSC_NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the matching link on the "checked out" list and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was not handed out by DMGetWorkArray() on this DM */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1062 
1063 #undef __FUNCT__
1064 #define __FUNCT__ "DMSetNullSpaceConstructor"
1065 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1066 {
1067   PetscFunctionBegin;
1068   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1069   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1070   dm->nullspaceConstructors[field] = nullsp;
1071   PetscFunctionReturn(0);
1072 }
1073 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers see defined values on all paths */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: walk the global section twice, first to size each
       field's index array, then to fill it */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count owned, unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      /* gdof <= 0 means this point contributes no owned dofs here; skip it */
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reset; reused as a write cursor in the second pass */
    }
    /* Second pass: record the global index of each unconstrained field dof */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          /* goff advances across fields; fields are laid out consecutively at each point */
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f], so it
           must not be freed below */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: defer to the DM implementation, if it provides one */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1186 
1187 #undef __FUNCT__
1188 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1189 /*@C
1190   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1191 
1192   Not Collective
1193 
1194   Input Parameters:
1195 + dm   - the DM object
1196 - name - the name of the field decomposition
1197 
1198   Output Parameter:
1199 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1200 
1201   Level: advanced
1202 
1203 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1204 @*/
1205 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1206 {
1207   PetscErrorCode ierr;
1208 
1209   PetscFunctionBegin;
1210   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1211   PetscValidCharPointer(name,2);
1212   PetscValidPointer(ddm,3);
1213   *ddm = PETSC_NULL;
1214   if (dm->ops->createfielddecompositiondm) {
1215     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1216   }
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 #undef __FUNCT__
1221 #define __FUNCT__ "DMCreateFieldDecomposition"
1222 /*@C
1223   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1224                           corresponding to different fields: each IS contains the global indices of the dofs of the
1225                           corresponding field. The optional list of DMs define the DM for each subproblem.
1226                           Generalizes DMCreateFieldIS().
1227 
1228   Not collective
1229 
1230   Input Parameter:
1231 . dm - the DM object
1232 
1233   Output Parameters:
1234 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1235 . namelist  - The name for each field (or PETSC_NULL if not requested)
1236 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1237 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1238 
1239   Level: intermediate
1240 
1241   Notes:
1242   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1243   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1244   and all of the arrays should be freed with PetscFree().
1245 
1246 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1247 @*/
1248 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1249 {
1250   PetscErrorCode ierr;
1251 
1252   PetscFunctionBegin;
1253   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1254   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1255   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1256   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1257   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1258   if (!dm->ops->createfielddecomposition) {
1259     PetscSection section;
1260     PetscInt     numFields, f;
1261 
1262     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1263     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1264     if (section && numFields && dm->ops->createsubdm) {
1265       *len = numFields;
1266       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1267       for (f = 0; f < numFields; ++f) {
1268         const char *fieldName;
1269 
1270         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1271         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1272         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1273       }
1274     } else {
1275       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1276       /* By default there are no DMs associated with subproblems. */
1277       if (dmlist) *dmlist = PETSC_NULL;
1278     }
1279   }
1280   else {
1281     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1282   }
1283   PetscFunctionReturn(0);
1284 }
1285 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers of the selected fields

  Output Parameters:
+ is    - The global indices for the subproblem (pass PETSC_NULL if not requested)
- subdm - The DM for the subproblem (pass PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* Both outputs are optional; validate only when requested */
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1321 
1322 #undef __FUNCT__
1323 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1324 /*@C
1325   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1326 
1327   Not Collective
1328 
1329   Input Parameters:
1330 + dm   - the DM object
1331 - name - the name of the subdomain decomposition
1332 
1333   Output Parameter:
1334 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1335 
1336   Level: advanced
1337 
1338 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1339 @*/
1340 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1341 {
1342   PetscErrorCode ierr;
1343 
1344   PetscFunctionBegin;
1345   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1346   PetscValidCharPointer(name,2);
1347   PetscValidPointer(ddm,3);
1348   *ddm = PETSC_NULL;
1349   if (dm->ops->createdomaindecompositiondm) {
1350     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1351   }
1352   PetscFunctionReturn(0);
1353 }
1354 
1355 #undef __FUNCT__
1356 #define __FUNCT__ "DMCreateDomainDecomposition"
1357 /*@C
1358   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1359                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1360                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1361                           define a nonoverlapping covering, while outer subdomains can overlap.
1362                           The optional list of DMs define the DM for each subproblem.
1363 
1364   Not collective
1365 
1366   Input Parameter:
1367 . dm - the DM object
1368 
1369   Output Parameters:
1370 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1371 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1372 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1373 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1374 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1375 
1376   Level: intermediate
1377 
1378   Notes:
1379   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1380   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1381   and all of the arrays should be freed with PetscFree().
1382 
1383 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1384 @*/
1385 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1386 {
1387   PetscErrorCode ierr;
1388   DMSubDomainHookLink link;
1389   PetscInt            i,l;
1390 
1391   PetscFunctionBegin;
1392   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1393   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1394   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1395   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1396   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1397   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1398   if (dm->ops->createdomaindecomposition) {
1399     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1400   }
1401   if (len) *len = l;
1402   for (i = 0; i < l; i++) {
1403     for (link=dm->subdomainhook; link; link=link->next) {
1404       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1405     }
1406   }
1407   PetscFunctionReturn(0);
1408 }
1409 
1410 
1411 #undef __FUNCT__
1412 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1413 /*@C
1414   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1415 
1416   Not collective
1417 
1418   Input Parameters:
1419 + dm - the DM object
1420 . n  - the number of subdomain scatters
1421 - subdms - the local subdomains
1422 
1423   Output Parameters:
1424 + n     - the number of scatters returned
1425 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1426 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1427 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1428 
1429   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1430   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1431   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1432   solution and residual data.
1433 
1434   Level: developer
1435 
1436 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1437 @*/
1438 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1439 {
1440   PetscErrorCode ierr;
1441 
1442   PetscFunctionBegin;
1443   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1444   PetscValidPointer(subdms,3);
1445   PetscValidPointer(iscat,4);
1446   PetscValidPointer(oscat,5);
1447   PetscValidPointer(gscat,6);
1448   if (dm->ops->createddscatters) {
1449     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1450   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1451   PetscFunctionReturn(0);
1452 }
1453 
1454 #undef __FUNCT__
1455 #define __FUNCT__ "DMRefine"
1456 /*@
1457   DMRefine - Refines a DM object
1458 
1459   Collective on DM
1460 
1461   Input Parameter:
1462 + dm   - the DM object
1463 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1464 
1465   Output Parameter:
1466 . dmf - the refined DM, or PETSC_NULL
1467 
1468   Note: If no refinement was done, the return value is PETSC_NULL
1469 
1470   Level: developer
1471 
1472 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1473 @*/
1474 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1475 {
1476   PetscErrorCode ierr;
1477   DMRefineHookLink link;
1478 
1479   PetscFunctionBegin;
1480   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1481   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1482   if (*dmf) {
1483     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1484     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1485     (*dmf)->ctx       = dm->ctx;
1486     (*dmf)->leveldown = dm->leveldown;
1487     (*dmf)->levelup   = dm->levelup + 1;
1488     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1489     for (link=dm->refinehook; link; link=link->next) {
1490       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1491     }
1492   }
1493   PetscFunctionReturn(0);
1494 }
1495 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - DM on which to run a hook when interpolating to a finer level
.  refinehook - function to run when setting up the finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1549 
#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
   DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

   Collective if any hooks are

   Input Arguments:
+  coarse - coarser DM to use as a base
.  interp - interpolation matrix, apply using MatInterpolate()
-  fine - finer DM to update

   Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* NOTE(review): no PetscValidHeaderSpecific on the arguments here, unlike the
     other entry points in this file — fine is dereferenced directly, so callers
     must pass valid objects; confirm whether validation should be added */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1577 
1578 #undef __FUNCT__
1579 #define __FUNCT__ "DMGetRefineLevel"
1580 /*@
1581     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1582 
1583     Not Collective
1584 
1585     Input Parameter:
1586 .   dm - the DM object
1587 
1588     Output Parameter:
1589 .   level - number of refinements
1590 
1591     Level: developer
1592 
1593 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1594 
1595 @*/
1596 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1597 {
1598   PetscFunctionBegin;
1599   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1600   *level = dm->levelup;
1601   PetscFunctionReturn(0);
1602 }
1603 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1661 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (registered with DMGlobalToLocalHookAdd()) before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: broadcast the global values to the local vector; only
       insertion modes are supported here */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; the *_ALL_VALUES modes are mapped to their plain equivalents */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1707 
1708 #undef __FUNCT__
1709 #define __FUNCT__ "DMGlobalToLocalEnd"
1710 /*@
1711     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1712 
1713     Neighbor-wise Collective on DM
1714 
1715     Input Parameters:
1716 +   dm - the DM object
1717 .   g - the global vector
1718 .   mode - INSERT_VALUES or ADD_VALUES
1719 -   l - the local vector
1720 
1721 
1722     Level: beginner
1723 
1724 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1725 
1726 @*/
1727 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1728 {
1729   PetscSF        sf;
1730   PetscErrorCode ierr;
1731   PetscScalar    *lArray, *gArray;
1732   DMGlobalToLocalHookLink link;
1733 
1734   PetscFunctionBegin;
1735   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1736   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1737   if (sf) {
1738   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1739 
1740     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1741     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1742     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1743     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1744     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1745   } else {
1746     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1747   }
1748   for (link=dm->gtolhook; link; link=link->next) {
1749     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1750   }
1751   PetscFunctionReturn(0);
1752 }
1753 
1754 #undef __FUNCT__
1755 #define __FUNCT__ "DMLocalToGlobalBegin"
1756 /*@
1757     DMLocalToGlobalBegin - updates global vectors from local vectors
1758 
1759     Neighbor-wise Collective on DM
1760 
1761     Input Parameters:
1762 +   dm - the DM object
1763 .   l - the local vector
1764 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1765            base point.
-   g - the global vector
1767 
1768     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1769            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1770            global array to the final global array with VecAXPY().
1771 
1772     Level: beginner
1773 
1774 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1775 
1776 @*/
1777 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1778 {
1779   PetscSF        sf;
1780   PetscErrorCode ierr;
1781 
1782   PetscFunctionBegin;
1783   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1784   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1785   if (sf) {
1786     MPI_Op       op;
1787     PetscScalar *lArray, *gArray;
1788 
1789     switch(mode) {
1790     case INSERT_VALUES:
1791     case INSERT_ALL_VALUES:
1792 #if defined(PETSC_HAVE_MPI_REPLACE)
1793       op = MPI_REPLACE; break;
1794 #else
1795       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1796 #endif
1797     case ADD_VALUES:
1798     case ADD_ALL_VALUES:
1799       op = MPI_SUM; break;
1800   default:
1801     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1802     }
1803     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1804     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1805     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1806     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1807     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1808   } else {
1809     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1810   }
1811   PetscFunctionReturn(0);
1812 }
1813 
1814 #undef __FUNCT__
1815 #define __FUNCT__ "DMLocalToGlobalEnd"
1816 /*@
1817     DMLocalToGlobalEnd - updates global vectors from local vectors
1818 
1819     Neighbor-wise Collective on DM
1820 
1821     Input Parameters:
1822 +   dm - the DM object
1823 .   l - the local vector
1824 .   mode - INSERT_VALUES or ADD_VALUES
1825 -   g - the global vector
1826 
1827 
1828     Level: beginner
1829 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1831 
1832 @*/
1833 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1834 {
1835   PetscSF        sf;
1836   PetscErrorCode ierr;
1837 
1838   PetscFunctionBegin;
1839   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1840   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1841   if (sf) {
1842     MPI_Op       op;
1843     PetscScalar *lArray, *gArray;
1844 
1845     switch(mode) {
1846     case INSERT_VALUES:
1847     case INSERT_ALL_VALUES:
1848 #if defined(PETSC_HAVE_MPI_REPLACE)
1849       op = MPI_REPLACE; break;
1850 #else
1851       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1852 #endif
1853     case ADD_VALUES:
1854     case ADD_ALL_VALUES:
1855       op = MPI_SUM; break;
1856     default:
1857       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1858     }
1859     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1860     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1861     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1862     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1863     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1864   } else {
1865     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1866   }
1867   PetscFunctionReturn(0);
1868 }
1869 
1870 #undef __FUNCT__
1871 #define __FUNCT__ "DMCoarsen"
1872 /*@
1873     DMCoarsen - Coarsens a DM object
1874 
1875     Collective on DM
1876 
1877     Input Parameter:
1878 +   dm - the DM object
1879 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1880 
1881     Output Parameter:
1882 .   dmc - the coarsened DM
1883 
1884     Level: developer
1885 
1886 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1887 
1888 @*/
1889 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1890 {
1891   PetscErrorCode ierr;
1892   DMCoarsenHookLink link;
1893 
1894   PetscFunctionBegin;
1895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1896   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1897   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1898   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1899   (*dmc)->ctx       = dm->ctx;
1900   (*dmc)->levelup   = dm->levelup;
1901   (*dmc)->leveldown = dm->leveldown + 1;
1902   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1903   for (link=dm->coarsenhook; link; link=link->next) {
1904     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1905   }
1906   PetscFunctionReturn(0);
1907 }
1908 
1909 #undef __FUNCT__
1910 #define __FUNCT__ "DMCoarsenHookAdd"
1911 /*@
1912    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1913 
1914    Logically Collective
1915 
1916    Input Arguments:
1917 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1918 .  coarsenhook - function to run when setting up a coarser level
1919 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1920 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1921 
1922    Calling sequence of coarsenhook:
1923 $    coarsenhook(DM fine,DM coarse,void *ctx);
1924 
1925 +  fine - fine level DM
1926 .  coarse - coarse level DM to restrict problem to
1927 -  ctx - optional user-defined function context
1928 
1929    Calling sequence for restricthook:
1930 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1931 
1932 +  fine - fine level DM
1933 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1934 .  rscale - scaling vector for restriction
1935 .  inject - matrix restricting by injection
1936 .  coarse - coarse level DM to update
1937 -  ctx - optional user-defined function context
1938 
1939    Level: advanced
1940 
1941    Notes:
1942    This function is only needed if auxiliary data needs to be set up on coarse grids.
1943 
1944    If this function is called multiple times, the hooks will be run in the order they are added.
1945 
1946    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1947    extract the finest level information from its context (instead of from the SNES).
1948 
1949 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1950 @*/
1951 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1952 {
1953   PetscErrorCode ierr;
1954   DMCoarsenHookLink link,*p;
1955 
1956   PetscFunctionBegin;
1957   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1958   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1959   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1960   link->coarsenhook = coarsenhook;
1961   link->restricthook = restricthook;
1962   link->ctx = ctx;
1963   link->next = PETSC_NULL;
1964   *p = link;
1965   PetscFunctionReturn(0);
1966 }
1967 
1968 #undef __FUNCT__
1969 #define __FUNCT__ "DMRestrict"
1970 /*@
1971    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1972 
1973    Collective if any hooks are
1974 
1975    Input Arguments:
1976 +  fine - finer DM to use as a base
1977 .  restrct - restriction matrix, apply using MatRestrict()
1978 .  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1980 
1981    Level: developer
1982 
1983 .seealso: DMCoarsenHookAdd(), MatRestrict()
1984 @*/
1985 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1986 {
1987   PetscErrorCode ierr;
1988   DMCoarsenHookLink link;
1989 
1990   PetscFunctionBegin;
1991   for (link=fine->coarsenhook; link; link=link->next) {
1992     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1993   }
1994   PetscFunctionReturn(0);
1995 }
1996 
1997 #undef __FUNCT__
1998 #define __FUNCT__ "DMSubDomainHookAdd"
1999 /*@
2000    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2001 
2002    Logically Collective
2003 
2004    Input Arguments:
2005 +  global - global DM
.  ddhook - function to run when setting up the block solver on a subdomain
2007 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2008 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
2009 
2010    Calling sequence for restricthook:
2011 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
2012 
2013 +  global - global DM
2014 .  out    - scatter to the outer (with ghost and overlap points) block vector
2015 .  in     - scatter to block vector values only owned locally
2016 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
2017 -  ctx - optional user-defined function context
2018 
2019    Level: advanced
2020 
2021    Notes:
2022    This function is only needed if auxiliary data needs to be set up on coarse grids.
2023 
2024    If this function is called multiple times, the hooks will be run in the order they are added.
2025 
2026    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2027    extract the finest level information from its context (instead of from the SNES).
2028 
2029 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2030 @*/
2031 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2032 {
2033   PetscErrorCode ierr;
2034   DMSubDomainHookLink link,*p;
2035 
2036   PetscFunctionBegin;
2037   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2038   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2039   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2040   link->restricthook = restricthook;
2041   link->ddhook = ddhook;
2042   link->ctx = ctx;
2043   link->next = PETSC_NULL;
2044   *p = link;
2045   PetscFunctionReturn(0);
2046 }
2047 
2048 #undef __FUNCT__
2049 #define __FUNCT__ "DMSubDomainRestrict"
2050 /*@
2051    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2052 
2053    Collective if any hooks are
2054 
2055    Input Arguments:
+  global - the global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - block DM to update
2060 
2061    Level: developer
2062 
2063 .seealso: DMCoarsenHookAdd(), MatRestrict()
2064 @*/
2065 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2066 {
2067   PetscErrorCode ierr;
2068   DMSubDomainHookLink link;
2069 
2070   PetscFunctionBegin;
2071   for (link=global->subdomainhook; link; link=link->next) {
2072     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2073   }
2074   PetscFunctionReturn(0);
2075 }
2076 
2077 #undef __FUNCT__
2078 #define __FUNCT__ "DMGetCoarsenLevel"
2079 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2081 
2082     Not Collective
2083 
2084     Input Parameter:
2085 .   dm - the DM object
2086 
2087     Output Parameter:
2088 .   level - number of coarsenings
2089 
2090     Level: developer
2091 
2092 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2093 
2094 @*/
2095 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2096 {
2097   PetscFunctionBegin;
2098   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2099   *level = dm->leveldown;
2100   PetscFunctionReturn(0);
2101 }
2102 
2103 
2104 
2105 #undef __FUNCT__
2106 #define __FUNCT__ "DMRefineHierarchy"
2107 /*@C
2108     DMRefineHierarchy - Refines a DM object, all levels at once
2109 
2110     Collective on DM
2111 
2112     Input Parameter:
2113 +   dm - the DM object
2114 -   nlevels - the number of levels of refinement
2115 
2116     Output Parameter:
2117 .   dmf - the refined DM hierarchy
2118 
2119     Level: developer
2120 
2121 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2122 
2123 @*/
2124 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2125 {
2126   PetscErrorCode ierr;
2127 
2128   PetscFunctionBegin;
2129   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2130   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2131   if (nlevels == 0) PetscFunctionReturn(0);
2132   if (dm->ops->refinehierarchy) {
2133     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2134   } else if (dm->ops->refine) {
2135     PetscInt i;
2136 
2137     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2138     for (i=1; i<nlevels; i++) {
2139       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2140     }
2141   } else {
2142     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2143   }
2144   PetscFunctionReturn(0);
2145 }
2146 
2147 #undef __FUNCT__
2148 #define __FUNCT__ "DMCoarsenHierarchy"
2149 /*@C
2150     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2151 
2152     Collective on DM
2153 
2154     Input Parameter:
2155 +   dm - the DM object
2156 -   nlevels - the number of levels of coarsening
2157 
2158     Output Parameter:
2159 .   dmc - the coarsened DM hierarchy
2160 
2161     Level: developer
2162 
2163 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2164 
2165 @*/
2166 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2167 {
2168   PetscErrorCode ierr;
2169 
2170   PetscFunctionBegin;
2171   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2172   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2173   if (nlevels == 0) PetscFunctionReturn(0);
2174   PetscValidPointer(dmc,3);
2175   if (dm->ops->coarsenhierarchy) {
2176     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2177   } else if (dm->ops->coarsen) {
2178     PetscInt i;
2179 
2180     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2181     for (i=1; i<nlevels; i++) {
2182       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2183     }
2184   } else {
2185     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2186   }
2187   PetscFunctionReturn(0);
2188 }
2189 
2190 #undef __FUNCT__
2191 #define __FUNCT__ "DMCreateAggregates"
2192 /*@
2193    DMCreateAggregates - Gets the aggregates that map between
2194    grids associated with two DMs.
2195 
2196    Collective on DM
2197 
2198    Input Parameters:
2199 +  dmc - the coarse grid DM
2200 -  dmf - the fine grid DM
2201 
2202    Output Parameters:
2203 .  rest - the restriction matrix (transpose of the projection matrix)
2204 
2205    Level: intermediate
2206 
2207 .keywords: interpolation, restriction, multigrid
2208 
2209 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2210 @*/
2211 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2212 {
2213   PetscErrorCode ierr;
2214 
2215   PetscFunctionBegin;
2216   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2217   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2218   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2219   PetscFunctionReturn(0);
2220 }
2221 
2222 #undef __FUNCT__
2223 #define __FUNCT__ "DMSetApplicationContextDestroy"
2224 /*@C
2225     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2226 
2227     Not Collective
2228 
2229     Input Parameters:
2230 +   dm - the DM object
2231 -   destroy - the destroy function
2232 
2233     Level: intermediate
2234 
2235 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2236 
2237 @*/
2238 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2239 {
2240   PetscFunctionBegin;
2241   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2242   dm->ctxdestroy = destroy;
2243   PetscFunctionReturn(0);
2244 }
2245 
2246 #undef __FUNCT__
2247 #define __FUNCT__ "DMSetApplicationContext"
2248 /*@
2249     DMSetApplicationContext - Set a user context into a DM object
2250 
2251     Not Collective
2252 
2253     Input Parameters:
2254 +   dm - the DM object
2255 -   ctx - the user context
2256 
2257     Level: intermediate
2258 
2259 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2260 
2261 @*/
2262 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2263 {
2264   PetscFunctionBegin;
2265   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2266   dm->ctx = ctx;
2267   PetscFunctionReturn(0);
2268 }
2269 
2270 #undef __FUNCT__
2271 #define __FUNCT__ "DMGetApplicationContext"
2272 /*@
2273     DMGetApplicationContext - Gets a user context from a DM object
2274 
2275     Not Collective
2276 
2277     Input Parameter:
2278 .   dm - the DM object
2279 
2280     Output Parameter:
2281 .   ctx - the user context
2282 
2283     Level: intermediate
2284 
2285 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2286 
2287 @*/
2288 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2289 {
2290   PetscFunctionBegin;
2291   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2292   *(void**)ctx = dm->ctx;
2293   PetscFunctionReturn(0);
2294 }
2295 
2296 #undef __FUNCT__
2297 #define __FUNCT__ "DMSetVariableBounds"
2298 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2300 
2301     Logically Collective on DM
2302 
2303     Input Parameter:
2304 +   dm - the DM object
2305 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2306 
2307     Level: intermediate
2308 
2309 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2310          DMSetJacobian()
2311 
2312 @*/
2313 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2314 {
2315   PetscFunctionBegin;
2316   dm->ops->computevariablebounds = f;
2317   PetscFunctionReturn(0);
2318 }
2319 
2320 #undef __FUNCT__
2321 #define __FUNCT__ "DMHasVariableBounds"
2322 /*@
2323     DMHasVariableBounds - does the DM object have a variable bounds function?
2324 
2325     Not Collective
2326 
2327     Input Parameter:
2328 .   dm - the DM object to destroy
2329 
2330     Output Parameter:
2331 .   flg - PETSC_TRUE if the variable bounds function exists
2332 
2333     Level: developer
2334 
2335 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2336 
2337 @*/
2338 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2339 {
2340   PetscFunctionBegin;
2341   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2342   PetscFunctionReturn(0);
2343 }
2344 
2345 #undef __FUNCT__
2346 #define __FUNCT__ "DMComputeVariableBounds"
2347 /*@C
2348     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2349 
2350     Logically Collective on DM
2351 
2352     Input Parameters:
2353 +   dm - the DM object to destroy
2354 -   x  - current solution at which the bounds are computed
2355 
2356     Output parameters:
2357 +   xl - lower bound
2358 -   xu - upper bound
2359 
2360     Level: intermediate
2361 
2362 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2363 
2364 @*/
2365 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2366 {
2367   PetscErrorCode ierr;
2368   PetscFunctionBegin;
2369   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2370   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2371   if (dm->ops->computevariablebounds) {
2372     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2373   }
2374   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2375   PetscFunctionReturn(0);
2376 }
2377 
2378 #undef __FUNCT__
2379 #define __FUNCT__ "DMHasColoring"
2380 /*@
2381     DMHasColoring - does the DM object have a method of providing a coloring?
2382 
2383     Not Collective
2384 
2385     Input Parameter:
2386 .   dm - the DM object
2387 
2388     Output Parameter:
2389 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2390 
2391     Level: developer
2392 
2393 .seealso DMHasFunction(), DMCreateColoring()
2394 
2395 @*/
2396 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2397 {
2398   PetscFunctionBegin;
2399   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2400   PetscFunctionReturn(0);
2401 }
2402 
2403 #undef  __FUNCT__
2404 #define __FUNCT__ "DMSetVec"
2405 /*@C
2406     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2407 
2408     Collective on DM
2409 
2410     Input Parameter:
2411 +   dm - the DM object
2412 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2413 
2414     Level: developer
2415 
2416 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2417 
2418 @*/
2419 PetscErrorCode  DMSetVec(DM dm,Vec x)
2420 {
2421   PetscErrorCode ierr;
2422   PetscFunctionBegin;
2423   if (x) {
2424     if (!dm->x) {
2425       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2426     }
2427     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2428   }
2429   else if (dm->x) {
2430     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2431   }
2432   PetscFunctionReturn(0);
2433 }
2434 
/* Registry of available DM types, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
/* Set to PETSC_TRUE once DMRegisterAll() has run; reset by DMRegisterDestroy() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2437 
2438 #undef __FUNCT__
2439 #define __FUNCT__ "DMSetType"
2440 /*@C
2441   DMSetType - Builds a DM, for a particular DM implementation.
2442 
2443   Collective on DM
2444 
2445   Input Parameters:
2446 + dm     - The DM object
2447 - method - The name of the DM type
2448 
2449   Options Database Key:
2450 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2451 
2452   Notes:
2453   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2454 
2455   Level: intermediate
2456 
2457 .keywords: DM, set, type
2458 .seealso: DMGetType(), DMCreate()
2459 @*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the type's constructor in the registry, registering the built-in
     types on first use */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any previous type-specific data before building the new type */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2483 
2484 #undef __FUNCT__
2485 #define __FUNCT__ "DMGetType"
2486 /*@C
2487   DMGetType - Gets the DM type name (as a string) from the DM.
2488 
2489   Not Collective
2490 
2491   Input Parameter:
2492 . dm  - The DM
2493 
2494   Output Parameter:
2495 . type - The DM type name
2496 
2497   Level: intermediate
2498 
2499 .keywords: DM, get, type, name
2500 .seealso: DMSetType(), DMCreate()
2501 @*/
2502 PetscErrorCode  DMGetType(DM dm, DMType *type)
2503 {
2504   PetscErrorCode ierr;
2505 
2506   PetscFunctionBegin;
2507   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2508   PetscValidCharPointer(type,2);
2509   if (!DMRegisterAllCalled) {
2510     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2511   }
2512   *type = ((PetscObject)dm)->type_name;
2513   PetscFunctionReturn(0);
2514 }
2515 
2516 #undef __FUNCT__
2517 #define __FUNCT__ "DMConvert"
2518 /*@C
2519   DMConvert - Converts a DM to another DM, either of the same or different type.
2520 
2521   Collective on DM
2522 
2523   Input Parameters:
2524 + dm - the DM
2525 - newtype - new DM type (use "same" for the same type)
2526 
2527   Output Parameter:
2528 . M - pointer to new DM
2529 
2530   Notes:
2531   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2532   the MPI communicator of the generated DM is always the same as the communicator
2533   of the input DM.
2534 
2535   Level: intermediate
2536 
2537 .seealso: DMCreate()
2538 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below;
     presumably a same-type fast path was intended — confirm before relying on it */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* The converter is looked up as a composed function named "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created only to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No fallback converter exists yet, so failing both lookups is an error */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2611 
2612 /*--------------------------------------------------------------------------------------------------------------------*/
2613 
2614 #undef __FUNCT__
2615 #define __FUNCT__ "DMRegister"
2616 /*@C
2617   DMRegister - See DMRegisterDynamic()
2618 
2619   Level: advanced
2620 @*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Register the creation routine under sname as "path:name" so the symbol
     can be located in a dynamic library later; function is the direct
     pointer used when dynamic loading is not in effect.
     NOTE(review): assumes path + ":" + name fits in PETSC_MAX_PATH_LEN;
     no bounds check is performed here. */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2633 
2634 
2635 /*--------------------------------------------------------------------------------------------------------------------*/
2636 #undef __FUNCT__
2637 #define __FUNCT__ "DMRegisterDestroy"
2638 /*@C
2639    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2640 
2641    Not Collective
2642 
2643    Level: advanced
2644 
2645 .keywords: DM, register, destroy
2646 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2647 @*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Free the registration list; resetting the flag lets DMRegisterAll()
     repopulate it on the next use */
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2657 
2658 #undef __FUNCT__
2659 #define __FUNCT__ "DMLoad"
2660 /*@C
2661   DMLoad - Loads a DM that has been stored in binary  with DMView().
2662 
2663   Collective on PetscViewer
2664 
2665   Input Parameters:
2666 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2667            some related function before a call to DMLoad().
2668 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2669            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2670 
2671    Level: intermediate
2672 
2673   Notes:
2674    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2675 
2676   Notes for advanced users:
2677   Most users should not need to know the details of the binary storage
2678   format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary DM storage
  format is
2681 .vb
2682      has not yet been determined
2683 .ve
2684 
2685 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2686 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only binary viewers are supported here (the docstring mentions HDF5,
     but this implementation rejects anything that is not binary) */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream must contain: the DM file classid, then a 256-char type
     name, then any type-specific data consumed by the type's load hook */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  /* The type from the file overrides any type previously set on newdm */
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2709 
2710 /******************************** FEM Support **********************************/
2711 
2712 #undef __FUNCT__
2713 #define __FUNCT__ "DMPrintCellVector"
2714 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2715   PetscInt       f;
2716   PetscErrorCode ierr;
2717 
2718   PetscFunctionBegin;
2719   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2720   for (f = 0; f < len; ++f) {
2721     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2722   }
2723   PetscFunctionReturn(0);
2724 }
2725 
2726 #undef __FUNCT__
2727 #define __FUNCT__ "DMPrintCellMatrix"
2728 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2729   PetscInt       f, g;
2730   PetscErrorCode ierr;
2731 
2732   PetscFunctionBegin;
2733   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2734   for (f = 0; f < rows; ++f) {
2735     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2736     for (g = 0; g < cols; ++g) {
2737       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2738     }
2739     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2740   }
2741   PetscFunctionReturn(0);
2742 }
2743 
2744 #undef __FUNCT__
2745 #define __FUNCT__ "DMGetDefaultSection"
2746 /*@
2747   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2748 
2749   Input Parameter:
2750 . dm - The DM
2751 
2752   Output Parameter:
2753 . section - The PetscSection
2754 
2755   Level: intermediate
2756 
2757   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2758 
2759 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2760 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: caller must not destroy it; may be NULL if no
     section has been set */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2768 
2769 #undef __FUNCT__
2770 #define __FUNCT__ "DMSetDefaultSection"
2771 /*@
2772   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2773 
2774   Input Parameters:
2775 + dm - The DM
2776 - section - The PetscSection
2777 
2778   Level: intermediate
2779 
2780   Note: Any existing Section will be destroyed
2781 
.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2783 @*/
2784 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2785   PetscInt       numFields;
2786   PetscInt       f;
2787   PetscErrorCode ierr;
2788 
2789   PetscFunctionBegin;
2790   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2791   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2792   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2793   dm->defaultSection = section;
2794   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2795   if (numFields) {
2796     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2797     for (f = 0; f < numFields; ++f) {
2798       const char *name;
2799 
2800       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2801       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2802     }
2803   }
2804   PetscFunctionReturn(0);
2805 }
2806 
2807 #undef __FUNCT__
2808 #define __FUNCT__ "DMGetDefaultGlobalSection"
2809 /*@
2810   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2811 
2812   Collective on DM
2813 
2814   Input Parameter:
2815 . dm - The DM
2816 
2817   Output Parameter:
2818 . section - The PetscSection
2819 
2820   Level: intermediate
2821 
2822   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2823 
2824 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2825 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily derive the global section (and the matching value layout in
     dm->map) from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2840 
2841 #undef __FUNCT__
2842 #define __FUNCT__ "DMSetDefaultGlobalSection"
2843 /*@
2844   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2845 
2846   Input Parameters:
2847 + dm - The DM
2848 - section - The PetscSection
2849 
2850   Level: intermediate
2851 
2852   Note: Any existing Section will be destroyed
2853 
2854 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2855 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  /* NOTE(review): no reference is taken on section, so the DM appears to
     assume ownership -- confirm callers do not also destroy it */
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2865 
2866 #undef __FUNCT__
2867 #define __FUNCT__ "DMGetDefaultSF"
2868 /*@
2869   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2870   it is created from the default PetscSection layouts in the DM.
2871 
2872   Input Parameter:
2873 . dm - The DM
2874 
2875   Output Parameter:
2876 . sf - The PetscSF
2877 
2878   Level: intermediate
2879 
2880   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2881 
2882 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2883 @*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 indicates the SF graph has not been set yet */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    /* Lazily build the dof SF from the default local/global sections;
       without a local section there is nothing to build, so hand back NULL */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Borrowed reference: caller must not destroy it */
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
2907 
2908 #undef __FUNCT__
2909 #define __FUNCT__ "DMSetDefaultSF"
2910 /*@
2911   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2912 
2913   Input Parameters:
2914 + dm - The DM
2915 - sf - The PetscSF
2916 
2917   Level: intermediate
2918 
2919   Note: Any previous SF is destroyed
2920 
2921 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2922 @*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  /* NOTE(review): no PetscObjectReference() is taken here, unlike
     DMSetPointSF() below; the DM appears to take ownership of sf --
     confirm callers do not destroy it themselves */
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2933 
2934 #undef __FUNCT__
2935 #define __FUNCT__ "DMCreateDefaultSF"
2936 /*@C
2937   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
2938   describing the data layout.
2939 
2940   Input Parameters:
2941 + dm - The DM
2942 . localSection - PetscSection describing the local data layout
2943 - globalSection - PetscSection describing the global data layout
2944 
2945   Level: intermediate
2946 
2947 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
2948 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a parallel layout of the locally owned unconstrained dofs so a
     global offset can be mapped to its owning rank via the ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* Pass 1: count leaves. A negative gdof encodes a remotely owned point
     whose true size is -(gdof+1); constrained dofs are excluded */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Pass 2: fill the leaf (local) indices and matching root (remote)
     locations, point by point */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    /* The global size must match either the constrained or unconstrained
       local size; in the latter case constraints are not filtered out */
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Leaf indices: the point's local offsets, skipping constrained dofs */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: the true global offset is -(goff+1)+d; the
         owning rank is found by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the roots live on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3022 
3023 #undef __FUNCT__
3024 #define __FUNCT__ "DMGetPointSF"
3025 /*@
3026   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3027 
3028   Input Parameter:
3029 . dm - The DM
3030 
3031   Output Parameter:
3032 . sf - The PetscSF
3033 
3034   Level: intermediate
3035 
3036   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3037 
3038 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3039 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference: caller must not destroy it */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3047 
3048 #undef __FUNCT__
3049 #define __FUNCT__ "DMSetPointSF"
3050 /*@
3051   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3052 
3053   Input Parameters:
3054 + dm - The DM
3055 - sf - The PetscSF
3056 
3057   Level: intermediate
3058 
3059 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3060 @*/
3061 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3062   PetscErrorCode ierr;
3063 
3064   PetscFunctionBegin;
3065   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3066   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3067   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3068   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3069   dm->sf = sf;
3070   PetscFunctionReturn(0);
3071 }
3072 
3073 #undef __FUNCT__
3074 #define __FUNCT__ "DMGetNumFields"
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  /* Count set by DMSetNumFields() (possibly via DMSetDefaultSection()) */
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3083 
3084 #undef __FUNCT__
3085 #define __FUNCT__ "DMSetNumFields"
PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Discard any existing field objects before resizing the array */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Each field slot starts out as an empty PetscContainer placeholder */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3104 
3105 #undef __FUNCT__
3106 #define __FUNCT__ "DMGetField"
3107 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3108 {
3109   PetscFunctionBegin;
3110   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3111   PetscValidPointer(field, 2);
3112   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3113   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3114   *field = dm->fields[f];
3115   PetscFunctionReturn(0);
3116 }
3117 
3118 #undef __FUNCT__
3119 #define __FUNCT__ "DMSetCoordinates"
3120 /*@
3121   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3122 
3123   Collective on DM
3124 
3125   Input Parameters:
3126 + dm - the DM
3127 - c - coordinate vector
3128 
3129   Note:
3130   The coordinates do include those for ghost points, which are in the local vector
3131 
3132   Level: intermediate
3133 
3134 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3136 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference first so c survives if it is the currently stored vector */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the local coordinates; DMGetCoordinatesLocal() will
     rebuild them from the new global vector on demand */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3150 
3151 #undef __FUNCT__
3152 #define __FUNCT__ "DMSetCoordinatesLocal"
3153 /*@
3154   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3155 
3156   Collective on DM
3157 
3158    Input Parameters:
3159 +  dm - the DM
3160 -  c - coordinate vector
3161 
3162   Note:
3163   The coordinates of ghost points can be set using DMSetCoordinates()
3164   followed by DMGetCoordinatesLocal(). This is intended to enable the
3165   setting of ghost coordinates outside of the domain.
3166 
3167   Level: intermediate
3168 
3169 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3170 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3171 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference first so c survives if it is the currently stored vector */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the global coordinates; DMGetCoordinates() will rebuild
     them from the new local vector on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3185 
3186 #undef __FUNCT__
3187 #define __FUNCT__ "DMGetCoordinates"
3188 /*@
3189   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3190 
3191   Not Collective
3192 
3193   Input Parameter:
3194 . dm - the DM
3195 
3196   Output Parameter:
3197 . c - global coordinate vector
3198 
3199   Note:
3200   This is a borrowed reference, so the user should NOT destroy this vector
3201 
3202   Each process has only the local coordinates (does NOT have the ghost coordinates).
3203 
3204   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3205   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3206 
3207   Level: intermediate
3208 
3209 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3210 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3211 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local coordinates when
     only the latter exist, scattering through the coordinate DM */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3231 
3232 #undef __FUNCT__
3233 #define __FUNCT__ "DMGetCoordinatesLocal"
3234 /*@
3235   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3236 
3237   Collective on DM
3238 
3239   Input Parameter:
3240 . dm - the DM
3241 
3242   Output Parameter:
3243 . c - coordinate vector
3244 
3245   Note:
3246   This is a borrowed reference, so the user should NOT destroy this vector
3247 
3248   Each process has the local and ghost coordinates
3249 
3250   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3251   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3252 
3253   Level: intermediate
3254 
3255 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3256 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3257 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local vector (including ghosts) from the global
     coordinates when only the latter exist, via the coordinate DM */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3277 
3278 #undef __FUNCT__
3279 #define __FUNCT__ "DMGetCoordinateDM"
3280 /*@
3281   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3282 
3283   Collective on DM
3284 
3285   Input Parameter:
3286 . dm - the DM
3287 
3288   Output Parameter:
3289 . cdm - coordinate DM
3290 
3291   Level: intermediate
3292 
3293 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3294 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3295 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM through the type-specific hook;
     types without the hook cannot carry coordinates */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3310