xref: /petsc/src/dm/interface/dm.c (revision b58ca069146ead337847ed7b06dec3c869beb1a6)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
PetscClassId  DM_CLASSID;                                      /* class id for all DM objects; presumably registered by DMInitializePackage() -- confirm */
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;  /* profiling events for DM conversions and local/global scatters */
5 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the dependent packages must be initialized explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* All implementation function pointers start NULL; DMSetType() is expected to fill them in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 presumably matches the declared size of nullspaceConstructors[] in dmimpl.h -- confirm */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
64 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
.  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
93 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or PETSC_NULL if none was attached with VecSetDM()

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Vec as a composed object under this key; see VecSetDM() */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
.  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or PETSC_NULL if none was attached with MatSetDM()

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Mat as a composed object under this key; see MatSetDM() */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
203 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to detach a previously set DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be NULL, which removes the composition below */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
229 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameters:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Simply forwards to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
261 
262 #undef __FUNCT__
263 #define __FUNCT__ "DMDestroy"
264 /*@
265     DMDestroy - Destroys a vector packer or DMDA.
266 
267     Collective on DM
268 
269     Input Parameter:
270 .   dm - the DM object to destroy
271 
272     Level: developer
273 
274 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
275 
276 @*/
277 PetscErrorCode  DMDestroy(DM *dm)
278 {
279   PetscInt       i, cnt = 0, f;
280   DMNamedVecLink nlink,nnext;
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   if (!*dm) PetscFunctionReturn(0);
285   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
286 
287   /* count all the circular references of DM and its contained Vecs */
288   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
289     if ((*dm)->localin[i])  {cnt++;}
290     if ((*dm)->globalin[i]) {cnt++;}
291   }
292   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
293   if ((*dm)->x) {
294     DM obj;
295     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
296     if (obj == *dm) cnt++;
297   }
298 
299   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
300   /*
301      Need this test because the dm references the vectors that
302      reference the dm, so destroying the dm calls destroy on the
303      vectors that cause another destroy on the dm
304   */
305   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
306   ((PetscObject) (*dm))->refct = 0;
307   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
308     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
309     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
310   }
311   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
312     nnext = nlink->next;
313     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
314     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
315     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
316     ierr = PetscFree(nlink);CHKERRQ(ierr);
317   }
318   (*dm)->namedglobal = PETSC_NULL;
319 
320   /* Destroy the list of hooks */
321   {
322     DMCoarsenHookLink link,next;
323     for (link=(*dm)->coarsenhook; link; link=next) {
324       next = link->next;
325       ierr = PetscFree(link);CHKERRQ(ierr);
326     }
327     (*dm)->coarsenhook = PETSC_NULL;
328   }
329   {
330     DMRefineHookLink link,next;
331     for (link=(*dm)->refinehook; link; link=next) {
332       next = link->next;
333       ierr = PetscFree(link);CHKERRQ(ierr);
334     }
335     (*dm)->refinehook = PETSC_NULL;
336   }
337   {
338     DMSubDomainHookLink link,next;
339     for (link=(*dm)->subdomainhook; link; link=next) {
340       next = link->next;
341       ierr = PetscFree(link);CHKERRQ(ierr);
342     }
343     (*dm)->subdomainhook = PETSC_NULL;
344   }
345   {
346     DMGlobalToLocalHookLink link,next;
347     for (link=(*dm)->gtolhook; link; link=next) {
348       next = link->next;
349       ierr = PetscFree(link);CHKERRQ(ierr);
350     }
351     (*dm)->gtolhook = PETSC_NULL;
352   }
353   /* Destroy the work arrays */
354   {
355     DMWorkLink link,next;
356     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
357     for (link=(*dm)->workin; link; link=next) {
358       next = link->next;
359       ierr = PetscFree(link->mem);CHKERRQ(ierr);
360       ierr = PetscFree(link);CHKERRQ(ierr);
361     }
362     (*dm)->workin = PETSC_NULL;
363   }
364 
365   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
366   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
367   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
368 
369   if ((*dm)->ctx && (*dm)->ctxdestroy) {
370     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
371   }
372   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
373   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
374   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
375   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
376   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
377   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
378   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
379 
380   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
381   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
382   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
383   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
384   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
385 
386   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
387   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
388   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
389 
390   for (f = 0; f < (*dm)->numFields; ++f) {
391     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
392   }
393   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
394   /* if memory was published with AMS then destroy it */
395   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
396 
397   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
398   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
399   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
400   PetscFunctionReturn(0);
401 }
402 
403 #undef __FUNCT__
404 #define __FUNCT__ "DMSetUp"
405 /*@
406     DMSetUp - sets up the data structures inside a DM object
407 
408     Collective on DM
409 
410     Input Parameter:
411 .   dm - the DM object to setup
412 
413     Level: developer
414 
415 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
416 
417 @*/
418 PetscErrorCode  DMSetUp(DM dm)
419 {
420   PetscErrorCode ierr;
421 
422   PetscFunctionBegin;
423   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
424   if (dm->setupcalled) PetscFunctionReturn(0);
425   if (dm->ops->setup) {
426     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
427   }
428   dm->setupcalled = PETSC_TRUE;
429   PetscFunctionReturn(0);
430 }
431 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
  PetscErrorCode ierr;
  char           typeName[256] = MATAIJ;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
    /* Viewing flags are recorded here and acted on after PetscOptionsEnd() */
    ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
    /* NOTE(review): the hard-coded 256 should stay in sync with sizeof(typeName); the -dm_mat_type call below uses sizeof(typeName) */
    ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
    }
    /* Default shown to the user is the already-set mattype, falling back to MATAIJ */
    ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
    if (dm->ops->setfromoptions) {
      ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
    }
    /* process any options handlers added with PetscObjectAddOptionsHandler() */
    ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  if (flg1) {
    ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
  }
  if (flg2) {
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (flg3) {
    /* VTK output goes to a fixed file name in the working directory */
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  if (flg4) {
    /* LaTeX output goes to a fixed file name in the working directory */
    PetscViewer viewer;

    ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
    ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
    ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
    ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
    ierr = DMView(dm, viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
516 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; if PETSC_NULL the standard-out ASCII viewer on the DM's communicator is used

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: the file class id followed by the type name in a fixed 256-char field */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
557 
558 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
559 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
560 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section-based path: build the vector layout directly from the default (global) section */
    PetscSection gSection;
    PetscInt     localSize, blockSize = -1, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Block size heuristic: the common unconstrained dof count over all points,
       or 1 as soon as two points disagree */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof, cdof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
      if ((dof > 0) && (dof-cdof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    /* NOTE(review): blockSize stays -1 when no point has dof > 0; assumes localSize is 0 in that case -- confirm */
    ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
    if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
    ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
    /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
    /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
  } else {
    /* Otherwise delegate entirely to the DM implementation */
    ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
619 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector (sequential, on PETSC_COMM_SELF)

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section-based path: local vector covers the full (unconstrained) local storage */
    PetscInt localSize, blockSize = -1, pStart, pEnd, p;

    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Block size heuristic: the common dof count over all points, or 1 if dofs vary */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof;
      if ((dof > 0) && (dof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
    ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
  } else {
    /* Otherwise delegate entirely to the DM implementation */
    ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
670 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping (cached on the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    /* Lazily build and cache the mapping on first request */
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE(review): this local array intentionally shadows the ltog output parameter */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
731 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping (cached on the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* With block size 1 the blocked mapping is identical to the scalar one;
         share it and take an extra reference so both caches can be destroyed */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
774 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the implementation has not determined a block size yet */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
801 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* NOTE(review): assumes dm1's type implements createinterpolation; a NULL pointer here would crash */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
839 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* NOTE(review): assumes dm1's type implements getinjection; a NULL pointer here would crash */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
872 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
903 
904 #undef __FUNCT__
905 #define __FUNCT__ "DMCreateMatrix"
906 /*@C
907     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
908 
909     Collective on DM
910 
911     Input Parameter:
912 +   dm - the DM object
913 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
914             any type which inherits from one of these (such as MATAIJ)
915 
916     Output Parameter:
917 .   mat - the empty Jacobian
918 
919     Level: beginner
920 
921     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
922        do not need to do it yourself.
923 
924        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
925        the nonzero pattern call DMDASetMatPreallocateOnly()
926 
927        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
928        internally by PETSc.
929 
930        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
931        the indices for the global numbering for DMDAs which is complicated.
932 
933 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
934 
935 @*/
936 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
937 {
938   PetscErrorCode ierr;
939 
940   PetscFunctionBegin;
941   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
942 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
943   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
944 #endif
945   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
946   PetscValidPointer(mat,3);
947   if (dm->mattype) {
948     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
949   } else {
950     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
951   }
952   PetscFunctionReturn(0);
953 }
954 
955 #undef __FUNCT__
956 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag stored on the DM; presumably consulted by the type-specific DMCreateMatrix()
     implementations (see DMCreateMatrix() notes) — confirm in the DMDA sources */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
977 
978 #undef __FUNCT__
979 #define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer; on return *mem points to the work array

  Level: developer

  Notes: Arrays are recycled through two per-DM linked lists: dm->workin holds free
  buffers and dm->workout holds buffers currently checked out.

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse a previously returned link instead of allocating a fresh one */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer if the request exceeds its capacity (bytes is zeroed for a fresh link) */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push onto the checked-out list so DMRestoreWorkArray() can find it by address */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1023 
1024 #undef __FUNCT__
1025 #define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size passed to DMGetWorkArray() (not used here)
. dtype - data type passed to DMGetWorkArray() (not used here)
- mem - address of the array pointer; set to PETSC_NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link holding this buffer on the checked-out list and move it to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0); /* unreachable: SETERRQ returns; retained to satisfy compilers */
}
1062 
1063 #undef __FUNCT__
1064 #define __FUNCT__ "DMSetNullSpaceConstructor"
1065 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1066 {
1067   PetscFunctionBegin;
1068   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1069   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1070   dm->nullspaceConstructors[field] = nullsp;
1071   PetscFunctionReturn(0);
1072 }
1073 
1074 #undef __FUNCT__
1075 #define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the requested ones to empty */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass over the chart: count the unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reused as a running fill position in the second pass */
    }
    /* Second pass: record the global index of each unconstrained dof */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* The index arrays are handed off to the IS objects (PETSC_OWN_POINTER) */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: fall back to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1186 
1187 #undef __FUNCT__
1188 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1189 /*@C
1190   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1191 
1192   Not Collective
1193 
1194   Input Parameters:
1195 + dm   - the DM object
1196 - name - the name of the field decomposition
1197 
1198   Output Parameter:
1199 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1200 
1201   Level: advanced
1202 
1203 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1204 @*/
1205 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1206 {
1207   PetscErrorCode ierr;
1208 
1209   PetscFunctionBegin;
1210   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1211   PetscValidCharPointer(name,2);
1212   PetscValidPointer(ddm,3);
1213   *ddm = PETSC_NULL;
1214   if (dm->ops->createfielddecompositiondm) {
1215     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1216   }
1217   PetscFunctionReturn(0);
1218 }
1219 
1220 #undef __FUNCT__
1221 #define __FUNCT__ "DMCreateFieldDecomposition"
1222 /*@C
1223   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1224                           corresponding to different fields: each IS contains the global indices of the dofs of the
1225                           corresponding field. The optional list of DMs define the DM for each subproblem.
1226                           Generalizes DMCreateFieldIS().
1227 
1228   Not collective
1229 
1230   Input Parameter:
1231 . dm - the DM object
1232 
1233   Output Parameters:
1234 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1235 . namelist  - The name for each field (or PETSC_NULL if not requested)
1236 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1237 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1238 
1239   Level: intermediate
1240 
1241   Notes:
1242   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1243   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1244   and all of the arrays should be freed with PetscFree().
1245 
1246 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1247 @*/
1248 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1249 {
1250   PetscErrorCode ierr;
1251 
1252   PetscFunctionBegin;
1253   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1254   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1255   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1256   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1257   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1258   if (!dm->ops->createfielddecomposition) {
1259     PetscSection section;
1260     PetscInt     numFields, f;
1261 
1262     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1263     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1264     if (section && numFields && dm->ops->createsubdm) {
1265       *len = numFields;
1266       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1267       for (f = 0; f < numFields; ++f) {
1268         const char *fieldName;
1269 
1270         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1271         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1272         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1273       }
1274     } else {
1275       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1276       /* By default there are no DMs associated with subproblems. */
1277       if (dmlist) *dmlist = PETSC_NULL;
1278     }
1279   }
1280   else {
1281     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1282   }
1283   PetscFunctionReturn(0);
1284 }
1285 
1286 #undef __FUNCT__
1287 #define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1321 
1322 #undef __FUNCT__
1323 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1324 /*@C
1325   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1326 
1327   Not Collective
1328 
1329   Input Parameters:
1330 + dm   - the DM object
1331 - name - the name of the subdomain decomposition
1332 
1333   Output Parameter:
1334 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1335 
1336   Level: advanced
1337 
1338 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1339 @*/
1340 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1341 {
1342   PetscErrorCode ierr;
1343 
1344   PetscFunctionBegin;
1345   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1346   PetscValidCharPointer(name,2);
1347   PetscValidPointer(ddm,3);
1348   *ddm = PETSC_NULL;
1349   if (dm->ops->createdomaindecompositiondm) {
1350     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1351   }
1352   PetscFunctionReturn(0);
1353 }
1354 
1355 #undef __FUNCT__
1356 #define __FUNCT__ "DMCreateDomainDecomposition"
1357 /*@C
1358   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1359                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1360                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1361                           define a nonoverlapping covering, while outer subdomains can overlap.
1362                           The optional list of DMs define the DM for each subproblem.
1363 
1364   Not collective
1365 
1366   Input Parameter:
1367 . dm - the DM object
1368 
1369   Output Parameters:
1370 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1371 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1372 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1373 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1374 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1375 
1376   Level: intermediate
1377 
1378   Notes:
1379   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1380   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1381   and all of the arrays should be freed with PetscFree().
1382 
1383 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1384 @*/
1385 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1386 {
1387   PetscErrorCode ierr;
1388   DMSubDomainHookLink link;
1389   PetscInt            i,l;
1390 
1391   PetscFunctionBegin;
1392   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1393   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1394   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1395   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1396   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1397   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1398   if (dm->ops->createdomaindecomposition) {
1399     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1400   } else {
1401     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1402   }
1403   if (len) *len = l;
1404   for (i = 0; i < l; i++) {
1405     for (link=dm->subdomainhook; link; link=link->next) {
1406       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1407     }
1408     (*dmlist)[i]->ctx = dm->ctx;
1409   }
1410   PetscFunctionReturn(0);
1411 }
1412 
1413 
1414 #undef __FUNCT__
1415 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1416 /*@C
1417   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1418 
1419   Not collective
1420 
1421   Input Parameters:
1422 + dm - the DM object
1423 . n  - the number of subdomain scatters
1424 - subdms - the local subdomains
1425 
1426   Output Parameters:
1427 + n     - the number of scatters returned
1428 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1429 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1430 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1431 
1432   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1433   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1434   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1435   solution and residual data.
1436 
1437   Level: developer
1438 
1439 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1440 @*/
1441 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1442 {
1443   PetscErrorCode ierr;
1444 
1445   PetscFunctionBegin;
1446   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1447   PetscValidPointer(subdms,3);
1448   PetscValidPointer(iscat,4);
1449   PetscValidPointer(oscat,5);
1450   PetscValidPointer(gscat,6);
1451   if (dm->ops->createddscatters) {
1452     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1453   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1454   PetscFunctionReturn(0);
1455 }
1456 
1457 #undef __FUNCT__
1458 #define __FUNCT__ "DMRefine"
1459 /*@
1460   DMRefine - Refines a DM object
1461 
1462   Collective on DM
1463 
1464   Input Parameter:
1465 + dm   - the DM object
1466 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1467 
1468   Output Parameter:
1469 . dmf - the refined DM, or PETSC_NULL
1470 
1471   Note: If no refinement was done, the return value is PETSC_NULL
1472 
1473   Level: developer
1474 
1475 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1476 @*/
1477 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1478 {
1479   PetscErrorCode ierr;
1480   DMRefineHookLink link;
1481 
1482   PetscFunctionBegin;
1483   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1484   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1485   if (*dmf) {
1486     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1487     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1488     (*dmf)->ctx       = dm->ctx;
1489     (*dmf)->leveldown = dm->leveldown;
1490     (*dmf)->levelup   = dm->levelup + 1;
1491     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1492     for (link=dm->refinehook; link; link=link->next) {
1493       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1494     }
1495   }
1496   PetscFunctionReturn(0);
1497 }
1498 
1499 #undef __FUNCT__
1500 #define __FUNCT__ "DMRefineHookAdd"
1501 /*@
1502    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1503 
1504    Logically Collective
1505 
1506    Input Arguments:
1507 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1508 .  refinehook - function to run when setting up a coarser level
1509 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1510 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1511 
1512    Calling sequence of refinehook:
1513 $    refinehook(DM coarse,DM fine,void *ctx);
1514 
1515 +  coarse - coarse level DM
1516 .  fine - fine level DM to interpolate problem to
1517 -  ctx - optional user-defined function context
1518 
1519    Calling sequence for interphook:
1520 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1521 
1522 +  coarse - coarse level DM
1523 .  interp - matrix interpolating a coarse-level solution to the finer grid
1524 .  fine - fine level DM to update
1525 -  ctx - optional user-defined function context
1526 
1527    Level: advanced
1528 
1529    Notes:
1530    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1531 
1532    If this function is called multiple times, the hooks will be run in the order they are added.
1533 
1534 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1535 @*/
1536 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1537 {
1538   PetscErrorCode ierr;
1539   DMRefineHookLink link,*p;
1540 
1541   PetscFunctionBegin;
1542   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1543   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1544   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1545   link->refinehook = refinehook;
1546   link->interphook = interphook;
1547   link->ctx = ctx;
1548   link->next = PETSC_NULL;
1549   *p = link;
1550   PetscFunctionReturn(0);
1551 }
1552 
1553 #undef __FUNCT__
1554 #define __FUNCT__ "DMInterpolate"
1555 /*@
1556    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1557 
1558    Collective if any hooks are
1559 
1560    Input Arguments:
1561 +  coarse - coarser DM to use as a base
1562 .  restrct - interpolation matrix, apply using MatInterpolate()
1563 -  fine - finer DM to update
1564 
1565    Level: developer
1566 
1567 .seealso: DMRefineHookAdd(), MatInterpolate()
1568 @*/
1569 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1570 {
1571   PetscErrorCode ierr;
1572   DMRefineHookLink link;
1573 
1574   PetscFunctionBegin;
1575   for (link=fine->refinehook; link; link=link->next) {
1576     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1577   }
1578   PetscFunctionReturn(0);
1579 }
1580 
1581 #undef __FUNCT__
1582 #define __FUNCT__ "DMGetRefineLevel"
1583 /*@
1584     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1585 
1586     Not Collective
1587 
1588     Input Parameter:
1589 .   dm - the DM object
1590 
1591     Output Parameter:
1592 .   level - number of refinements
1593 
1594     Level: developer
1595 
1596 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1597 
1598 @*/
1599 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1600 {
1601   PetscFunctionBegin;
1602   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1603   *level = dm->levelup;
1604   PetscFunctionReturn(0);
1605 }
1606 
1607 #undef __FUNCT__
1608 #define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up during the global-to-local transfer.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1664 
1665 #undef __FUNCT__
1666 #define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The star-forest path only supports insertion, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path: map the *_ALL_VALUES modes down to the plain modes */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1710 
1711 #undef __FUNCT__
1712 #define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  PetscScalar    *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
  /* The star-forest path only supports insertion, not accumulation */
  if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path: map the *_ALL_VALUES modes down to the plain modes */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* End hooks registered with DMGlobalToLocalHookAdd() run after the transfer completes */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1756 
1757 #undef __FUNCT__
1758 #define __FUNCT__ "DMLocalToGlobalBegin"
1759 /*@
1760     DMLocalToGlobalBegin - updates global vectors from local vectors
1761 
1762     Neighbor-wise Collective on DM
1763 
1764     Input Parameters:
1765 +   dm - the DM object
1766 .   l - the local vector
1767 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1768            base point.
-   g - the global vector
1770 
1771     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1772            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1773            global array to the final global array with VecAXPY().
1774 
1775     Level: beginner
1776 
1777 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1778 
1779 @*/
1780 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1781 {
1782   PetscSF        sf;
1783   PetscErrorCode ierr;
1784 
1785   PetscFunctionBegin;
1786   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1787   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1788   if (sf) {
1789     MPI_Op       op;
1790     PetscScalar *lArray, *gArray;
1791 
1792     switch(mode) {
1793     case INSERT_VALUES:
1794     case INSERT_ALL_VALUES:
1795 #if defined(PETSC_HAVE_MPI_REPLACE)
1796       op = MPI_REPLACE; break;
1797 #else
1798       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1799 #endif
1800     case ADD_VALUES:
1801     case ADD_ALL_VALUES:
1802       op = MPI_SUM; break;
1803   default:
1804     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1805     }
1806     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1807     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1808     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1809     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1810     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1811   } else {
1812     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1813   }
1814   PetscFunctionReturn(0);
1815 }
1816 
1817 #undef __FUNCT__
1818 #define __FUNCT__ "DMLocalToGlobalEnd"
1819 /*@
1820     DMLocalToGlobalEnd - updates global vectors from local vectors
1821 
1822     Neighbor-wise Collective on DM
1823 
1824     Input Parameters:
1825 +   dm - the DM object
1826 .   l - the local vector
1827 .   mode - INSERT_VALUES or ADD_VALUES
1828 -   g - the global vector
1829 
1830 
1831     Level: beginner
1832 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1834 
1835 @*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the MPI reduction operation; this must agree with
       the mapping performed in DMLocalToGlobalBegin() */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Complete the reduction started in DMLocalToGlobalBegin() */
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementations do not understand the _ALL_ modes, so translate them */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1872 
1873 #undef __FUNCT__
1874 #define __FUNCT__ "DMCoarsen"
1875 /*@
1876     DMCoarsen - Coarsens a DM object
1877 
1878     Collective on DM
1879 
1880     Input Parameter:
1881 +   dm - the DM object
1882 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1883 
1884     Output Parameter:
1885 .   dmc - the coarsened DM
1886 
1887     Level: developer
1888 
1889 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1890 
1891 @*/
1892 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1893 {
1894   PetscErrorCode ierr;
1895   DMCoarsenHookLink link;
1896 
1897   PetscFunctionBegin;
1898   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1899   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1900   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1901   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1902   (*dmc)->ctx       = dm->ctx;
1903   (*dmc)->levelup   = dm->levelup;
1904   (*dmc)->leveldown = dm->leveldown + 1;
1905   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1906   for (link=dm->coarsenhook; link; link=link->next) {
1907     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1908   }
1909   PetscFunctionReturn(0);
1910 }
1911 
1912 #undef __FUNCT__
1913 #define __FUNCT__ "DMCoarsenHookAdd"
1914 /*@
1915    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1916 
1917    Logically Collective
1918 
1919    Input Arguments:
+  fine - DM on which to run a hook when restricting to a coarser level
1921 .  coarsenhook - function to run when setting up a coarser level
1922 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1923 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1924 
1925    Calling sequence of coarsenhook:
1926 $    coarsenhook(DM fine,DM coarse,void *ctx);
1927 
1928 +  fine - fine level DM
1929 .  coarse - coarse level DM to restrict problem to
1930 -  ctx - optional user-defined function context
1931 
1932    Calling sequence for restricthook:
1933 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1934 
1935 +  fine - fine level DM
1936 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1937 .  rscale - scaling vector for restriction
1938 .  inject - matrix restricting by injection
1939 .  coarse - coarse level DM to update
1940 -  ctx - optional user-defined function context
1941 
1942    Level: advanced
1943 
1944    Notes:
1945    This function is only needed if auxiliary data needs to be set up on coarse grids.
1946 
1947    If this function is called multiple times, the hooks will be run in the order they are added.
1948 
1949    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1950    extract the finest level information from its context (instead of from the SNES).
1951 
1952 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1953 @*/
1954 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1955 {
1956   PetscErrorCode ierr;
1957   DMCoarsenHookLink link,*p;
1958 
1959   PetscFunctionBegin;
1960   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1961   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1962   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1963   link->coarsenhook = coarsenhook;
1964   link->restricthook = restricthook;
1965   link->ctx = ctx;
1966   link->next = PETSC_NULL;
1967   *p = link;
1968   PetscFunctionReturn(0);
1969 }
1970 
1971 #undef __FUNCT__
1972 #define __FUNCT__ "DMRestrict"
1973 /*@
1974    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1975 
1976    Collective if any hooks are
1977 
1978    Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1983 
1984    Level: developer
1985 
1986 .seealso: DMCoarsenHookAdd(), MatRestrict()
1987 @*/
1988 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1989 {
1990   PetscErrorCode ierr;
1991   DMCoarsenHookLink link;
1992 
1993   PetscFunctionBegin;
1994   for (link=fine->coarsenhook; link; link=link->next) {
1995     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1996   }
1997   PetscFunctionReturn(0);
1998 }
1999 
2000 #undef __FUNCT__
2001 #define __FUNCT__ "DMSubDomainHookAdd"
2002 /*@
2003    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2004 
2005    Logically Collective
2006 
2007    Input Arguments:
+  global - global DM
.  ddhook - function to run when setting up a block solver
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2011 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
2012 
2013    Calling sequence for restricthook:
2014 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
2015 
2016 +  global - global DM
2017 .  out    - scatter to the outer (with ghost and overlap points) block vector
2018 .  in     - scatter to block vector values only owned locally
2019 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
2020 -  ctx - optional user-defined function context
2021 
2022    Level: advanced
2023 
2024    Notes:
2025    This function is only needed if auxiliary data needs to be set up on coarse grids.
2026 
2027    If this function is called multiple times, the hooks will be run in the order they are added.
2028 
2029    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2030    extract the finest level information from its context (instead of from the SNES).
2031 
2032 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2033 @*/
2034 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2035 {
2036   PetscErrorCode ierr;
2037   DMSubDomainHookLink link,*p;
2038 
2039   PetscFunctionBegin;
2040   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2041   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2042   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2043   link->restricthook = restricthook;
2044   link->ddhook = ddhook;
2045   link->ctx = ctx;
2046   link->next = PETSC_NULL;
2047   *p = link;
2048   PetscFunctionReturn(0);
2049 }
2050 
2051 #undef __FUNCT__
2052 #define __FUNCT__ "DMSubDomainRestrict"
2053 /*@
2054    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2055 
2056    Collective if any hooks are
2057 
2058    Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update
2063 
2064    Level: developer
2065 
2066 .seealso: DMCoarsenHookAdd(), MatRestrict()
2067 @*/
2068 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2069 {
2070   PetscErrorCode ierr;
2071   DMSubDomainHookLink link;
2072 
2073   PetscFunctionBegin;
2074   for (link=global->subdomainhook; link; link=link->next) {
2075     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2076   }
2077   PetscFunctionReturn(0);
2078 }
2079 
2080 #undef __FUNCT__
2081 #define __FUNCT__ "DMGetCoarsenLevel"
2082 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2084 
2085     Not Collective
2086 
2087     Input Parameter:
2088 .   dm - the DM object
2089 
2090     Output Parameter:
2091 .   level - number of coarsenings
2092 
2093     Level: developer
2094 
2095 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2096 
2097 @*/
2098 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2099 {
2100   PetscFunctionBegin;
2101   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2102   *level = dm->leveldown;
2103   PetscFunctionReturn(0);
2104 }
2105 
2106 
2107 
2108 #undef __FUNCT__
2109 #define __FUNCT__ "DMRefineHierarchy"
2110 /*@C
2111     DMRefineHierarchy - Refines a DM object, all levels at once
2112 
2113     Collective on DM
2114 
2115     Input Parameter:
2116 +   dm - the DM object
2117 -   nlevels - the number of levels of refinement
2118 
2119     Output Parameter:
2120 .   dmf - the refined DM hierarchy
2121 
2122     Level: developer
2123 
2124 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2125 
2126 @*/
2127 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2128 {
2129   PetscErrorCode ierr;
2130 
2131   PetscFunctionBegin;
2132   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2133   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2134   if (nlevels == 0) PetscFunctionReturn(0);
2135   if (dm->ops->refinehierarchy) {
2136     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2137   } else if (dm->ops->refine) {
2138     PetscInt i;
2139 
2140     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2141     for (i=1; i<nlevels; i++) {
2142       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2143     }
2144   } else {
2145     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2146   }
2147   PetscFunctionReturn(0);
2148 }
2149 
2150 #undef __FUNCT__
2151 #define __FUNCT__ "DMCoarsenHierarchy"
2152 /*@C
2153     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2154 
2155     Collective on DM
2156 
2157     Input Parameter:
2158 +   dm - the DM object
2159 -   nlevels - the number of levels of coarsening
2160 
2161     Output Parameter:
2162 .   dmc - the coarsened DM hierarchy
2163 
2164     Level: developer
2165 
2166 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2167 
2168 @*/
2169 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2170 {
2171   PetscErrorCode ierr;
2172 
2173   PetscFunctionBegin;
2174   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2175   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2176   if (nlevels == 0) PetscFunctionReturn(0);
2177   PetscValidPointer(dmc,3);
2178   if (dm->ops->coarsenhierarchy) {
2179     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2180   } else if (dm->ops->coarsen) {
2181     PetscInt i;
2182 
2183     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2184     for (i=1; i<nlevels; i++) {
2185       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2186     }
2187   } else {
2188     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2189   }
2190   PetscFunctionReturn(0);
2191 }
2192 
2193 #undef __FUNCT__
2194 #define __FUNCT__ "DMCreateAggregates"
2195 /*@
2196    DMCreateAggregates - Gets the aggregates that map between
2197    grids associated with two DMs.
2198 
2199    Collective on DM
2200 
2201    Input Parameters:
2202 +  dmc - the coarse grid DM
2203 -  dmf - the fine grid DM
2204 
2205    Output Parameters:
2206 .  rest - the restriction matrix (transpose of the projection matrix)
2207 
2208    Level: intermediate
2209 
2210 .keywords: interpolation, restriction, multigrid
2211 
2212 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2213 @*/
2214 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2215 {
2216   PetscErrorCode ierr;
2217 
2218   PetscFunctionBegin;
2219   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2220   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2221   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2222   PetscFunctionReturn(0);
2223 }
2224 
2225 #undef __FUNCT__
2226 #define __FUNCT__ "DMSetApplicationContextDestroy"
2227 /*@C
2228     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2229 
2230     Not Collective
2231 
2232     Input Parameters:
2233 +   dm - the DM object
2234 -   destroy - the destroy function
2235 
2236     Level: intermediate
2237 
2238 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2239 
2240 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored callback is invoked on dm->ctx when the DM is destroyed; may be PETSC_NULL */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2248 
2249 #undef __FUNCT__
2250 #define __FUNCT__ "DMSetApplicationContext"
2251 /*@
2252     DMSetApplicationContext - Set a user context into a DM object
2253 
2254     Not Collective
2255 
2256     Input Parameters:
2257 +   dm - the DM object
2258 -   ctx - the user context
2259 
2260     Level: intermediate
2261 
2262 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2263 
2264 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores the pointer only; ownership stays with the caller unless a
     destroy routine is set with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2272 
2273 #undef __FUNCT__
2274 #define __FUNCT__ "DMGetApplicationContext"
2275 /*@
2276     DMGetApplicationContext - Gets a user context from a DM object
2277 
2278     Not Collective
2279 
2280     Input Parameter:
2281 .   dm - the DM object
2282 
2283     Output Parameter:
2284 .   ctx - the user context
2285 
2286     Level: intermediate
2287 
2288 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2289 
2290 @*/
2291 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2292 {
2293   PetscFunctionBegin;
2294   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2295   *(void**)ctx = dm->ctx;
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 #undef __FUNCT__
2300 #define __FUNCT__ "DMSetVariableBounds"
2301 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2303 
2304     Logically Collective on DM
2305 
2306     Input Parameter:
2307 +   dm - the DM object
2308 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2309 
2310     Level: intermediate
2311 
2312 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2313          DMSetJacobian()
2314 
2315 @*/
2316 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2317 {
2318   PetscFunctionBegin;
2319   dm->ops->computevariablebounds = f;
2320   PetscFunctionReturn(0);
2321 }
2322 
2323 #undef __FUNCT__
2324 #define __FUNCT__ "DMHasVariableBounds"
2325 /*@
2326     DMHasVariableBounds - does the DM object have a variable bounds function?
2327 
2328     Not Collective
2329 
2330     Input Parameter:
.   dm - the DM object
2332 
2333     Output Parameter:
2334 .   flg - PETSC_TRUE if the variable bounds function exists
2335 
2336     Level: developer
2337 
2338 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2339 
2340 @*/
2341 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2342 {
2343   PetscFunctionBegin;
2344   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2345   PetscFunctionReturn(0);
2346 }
2347 
2348 #undef __FUNCT__
2349 #define __FUNCT__ "DMComputeVariableBounds"
2350 /*@C
2351     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2352 
2353     Logically Collective on DM
2354 
2355     Input Parameters:
+   dm - the DM object
2357 -   x  - current solution at which the bounds are computed
2358 
2359     Output parameters:
2360 +   xl - lower bound
2361 -   xu - upper bound
2362 
2363     Level: intermediate
2364 
2365 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2366 
2367 @*/
2368 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2369 {
2370   PetscErrorCode ierr;
2371   PetscFunctionBegin;
2372   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2373   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2374   if (dm->ops->computevariablebounds) {
2375     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2376   }
2377   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2378   PetscFunctionReturn(0);
2379 }
2380 
2381 #undef __FUNCT__
2382 #define __FUNCT__ "DMHasColoring"
2383 /*@
2384     DMHasColoring - does the DM object have a method of providing a coloring?
2385 
2386     Not Collective
2387 
2388     Input Parameter:
2389 .   dm - the DM object
2390 
2391     Output Parameter:
2392 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2393 
2394     Level: developer
2395 
2396 .seealso DMHasFunction(), DMCreateColoring()
2397 
2398 @*/
2399 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2400 {
2401   PetscFunctionBegin;
2402   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2403   PetscFunctionReturn(0);
2404 }
2405 
2406 #undef  __FUNCT__
2407 #define __FUNCT__ "DMSetVec"
2408 /*@C
2409     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2410 
2411     Collective on DM
2412 
2413     Input Parameter:
2414 +   dm - the DM object
2415 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2416 
2417     Level: developer
2418 
2419 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2420 
2421 @*/
2422 PetscErrorCode  DMSetVec(DM dm,Vec x)
2423 {
2424   PetscErrorCode ierr;
2425   PetscFunctionBegin;
2426   if (x) {
2427     if (!dm->x) {
2428       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2429     }
2430     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2431   }
2432   else if (dm->x) {
2433     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2434   }
2435   PetscFunctionReturn(0);
2436 }
2437 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2440 
2441 #undef __FUNCT__
2442 #define __FUNCT__ "DMSetType"
2443 /*@C
2444   DMSetType - Builds a DM, for a particular DM implementation.
2445 
2446   Collective on DM
2447 
2448   Input Parameters:
2449 + dm     - The DM object
2450 - method - The name of the DM type
2451 
2452   Options Database Key:
2453 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2454 
2455   Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMCOMPOSITE, or DMSLICED).
2457 
2458   Level: intermediate
2459 
2460 .keywords: DM, set, type
2461 .seealso: DMGetType(), DMCreate()
2462 @*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the type's constructor in the registry, populating it if needed */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2486 
2487 #undef __FUNCT__
2488 #define __FUNCT__ "DMGetType"
2489 /*@C
2490   DMGetType - Gets the DM type name (as a string) from the DM.
2491 
2492   Not Collective
2493 
2494   Input Parameter:
2495 . dm  - The DM
2496 
2497   Output Parameter:
2498 . type - The DM type name
2499 
2500   Level: intermediate
2501 
2502 .keywords: DM, get, type, name
2503 .seealso: DMSetType(), DMCreate()
2504 @*/
2505 PetscErrorCode  DMGetType(DM dm, DMType *type)
2506 {
2507   PetscErrorCode ierr;
2508 
2509   PetscFunctionBegin;
2510   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2511   PetscValidCharPointer(type,2);
2512   if (!DMRegisterAllCalled) {
2513     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2514   }
2515   *type = ((PetscObject)dm)->type_name;
2516   PetscFunctionReturn(0);
2517 }
2518 
2519 #undef __FUNCT__
2520 #define __FUNCT__ "DMConvert"
2521 /*@C
2522   DMConvert - Converts a DM to another DM, either of the same or different type.
2523 
2524   Collective on DM
2525 
2526   Input Parameters:
2527 + dm - the DM
2528 - newtype - new DM type (use "same" for the same type)
2529 
2530   Output Parameter:
2531 . M - pointer to new DM
2532 
2533   Notes:
2534   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2535   the MPI communicator of the generated DM is always the same as the communicator
2536   of the input DM.
2537 
2538   Level: intermediate
2539 
2540 .seealso: DMCreate()
2541 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below — presumably
     a same-type shortcut was intended; confirm before relying on them */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Converter functions are composed on the object under the name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No basic converter exists yet; conversion between unrelated types is an error */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2614 
2615 /*--------------------------------------------------------------------------------------------------------------------*/
2616 
2617 #undef __FUNCT__
2618 #define __FUNCT__ "DMRegister"
2619 /*@C
2620   DMRegister - See DMRegisterDynamic()
2621 
2622   Level: advanced
2623 @*/
2624 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2625 {
2626   char fullname[PETSC_MAX_PATH_LEN];
2627   PetscErrorCode ierr;
2628 
2629   PetscFunctionBegin;
2630   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2631   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2632   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2633   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2634   PetscFunctionReturn(0);
2635 }
2636 
2637 
2638 /*--------------------------------------------------------------------------------------------------------------------*/
2639 #undef __FUNCT__
2640 #define __FUNCT__ "DMRegisterDestroy"
2641 /*@C
2642    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2643 
2644    Not Collective
2645 
2646    Level: advanced
2647 
2648 .keywords: DM, register, destroy
2649 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2650 @*/
2651 PetscErrorCode  DMRegisterDestroy(void)
2652 {
2653   PetscErrorCode ierr;
2654 
2655   PetscFunctionBegin;
2656   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2657   DMRegisterAllCalled = PETSC_FALSE;
2658   PetscFunctionReturn(0);
2659 }
2660 
2661 #undef __FUNCT__
2662 #define __FUNCT__ "DMLoad"
2663 /*@C
2664   DMLoad - Loads a DM that has been stored in binary  with DMView().
2665 
2666   Collective on PetscViewer
2667 
2668   Input Parameters:
2669 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2670            some related function before a call to DMLoad().
2671 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2672            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2673 
2674    Level: intermediate
2675 
2676   Notes:
2677    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2678 
2679   Notes for advanced users:
2680   Most users should not need to know the details of the binary storage
2681   format, since DMLoad() and DMView() completely hide these details.
2682   But for anyone who's interested, the standard binary matrix storage
2683   format is
2684 .vb
2685      has not yet been determined
2686 .ve
2687 
2688 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2689 @*/
2690 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2691 {
2692   PetscErrorCode ierr;
2693   PetscBool      isbinary;
2694   PetscInt       classid;
2695   char           type[256];
2696 
2697   PetscFunctionBegin;
2698   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2699   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2700   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2701   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2702 
2703   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2704   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2705   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2706   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2707   if (newdm->ops->load) {
2708     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2709   }
2710   PetscFunctionReturn(0);
2711 }
2712 
2713 /******************************** FEM Support **********************************/
2714 
2715 #undef __FUNCT__
2716 #define __FUNCT__ "DMPrintCellVector"
2717 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2718   PetscInt       f;
2719   PetscErrorCode ierr;
2720 
2721   PetscFunctionBegin;
2722   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2723   for (f = 0; f < len; ++f) {
2724     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2725   }
2726   PetscFunctionReturn(0);
2727 }
2728 
2729 #undef __FUNCT__
2730 #define __FUNCT__ "DMPrintCellMatrix"
2731 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2732   PetscInt       f, g;
2733   PetscErrorCode ierr;
2734 
2735   PetscFunctionBegin;
2736   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2737   for (f = 0; f < rows; ++f) {
2738     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2739     for (g = 0; g < cols; ++g) {
2740       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2741     }
2742     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2743   }
2744   PetscFunctionReturn(0);
2745 }
2746 
2747 #undef __FUNCT__
2748 #define __FUNCT__ "DMGetDefaultSection"
2749 /*@
2750   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2751 
2752   Input Parameter:
2753 . dm - The DM
2754 
2755   Output Parameter:
2756 . section - The PetscSection
2757 
2758   Level: intermediate
2759 
2760   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2761 
2762 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2763 @*/
2764 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2765   PetscFunctionBegin;
2766   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2767   PetscValidPointer(section, 2);
2768   *section = dm->defaultSection;
2769   PetscFunctionReturn(0);
2770 }
2771 
2772 #undef __FUNCT__
2773 #define __FUNCT__ "DMSetDefaultSection"
2774 /*@
2775   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2776 
2777   Input Parameters:
2778 + dm - The DM
2779 - section - The PetscSection
2780 
2781   Level: intermediate
2782 
2783   Note: Any existing Section will be destroyed
2784 
2785 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2786 @*/
2787 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2788   PetscInt       numFields;
2789   PetscInt       f;
2790   PetscErrorCode ierr;
2791 
2792   PetscFunctionBegin;
2793   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2794   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2795   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2796   dm->defaultSection = section;
2797   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2798   if (numFields) {
2799     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2800     for (f = 0; f < numFields; ++f) {
2801       const char *name;
2802 
2803       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2804       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2805     }
2806   }
2807   PetscFunctionReturn(0);
2808 }
2809 
2810 #undef __FUNCT__
2811 #define __FUNCT__ "DMGetDefaultGlobalSection"
2812 /*@
2813   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2814 
2815   Collective on DM
2816 
2817   Input Parameter:
2818 . dm - The DM
2819 
2820   Output Parameter:
2821 . section - The PetscSection
2822 
2823   Level: intermediate
2824 
2825   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2826 
2827 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2828 @*/
2829 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
2830   PetscErrorCode ierr;
2831 
2832   PetscFunctionBegin;
2833   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2834   PetscValidPointer(section, 2);
2835   if (!dm->defaultGlobalSection) {
2836     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2837     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2838     ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2839   }
2840   *section = dm->defaultGlobalSection;
2841   PetscFunctionReturn(0);
2842 }
2843 
2844 #undef __FUNCT__
2845 #define __FUNCT__ "DMSetDefaultGlobalSection"
2846 /*@
2847   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2848 
2849   Input Parameters:
2850 + dm - The DM
2851 - section - The PetscSection
2852 
2853   Level: intermediate
2854 
2855   Note: Any existing Section will be destroyed
2856 
2857 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2858 @*/
2859 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2860   PetscErrorCode ierr;
2861 
2862   PetscFunctionBegin;
2863   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2864   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2865   dm->defaultGlobalSection = section;
2866   PetscFunctionReturn(0);
2867 }
2868 
2869 #undef __FUNCT__
2870 #define __FUNCT__ "DMGetDefaultSF"
2871 /*@
2872   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2873   it is created from the default PetscSection layouts in the DM.
2874 
2875   Input Parameter:
2876 . dm - The DM
2877 
2878   Output Parameter:
2879 . sf - The PetscSF
2880 
2881   Level: intermediate
2882 
2883   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2884 
2885 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2886 @*/
2887 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2888   PetscInt       nroots;
2889   PetscErrorCode ierr;
2890 
2891   PetscFunctionBegin;
2892   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2893   PetscValidPointer(sf, 2);
2894   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2895   if (nroots < 0) {
2896     PetscSection section, gSection;
2897 
2898     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2899     if (section) {
2900       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2901       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2902     } else {
2903       *sf = PETSC_NULL;
2904       PetscFunctionReturn(0);
2905     }
2906   }
2907   *sf = dm->defaultSF;
2908   PetscFunctionReturn(0);
2909 }
2910 
2911 #undef __FUNCT__
2912 #define __FUNCT__ "DMSetDefaultSF"
2913 /*@
2914   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2915 
2916   Input Parameters:
2917 + dm - The DM
2918 - sf - The PetscSF
2919 
2920   Level: intermediate
2921 
2922   Note: Any previous SF is destroyed
2923 
2924 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2925 @*/
2926 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2927   PetscErrorCode ierr;
2928 
2929   PetscFunctionBegin;
2930   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2931   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2932   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2933   dm->defaultSF = sf;
2934   PetscFunctionReturn(0);
2935 }
2936 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a PetscLayout over the owned (unconstrained) dofs so that a global
     offset can be mapped to an (owning rank, local index) pair below */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof means the point is owned remotely
     and encodes the size as -(gdof+1); constrained dofs are excluded */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local (leaf) indices and remote (root) locations; l counts leaves emitted so far */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the unconstrained local dofs; c tracks how many constrained dofs were
       skipped so far, so l+d-c is the next free slot in local[] (l itself is
       advanced only in the remote-filling loops below) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the global offset from -(goff+1) and locate
         the owning rank by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insertion point)-1 when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root lives on this rank at the local layout offset */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3025 
3026 #undef __FUNCT__
3027 #define __FUNCT__ "DMGetPointSF"
3028 /*@
3029   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3030 
3031   Input Parameter:
3032 . dm - The DM
3033 
3034   Output Parameter:
3035 . sf - The PetscSF
3036 
3037   Level: intermediate
3038 
3039   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3040 
3041 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3042 @*/
3043 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3044   PetscFunctionBegin;
3045   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3046   PetscValidPointer(sf, 2);
3047   *sf = dm->sf;
3048   PetscFunctionReturn(0);
3049 }
3050 
3051 #undef __FUNCT__
3052 #define __FUNCT__ "DMSetPointSF"
3053 /*@
3054   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3055 
3056   Input Parameters:
3057 + dm - The DM
3058 - sf - The PetscSF
3059 
3060   Level: intermediate
3061 
3062 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3063 @*/
3064 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3065   PetscErrorCode ierr;
3066 
3067   PetscFunctionBegin;
3068   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3069   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3070   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3071   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3072   dm->sf = sf;
3073   PetscFunctionReturn(0);
3074 }
3075 
3076 #undef __FUNCT__
3077 #define __FUNCT__ "DMGetNumFields"
3078 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3079 {
3080   PetscFunctionBegin;
3081   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3082   PetscValidPointer(numFields, 2);
3083   *numFields = dm->numFields;
3084   PetscFunctionReturn(0);
3085 }
3086 
3087 #undef __FUNCT__
3088 #define __FUNCT__ "DMSetNumFields"
3089 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3090 {
3091   PetscInt       f;
3092   PetscErrorCode ierr;
3093 
3094   PetscFunctionBegin;
3095   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3096   for (f = 0; f < dm->numFields; ++f) {
3097     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3098   }
3099   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3100   dm->numFields = numFields;
3101   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3102   for (f = 0; f < dm->numFields; ++f) {
3103     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3104   }
3105   PetscFunctionReturn(0);
3106 }
3107 
3108 #undef __FUNCT__
3109 #define __FUNCT__ "DMGetField"
3110 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3111 {
3112   PetscFunctionBegin;
3113   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3114   PetscValidPointer(field, 2);
3115   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3116   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3117   *field = dm->fields[f];
3118   PetscFunctionReturn(0);
3119 }
3120 
3121 #undef __FUNCT__
3122 #define __FUNCT__ "DMSetCoordinates"
3123 /*@
3124   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3125 
3126   Collective on DM
3127 
3128   Input Parameters:
3129 + dm - the DM
3130 - c - coordinate vector
3131 
3132   Note:
3133   The coordinates do include those for ghost points, which are in the local vector
3134 
3135   Level: intermediate
3136 
3137 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3138 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3139 @*/
3140 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3141 {
3142   PetscErrorCode ierr;
3143 
3144   PetscFunctionBegin;
3145   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3146   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3147   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3148   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3149   dm->coordinates = c;
3150   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3151   PetscFunctionReturn(0);
3152 }
3153 
3154 #undef __FUNCT__
3155 #define __FUNCT__ "DMSetCoordinatesLocal"
3156 /*@
3157   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3158 
3159   Collective on DM
3160 
3161    Input Parameters:
3162 +  dm - the DM
3163 -  c - coordinate vector
3164 
3165   Note:
3166   The coordinates of ghost points can be set using DMSetCoordinates()
3167   followed by DMGetCoordinatesLocal(). This is intended to enable the
3168   setting of ghost coordinates outside of the domain.
3169 
3170   Level: intermediate
3171 
3172 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3173 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3174 @*/
3175 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3176 {
3177   PetscErrorCode ierr;
3178 
3179   PetscFunctionBegin;
3180   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3181   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3182   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3183   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3184   dm->coordinatesLocal = c;
3185   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3186   PetscFunctionReturn(0);
3187 }
3188 
3189 #undef __FUNCT__
3190 #define __FUNCT__ "DMGetCoordinates"
3191 /*@
3192   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3193 
3194   Not Collective
3195 
3196   Input Parameter:
3197 . dm - the DM
3198 
3199   Output Parameter:
3200 . c - global coordinate vector
3201 
3202   Note:
3203   This is a borrowed reference, so the user should NOT destroy this vector
3204 
3205   Each process has only the local coordinates (does NOT have the ghost coordinates).
3206 
3207   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3208   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3209 
3210   Level: intermediate
3211 
3212 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3213 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3214 @*/
3215 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3216 {
3217   PetscErrorCode ierr;
3218 
3219   PetscFunctionBegin;
3220   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3221   PetscValidPointer(c,2);
3222   if (!dm->coordinates && dm->coordinatesLocal) {
3223     DM cdm = PETSC_NULL;
3224 
3225     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3226     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3227     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3228     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3229     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3230   }
3231   *c = dm->coordinates;
3232   PetscFunctionReturn(0);
3233 }
3234 
3235 #undef __FUNCT__
3236 #define __FUNCT__ "DMGetCoordinatesLocal"
3237 /*@
3238   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3239 
3240   Collective on DM
3241 
3242   Input Parameter:
3243 . dm - the DM
3244 
3245   Output Parameter:
3246 . c - coordinate vector
3247 
3248   Note:
3249   This is a borrowed reference, so the user should NOT destroy this vector
3250 
3251   Each process has the local and ghost coordinates
3252 
3253   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3254   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3255 
3256   Level: intermediate
3257 
3258 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3259 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3260 @*/
3261 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3262 {
3263   PetscErrorCode ierr;
3264 
3265   PetscFunctionBegin;
3266   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3267   PetscValidPointer(c,2);
3268   if (!dm->coordinatesLocal && dm->coordinates) {
3269     DM cdm = PETSC_NULL;
3270 
3271     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3272     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3273     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3274     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3275     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3276   }
3277   *c = dm->coordinatesLocal;
3278   PetscFunctionReturn(0);
3279 }
3280 
3281 #undef __FUNCT__
3282 #define __FUNCT__ "DMGetCoordinateDM"
3283 /*@
3284   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3285 
3286   Collective on DM
3287 
3288   Input Parameter:
3289 . dm - the DM
3290 
3291   Output Parameter:
3292 . cdm - coordinate DM
3293 
3294   Level: intermediate
3295 
3296 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3297 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3298 @*/
3299 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3300 {
3301   PetscErrorCode ierr;
3302 
3303   PetscFunctionBegin;
3304   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3305   PetscValidPointer(cdm,2);
3306   if (!dm->coordinateDM) {
3307     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3308     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3309   }
3310   *cdm = dm->coordinateDM;
3311   PetscFunctionReturn(0);
3312 }
3313