xref: /petsc/src/dm/interface/dm.c (revision 6d187b9135d66ac23f29b4e7e5d4e626b6954b97)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode ierr;
16   PetscBool      flg;
17   PetscViewer    viewer;
18 
19   PetscFunctionBegin;
20   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&flg);CHKERRQ(ierr);
21   if (flg) {
22     ierr = DMView(dm,viewer);CHKERRQ(ierr);
23     ierr = PetscOptionsRestoreViewer(viewer);CHKERRQ(ierr);
24   }
25   PetscFunctionReturn(0);
26 }
27 
28 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the dependent packages must be registered explicitly
     before objects of their classes can be created */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* Allocate the object header plus the function table (ops); the type is not set yet */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 presumably matches the declared length of the
       nullspaceConstructors array in dmimpl.h -- confirm and prefer a named constant */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
87 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
116 
117 #undef __FUNCT__
118 #define __FUNCT__ "VecGetDM"
119 /*@
120   VecGetDM - Gets the DM defining the data layout of the vector
121 
122   Not collective
123 
124   Input Parameter:
125 . v - The Vec
126 
127   Output Parameter:
128 . dm - The DM
129 
130   Level: intermediate
131 
132 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
133 @*/
134 PetscErrorCode VecGetDM(Vec v, DM *dm)
135 {
136   PetscErrorCode ierr;
137 
138   PetscFunctionBegin;
139   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
140   PetscValidPointer(dm,2);
141   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
142   PetscFunctionReturn(0);
143 }
144 
145 #undef __FUNCT__
146 #define __FUNCT__ "VecSetDM"
147 /*@
148   VecSetDM - Sets the DM defining the data layout of the vector
149 
150   Not collective
151 
152   Input Parameters:
153 + v - The Vec
154 - dm - The DM
155 
156   Level: intermediate
157 
158 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
159 @*/
160 PetscErrorCode VecSetDM(Vec v, DM dm)
161 {
162   PetscErrorCode ierr;
163 
164   PetscFunctionBegin;
165   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
166   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
167   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
168   PetscFunctionReturn(0);
169 }
170 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
198 
199 #undef __FUNCT__
200 #define __FUNCT__ "MatGetDM"
201 /*@
202   MatGetDM - Gets the DM defining the data layout of the matrix
203 
204   Not collective
205 
206   Input Parameter:
207 . A - The Mat
208 
209   Output Parameter:
210 . dm - The DM
211 
212   Level: intermediate
213 
214 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
215 @*/
216 PetscErrorCode MatGetDM(Mat A, DM *dm)
217 {
218   PetscErrorCode ierr;
219 
220   PetscFunctionBegin;
221   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
222   PetscValidPointer(dm,2);
223   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
224   PetscFunctionReturn(0);
225 }
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "MatSetDM"
229 /*@
230   MatSetDM - Sets the DM defining the data layout of the matrix
231 
232   Not collective
233 
234   Input Parameters:
235 + A - The Mat
236 - dm - The DM
237 
238   Level: intermediate
239 
240 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
241 @*/
242 PetscErrorCode MatSetDM(Mat A, DM dm)
243 {
244   PetscErrorCode ierr;
245 
246   PetscFunctionBegin;
247   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
248   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
249   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
250   PetscFunctionReturn(0);
251 }
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "DMSetOptionsPrefix"
255 /*@C
256    DMSetOptionsPrefix - Sets the prefix used for searching for all
257    DMDA options in the database.
258 
259    Logically Collective on DMDA
260 
261    Input Parameter:
262 +  da - the DMDA context
263 -  prefix - the prefix to prepend to all option names
264 
265    Notes:
266    A hyphen (-) must NOT be given at the beginning of the prefix name.
267    The first character of all runtime options is AUTOMATICALLY the hyphen.
268 
269    Level: advanced
270 
271 .keywords: DMDA, set, options, prefix, database
272 
273 .seealso: DMSetFromOptions()
274 @*/
275 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
276 {
277   PetscErrorCode ierr;
278 
279   PetscFunctionBegin;
280   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
281   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
282   PetscFunctionReturn(0);
283 }
284 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

    Notes: The DM and its cached vectors reference each other; the destruction
    only proceeds once the remaining references are exactly those circular
    self-references.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* the solution vector counts only if it points back at this DM */
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If references beyond the circular ones remain, just drop this handle */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* Vectors checked out with DMGetLocalVector() must be restored before destroy */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Attached solver contexts (KSP/SNES/TS) */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User context is destroyed only if a destroy callback was registered */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* NOTE(review): assumes every DM implementation fills in ops->destroy -- confirm */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
425 
426 #undef __FUNCT__
427 #define __FUNCT__ "DMSetUp"
428 /*@
429     DMSetUp - sets up the data structures inside a DM object
430 
431     Collective on DM
432 
433     Input Parameter:
434 .   dm - the DM object to setup
435 
436     Level: developer
437 
438 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
439 
440 @*/
441 PetscErrorCode  DMSetUp(DM dm)
442 {
443   PetscErrorCode ierr;
444 
445   PetscFunctionBegin;
446   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
447   if (dm->setupcalled) PetscFunctionReturn(0);
448   if (dm->ops->setup) {
449     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
450   }
451   dm->setupcalled = PETSC_TRUE;
452   PetscFunctionReturn(0);
453 }
454 
455 #undef __FUNCT__
456 #define __FUNCT__ "DMSetFromOptions"
457 /*@
458     DMSetFromOptions - sets parameters in a DM from the options database
459 
460     Collective on DM
461 
462     Input Parameter:
463 .   dm - the DM object to set options for
464 
465     Options Database:
466 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
467 .   -dm_vec_type <type>  type of vector to create inside DM
468 .   -dm_mat_type <type>  type of matrix to create inside DM
469 -   -dm_coloring_type <global or ghosted>
470 
471     Level: developer
472 
473 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
474 
475 @*/
476 PetscErrorCode  DMSetFromOptions(DM dm)
477 {
478   PetscBool      flg;
479   PetscErrorCode ierr;
480   char           typeName[256] = MATAIJ;
481 
482   PetscFunctionBegin;
483   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
484   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
485     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
486     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
487     if (flg) {
488       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
489     }
490     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
491     if (flg) {
492       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
493     }
494     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
495     if (dm->ops->setfromoptions) {
496       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
497     }
498     /* process any options handlers added with PetscObjectAddOptionsHandler() */
499     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
500   ierr = PetscOptionsEnd();CHKERRQ(ierr);
501   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
502   PetscFunctionReturn(0);
503 }
504 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; PETSC_NULL selects the default ASCII viewer on the DM's communicator

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    /* Default to stdout for the DM's communicator */
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    /* Binary header: file classid, then a fixed 256-byte record holding the type name */
    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    /* NOTE(review): type_name may be NULL if DMSetType() was never called -- confirm PetscStrncpy tolerates that */
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Implementation-specific view, if the type provides one */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
545 
546 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
547 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
548 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section path: size and block the vector from the default/global sections */
    PetscSection gSection;
    PetscInt     localSize, blockSize = -1, pStart, pEnd, p;

    /* Ensures the global section exists before defaultGlobalSection is read below */
    ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof, cdof;

      /* Block size = common unconstrained dof count per point; fall back to 1 if points disagree */
      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
      if ((dof > 0) && (dof-cdof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    /* NOTE(review): blockSize stays -1 when no point has dof > 0 -- confirm that cannot occur here */
    ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
    if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
    ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
    /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
    /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
    /* NOTE(review): VECOP_VIEW is overridden unconditionally here -- confirm this is intended for all scalar builds */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
  } else {
    /* No section: defer entirely to the DM implementation */
    ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
607 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section path: size the sequential vector from the default (local) section */
    PetscInt localSize, blockSize = -1, pStart, pEnd, p;

    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      /* Block size = common dof count per point; fall back to 1 if points disagree */
      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof;
      if ((dof > 0) && (dof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
    /* Local vectors are always sequential, regardless of the DM's communicator */
    ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* NOTE(review): VECOP_VIEW is overridden unconditionally here -- confirm this is intended for all scalar builds */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
  } else {
    /* No section: defer entirely to the DM implementation */
    ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
658 
659 #undef __FUNCT__
660 #define __FUNCT__ "DMGetLocalToGlobalMapping"
661 /*@
662    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
663 
664    Collective on DM
665 
666    Input Parameter:
667 .  dm - the DM that provides the mapping
668 
669    Output Parameter:
670 .  ltog - the mapping
671 
672    Level: intermediate
673 
674    Notes:
675    This mapping can then be used by VecSetLocalToGlobalMapping() or
676    MatSetLocalToGlobalMapping().
677 
678 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
679 @*/
680 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
681 {
682   PetscErrorCode ierr;
683 
684   PetscFunctionBegin;
685   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
686   PetscValidPointer(ltog,2);
687   if (!dm->ltogmap) {
688     PetscSection section, sectionGlobal;
689 
690     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
691     if (section) {
692       PetscInt      *ltog;
693       PetscInt       pStart, pEnd, size, p, l;
694 
695       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
696       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
697       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
698       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
699       for (p = pStart, l = 0; p < pEnd; ++p) {
700         PetscInt dof, off, c;
701 
702         /* Should probably use constrained dofs */
703         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
704         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
705         for (c = 0; c < dof; ++c, ++l) {
706           ltog[l] = off+c;
707         }
708       }
709       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
710       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
711     } else {
712       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
713       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
714     }
715   }
716   *ltog = dm->ltogmap;
717   PetscFunctionReturn(0);
718 }
719 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the block mapping is just the point mapping; share it with an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
762 
763 #undef __FUNCT__
764 #define __FUNCT__ "DMGetBlockSize"
765 /*@
766    DMGetBlockSize - Gets the inherent block size associated with a DM
767 
768    Not Collective
769 
770    Input Parameter:
771 .  dm - the DM with block structure
772 
773    Output Parameter:
774 .  bs - the block size, 1 implies no exploitable block structure
775 
776    Level: intermediate
777 
778 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
779 @*/
780 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
781 {
782   PetscFunctionBegin;
783   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
784   PetscValidPointer(bs,2);
785   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
786   *bs = dm->bs;
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "DMCreateInterpolation"
792 /*@
793     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
794 
795     Collective on DM
796 
797     Input Parameter:
798 +   dm1 - the DM object
799 -   dm2 - the second, finer DM object
800 
801     Output Parameter:
802 +  mat - the interpolation
803 -  vec - the scaling (optional)
804 
805     Level: developer
806 
807     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
808         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
809 
810         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
811         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
812 
813 
814 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
815 
816 @*/
817 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
818 {
819   PetscErrorCode ierr;
820 
821   PetscFunctionBegin;
822   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
823   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
824   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "DMCreateInjection"
830 /*@
831     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
832 
833     Collective on DM
834 
835     Input Parameter:
836 +   dm1 - the DM object
837 -   dm2 - the second, finer DM object
838 
839     Output Parameter:
840 .   ctx - the injection
841 
842     Level: developer
843 
844    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
845         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
846 
847 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
848 
849 @*/
850 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
851 {
852   PetscErrorCode ierr;
853 
854   PetscFunctionBegin;
855   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
856   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
857   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
860 
861 #undef __FUNCT__
862 #define __FUNCT__ "DMCreateColoring"
863 /*@C
864     DMCreateColoring - Gets coloring for a DMDA or DMComposite
865 
866     Collective on DM
867 
868     Input Parameter:
869 +   dm - the DM object
870 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
871 -   matype - either MATAIJ or MATBAIJ
872 
873     Output Parameter:
874 .   coloring - the coloring
875 
876     Level: developer
877 
878 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
879 
880 @*/
881 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
882 {
883   PetscErrorCode ierr;
884 
885   PetscFunctionBegin;
886   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
887   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
888   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
889   PetscFunctionReturn(0);
890 }
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "DMCreateMatrix"
894 /*@C
895     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
896 
897     Collective on DM
898 
899     Input Parameter:
900 +   dm - the DM object
901 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
902             any type which inherits from one of these (such as MATAIJ)
903 
904     Output Parameter:
905 .   mat - the empty Jacobian
906 
907     Level: beginner
908 
909     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
910        do not need to do it yourself.
911 
912        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
913        the nonzero pattern call DMDASetMatPreallocateOnly()
914 
915        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
916        internally by PETSc.
917 
918        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
919        the indices for the global numbering for DMDAs which is complicated.
920 
921 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
922 
923 @*/
924 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
925 {
926   PetscErrorCode ierr;
927 
928   PetscFunctionBegin;
929   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
930 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
931   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
932 #endif
933   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
934   PetscValidPointer(mat,3);
935   if (dm->mattype) {
936     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
937   } else {
938     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
939   }
940   PetscFunctionReturn(0);
941 }
942 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameter:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): flag is presumably consulted by type-specific creatematrix implementations — confirm */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
965 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

  Notes: The buffer is owned by the DM and is recycled across calls; the caller must return it
  with DMRestoreWorkArray() before it can be handed out again.

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse the first link on the "available" list */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    /* No free link: allocate one (PetscNew* returns zeroed storage, so link->bytes starts at 0 and forces allocation below) */
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    /* Grow the buffer to the requested size; previous contents are discarded */
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push onto the "checked out" list so DMRestoreWorkArray() can locate it by buffer address */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1011 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Scan the "checked out" list for the link owning this buffer; p tracks the incoming pointer so the link can be unspliced */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      /* Move the link back to the "available" list and null the caller's pointer to prevent reuse */
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Buffer address not found among checked-out links: caller passed a stale or foreign pointer */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1050 
1051 #undef __FUNCT__
1052 #define __FUNCT__ "DMSetNullSpaceConstructor"
1053 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1054 {
1055   PetscFunctionBegin;
1056   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1057   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1058   dm->nullspaceConstructors[field] = nullsp;
1059   PetscFunctionReturn(0);
1060 }
1061 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the requested ones so callers see defined values on every path */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: two passes over the global section chart — first to size
       each field's index array, then to fill it with the global offsets */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count unconstrained dofs per field on locally-owned points (gdof > 0) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          /* Constrained dofs are excluded from the global index sets */
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      /* Reset counts so pass 2 can reuse fieldSizes[] as fill cursors */
      fieldSizes[f] = 0;
    }
    /* Pass 2: record the global offset of each unconstrained field dof; goff advances
       through the point's dofs in field order */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER transfers ownership of fieldIndices[f] to the IS */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: fall back to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1174 
1175 #undef __FUNCT__
1176 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1177 /*@C
1178   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1179 
1180   Not Collective
1181 
1182   Input Parameters:
1183 + dm   - the DM object
1184 - name - the name of the field decomposition
1185 
1186   Output Parameter:
1187 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1188 
1189   Level: advanced
1190 
1191 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1192 @*/
1193 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1194 {
1195   PetscErrorCode ierr;
1196 
1197   PetscFunctionBegin;
1198   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1199   PetscValidCharPointer(name,2);
1200   PetscValidPointer(ddm,3);
1201   *ddm = PETSC_NULL;
1202   if (dm->ops->createfielddecompositiondm) {
1203     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1204   }
1205   PetscFunctionReturn(0);
1206 }
1207 
1208 #undef __FUNCT__
1209 #define __FUNCT__ "DMCreateFieldDecomposition"
1210 /*@C
1211   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1212                           corresponding to different fields: each IS contains the global indices of the dofs of the
1213                           corresponding field. The optional list of DMs define the DM for each subproblem.
1214                           Generalizes DMCreateFieldIS().
1215 
1216   Not collective
1217 
1218   Input Parameter:
1219 . dm - the DM object
1220 
1221   Output Parameters:
1222 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1223 . namelist  - The name for each field (or PETSC_NULL if not requested)
1224 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1225 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1226 
1227   Level: intermediate
1228 
1229   Notes:
1230   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1231   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1232   and all of the arrays should be freed with PetscFree().
1233 
1234 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1235 @*/
1236 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1237 {
1238   PetscErrorCode ierr;
1239 
1240   PetscFunctionBegin;
1241   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1242   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1243   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1244   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1245   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1246   if (!dm->ops->createfielddecomposition) {
1247     PetscSection section;
1248     PetscInt     numFields, f;
1249 
1250     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1251     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1252     if (section && numFields && dm->ops->createsubdm) {
1253       *len = numFields;
1254       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1255       for (f = 0; f < numFields; ++f) {
1256         const char *fieldName;
1257 
1258         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1259         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1260         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1261       }
1262     } else {
1263       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1264       /* By default there are no DMs associated with subproblems. */
1265       if (dmlist) *dmlist = PETSC_NULL;
1266     }
1267   }
1268   else {
1269     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1270   }
1271   PetscFunctionReturn(0);
1272 }
1273 
1274 #undef __FUNCT__
1275 #define __FUNCT__ "DMCreateSubDM"
1276 /*@C
1277   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1278                   The fields are defined by DMCreateFieldIS().
1279 
1280   Not collective
1281 
1282   Input Parameters:
1283 + dm - the DM object
1284 . numFields - number of fields in this subproblem
1285 - len       - The number of subproblems in the decomposition (or PETSC_NULL if not requested)
1286 
1287   Output Parameters:
1288 . is - The global indices for the subproblem
1289 - dm - The DM for the subproblem
1290 
1291   Level: intermediate
1292 
1293 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1294 @*/
1295 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1296 {
1297   PetscErrorCode ierr;
1298 
1299   PetscFunctionBegin;
1300   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1301   PetscValidPointer(fields,3);
1302   if (is) {PetscValidPointer(is,4);}
1303   if (subdm) {PetscValidPointer(subdm,5);}
1304   if (dm->ops->createsubdm) {
1305     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1306   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1307   PetscFunctionReturn(0);
1308 }
1309 
1310 #undef __FUNCT__
1311 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1312 /*@C
1313   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1314 
1315   Not Collective
1316 
1317   Input Parameters:
1318 + dm   - the DM object
1319 - name - the name of the subdomain decomposition
1320 
1321   Output Parameter:
1322 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1323 
1324   Level: advanced
1325 
1326 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1327 @*/
1328 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1329 {
1330   PetscErrorCode ierr;
1331 
1332   PetscFunctionBegin;
1333   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1334   PetscValidCharPointer(name,2);
1335   PetscValidPointer(ddm,3);
1336   *ddm = PETSC_NULL;
1337   if (dm->ops->createdomaindecompositiondm) {
1338     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1339   }
1340   PetscFunctionReturn(0);
1341 }
1342 
1343 #undef __FUNCT__
1344 #define __FUNCT__ "DMCreateDomainDecomposition"
1345 /*@C
1346   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1347                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1348                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1349                           define a nonoverlapping covering, while outer subdomains can overlap.
1350                           The optional list of DMs define the DM for each subproblem.
1351 
1352   Not collective
1353 
1354   Input Parameter:
1355 . dm - the DM object
1356 
1357   Output Parameters:
1358 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1359 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1360 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1361 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1362 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1363 
1364   Level: intermediate
1365 
1366   Notes:
1367   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1368   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1369   and all of the arrays should be freed with PetscFree().
1370 
1371 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1372 @*/
1373 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1374 {
1375   PetscErrorCode ierr;
1376   DMSubDomainHookLink link;
1377   PetscInt            i,l;
1378 
1379   PetscFunctionBegin;
1380   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1381   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1382   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1383   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1384   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1385   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1386   if (dm->ops->createdomaindecomposition) {
1387     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1388   } else {
1389     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1390   }
1391   if (len) *len = l;
1392   for (i = 0; i < l; i++) {
1393     for (link=dm->subdomainhook; link; link=link->next) {
1394       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1395     }
1396     (*dmlist)[i]->ctx = dm->ctx;
1397   }
1398   PetscFunctionReturn(0);
1399 }
1400 
1401 
1402 #undef __FUNCT__
1403 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1404 /*@C
1405   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1406 
1407   Not collective
1408 
1409   Input Parameters:
1410 + dm - the DM object
1411 . n  - the number of subdomain scatters
1412 - subdms - the local subdomains
1413 
1414   Output Parameters:
1415 + n     - the number of scatters returned
1416 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1417 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1418 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1419 
1420   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1421   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1422   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1423   solution and residual data.
1424 
1425   Level: developer
1426 
1427 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1428 @*/
1429 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1430 {
1431   PetscErrorCode ierr;
1432 
1433   PetscFunctionBegin;
1434   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1435   PetscValidPointer(subdms,3);
1436   PetscValidPointer(iscat,4);
1437   PetscValidPointer(oscat,5);
1438   PetscValidPointer(gscat,6);
1439   if (dm->ops->createddscatters) {
1440     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1441   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1442   PetscFunctionReturn(0);
1443 }
1444 
1445 #undef __FUNCT__
1446 #define __FUNCT__ "DMRefine"
1447 /*@
1448   DMRefine - Refines a DM object
1449 
1450   Collective on DM
1451 
1452   Input Parameter:
1453 + dm   - the DM object
1454 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1455 
1456   Output Parameter:
1457 . dmf - the refined DM, or PETSC_NULL
1458 
1459   Note: If no refinement was done, the return value is PETSC_NULL
1460 
1461   Level: developer
1462 
1463 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1464 @*/
1465 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1466 {
1467   PetscErrorCode ierr;
1468   DMRefineHookLink link;
1469 
1470   PetscFunctionBegin;
1471   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1472   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1473   if (*dmf) {
1474     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1475     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1476     (*dmf)->ctx       = dm->ctx;
1477     (*dmf)->leveldown = dm->leveldown;
1478     (*dmf)->levelup   = dm->levelup + 1;
1479     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1480     for (link=dm->refinehook; link; link=link->next) {
1481       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1482     }
1483   }
1484   PetscFunctionReturn(0);
1485 }
1486 
1487 #undef __FUNCT__
1488 #define __FUNCT__ "DMRefineHookAdd"
1489 /*@
1490    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1491 
1492    Logically Collective
1493 
1494    Input Arguments:
1495 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1496 .  refinehook - function to run when setting up a coarser level
1497 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1498 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1499 
1500    Calling sequence of refinehook:
1501 $    refinehook(DM coarse,DM fine,void *ctx);
1502 
1503 +  coarse - coarse level DM
1504 .  fine - fine level DM to interpolate problem to
1505 -  ctx - optional user-defined function context
1506 
1507    Calling sequence for interphook:
1508 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1509 
1510 +  coarse - coarse level DM
1511 .  interp - matrix interpolating a coarse-level solution to the finer grid
1512 .  fine - fine level DM to update
1513 -  ctx - optional user-defined function context
1514 
1515    Level: advanced
1516 
1517    Notes:
1518    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1519 
1520    If this function is called multiple times, the hooks will be run in the order they are added.
1521 
1522 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1523 @*/
1524 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1525 {
1526   PetscErrorCode ierr;
1527   DMRefineHookLink link,*p;
1528 
1529   PetscFunctionBegin;
1530   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1531   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1532   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1533   link->refinehook = refinehook;
1534   link->interphook = interphook;
1535   link->ctx = ctx;
1536   link->next = PETSC_NULL;
1537   *p = link;
1538   PetscFunctionReturn(0);
1539 }
1540 
1541 #undef __FUNCT__
1542 #define __FUNCT__ "DMInterpolate"
1543 /*@
1544    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1545 
1546    Collective if any hooks are
1547 
1548    Input Arguments:
1549 +  coarse - coarser DM to use as a base
1550 .  restrct - interpolation matrix, apply using MatInterpolate()
1551 -  fine - finer DM to update
1552 
1553    Level: developer
1554 
1555 .seealso: DMRefineHookAdd(), MatInterpolate()
1556 @*/
1557 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1558 {
1559   PetscErrorCode ierr;
1560   DMRefineHookLink link;
1561 
1562   PetscFunctionBegin;
1563   for (link=fine->refinehook; link; link=link->next) {
1564     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1565   }
1566   PetscFunctionReturn(0);
1567 }
1568 
1569 #undef __FUNCT__
1570 #define __FUNCT__ "DMGetRefineLevel"
1571 /*@
1572     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1573 
1574     Not Collective
1575 
1576     Input Parameter:
1577 .   dm - the DM object
1578 
1579     Output Parameter:
1580 .   level - number of refinements
1581 
1582     Level: developer
1583 
1584 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1585 
1586 @*/
1587 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1588 {
1589   PetscFunctionBegin;
1590   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1591   *level = dm->levelup;
1592   PetscFunctionReturn(0);
1593 }
1594 
1595 #undef __FUNCT__
1596 #define __FUNCT__ "DMGlobalToLocalHookAdd"
1597 /*@
1598    DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called
1599 
1600    Logically Collective
1601 
1602    Input Arguments:
1603 +  dm - the DM
1604 .  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
1605 .  endhook - function to run after DMGlobalToLocalEnd() has completed
1606 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1607 
1608    Calling sequence for beginhook:
1609 $    beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1610 
1611 +  dm - global DM
1612 .  g - global vector
1613 .  mode - mode
1614 .  l - local vector
1615 -  ctx - optional user-defined function context
1616 
1617 
1618    Calling sequence for endhook:
1619 $    beginhook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1620 
1621 +  global - global DM
1622 -  ctx - optional user-defined function context
1623 
1624    Level: advanced
1625 
1626    Notes:
1627    This function is only needed if auxiliary data needs to be set up on coarse grids.
1628 
1629    If this function is called multiple times, the hooks will be run in the order they are added.
1630 
1631    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1632    extract the finest level information from its context (instead of from the SNES).
1633 
1634 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1635 @*/
1636 PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
1637 {
1638   PetscErrorCode ierr;
1639   DMGlobalToLocalHookLink link,*p;
1640 
1641   PetscFunctionBegin;
1642   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1643   for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1644   ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
1645   link->beginhook = beginhook;
1646   link->endhook = endhook;
1647   link->ctx = ctx;
1648   link->next = PETSC_NULL;
1649   *p = link;
1650   PetscFunctionReturn(0);
1651 }
1652 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user-registered begin hooks before any communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF path: broadcast global entries to local storage; the matching PetscSFBcastEnd
       happens in DMGlobalToLocalEnd() */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fallback to the type-specific implementation; the *_ALL_VALUES modes are
       translated to their plain counterparts before dispatch */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1698 
1699 #undef __FUNCT__
1700 #define __FUNCT__ "DMGlobalToLocalEnd"
1701 /*@
1702     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1703 
1704     Neighbor-wise Collective on DM
1705 
1706     Input Parameters:
1707 +   dm - the DM object
1708 .   g - the global vector
1709 .   mode - INSERT_VALUES or ADD_VALUES
1710 -   l - the local vector
1711 
1712 
1713     Level: beginner
1714 
1715 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1716 
1717 @*/
1718 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1719 {
1720   PetscSF        sf;
1721   PetscErrorCode ierr;
1722   PetscScalar    *lArray, *gArray;
1723   DMGlobalToLocalHookLink link;
1724 
1725   PetscFunctionBegin;
1726   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1727   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1728   if (sf) {
1729   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1730 
1731     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1732     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1733     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1734     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1735     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1736   } else {
1737     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1738   }
1739   for (link=dm->gtolhook; link; link=link->next) {
1740     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1741   }
1742   PetscFunctionReturn(0);
1743 }
1744 
1745 #undef __FUNCT__
1746 #define __FUNCT__ "DMLocalToGlobalBegin"
1747 /*@
1748     DMLocalToGlobalBegin - updates global vectors from local vectors
1749 
1750     Neighbor-wise Collective on DM
1751 
1752     Input Parameters:
1753 +   dm - the DM object
1754 .   l - the local vector
1755 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1756            base point.
1757 - - the global vector
1758 
1759     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1760            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1761            global array to the final global array with VecAXPY().
1762 
1763     Level: beginner
1764 
1765 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1766 
1767 @*/
1768 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1769 {
1770   PetscSF        sf;
1771   PetscErrorCode ierr;
1772 
1773   PetscFunctionBegin;
1774   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1775   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1776   if (sf) {
1777     MPI_Op       op;
1778     PetscScalar *lArray, *gArray;
1779 
1780     switch(mode) {
1781     case INSERT_VALUES:
1782     case INSERT_ALL_VALUES:
1783 #if defined(PETSC_HAVE_MPI_REPLACE)
1784       op = MPI_REPLACE; break;
1785 #else
1786       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1787 #endif
1788     case ADD_VALUES:
1789     case ADD_ALL_VALUES:
1790       op = MPI_SUM; break;
1791   default:
1792     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1793     }
1794     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1795     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1796     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1797     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1798     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1799   } else {
1800     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1801   }
1802   PetscFunctionReturn(0);
1803 }
1804 
1805 #undef __FUNCT__
1806 #define __FUNCT__ "DMLocalToGlobalEnd"
1807 /*@
1808     DMLocalToGlobalEnd - updates global vectors from local vectors
1809 
1810     Neighbor-wise Collective on DM
1811 
1812     Input Parameters:
1813 +   dm - the DM object
1814 .   l - the local vector
1815 .   mode - INSERT_VALUES or ADD_VALUES
1816 -   g - the global vector
1817 
1818 
1819     Level: beginner
1820 
1821 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1822 
1823 @*/
1824 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1825 {
1826   PetscSF        sf;
1827   PetscErrorCode ierr;
1828 
1829   PetscFunctionBegin;
1830   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1831   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1832   if (sf) {
1833     MPI_Op       op;
1834     PetscScalar *lArray, *gArray;
1835 
1836     switch(mode) {
1837     case INSERT_VALUES:
1838     case INSERT_ALL_VALUES:
1839 #if defined(PETSC_HAVE_MPI_REPLACE)
1840       op = MPI_REPLACE; break;
1841 #else
1842       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1843 #endif
1844     case ADD_VALUES:
1845     case ADD_ALL_VALUES:
1846       op = MPI_SUM; break;
1847     default:
1848       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1849     }
1850     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1851     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1852     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1853     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1854     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1855   } else {
1856     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1857   }
1858   PetscFunctionReturn(0);
1859 }
1860 
1861 #undef __FUNCT__
1862 #define __FUNCT__ "DMCoarsen"
1863 /*@
1864     DMCoarsen - Coarsens a DM object
1865 
1866     Collective on DM
1867 
1868     Input Parameter:
1869 +   dm - the DM object
1870 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1871 
1872     Output Parameter:
1873 .   dmc - the coarsened DM
1874 
1875     Level: developer
1876 
1877 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1878 
1879 @*/
1880 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1881 {
1882   PetscErrorCode ierr;
1883   DMCoarsenHookLink link;
1884 
1885   PetscFunctionBegin;
1886   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1887   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1888   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1889   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1890   (*dmc)->ctx       = dm->ctx;
1891   (*dmc)->levelup   = dm->levelup;
1892   (*dmc)->leveldown = dm->leveldown + 1;
1893   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1894   for (link=dm->coarsenhook; link; link=link->next) {
1895     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1896   }
1897   PetscFunctionReturn(0);
1898 }
1899 
1900 #undef __FUNCT__
1901 #define __FUNCT__ "DMCoarsenHookAdd"
1902 /*@
1903    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1904 
1905    Logically Collective
1906 
1907    Input Arguments:
1908 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1909 .  coarsenhook - function to run when setting up a coarser level
1910 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1911 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1912 
1913    Calling sequence of coarsenhook:
1914 $    coarsenhook(DM fine,DM coarse,void *ctx);
1915 
1916 +  fine - fine level DM
1917 .  coarse - coarse level DM to restrict problem to
1918 -  ctx - optional user-defined function context
1919 
1920    Calling sequence for restricthook:
1921 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1922 
1923 +  fine - fine level DM
1924 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1925 .  rscale - scaling vector for restriction
1926 .  inject - matrix restricting by injection
1927 .  coarse - coarse level DM to update
1928 -  ctx - optional user-defined function context
1929 
1930    Level: advanced
1931 
1932    Notes:
1933    This function is only needed if auxiliary data needs to be set up on coarse grids.
1934 
1935    If this function is called multiple times, the hooks will be run in the order they are added.
1936 
1937    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1938    extract the finest level information from its context (instead of from the SNES).
1939 
1940 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1941 @*/
1942 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1943 {
1944   PetscErrorCode ierr;
1945   DMCoarsenHookLink link,*p;
1946 
1947   PetscFunctionBegin;
1948   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1949   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1950   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1951   link->coarsenhook = coarsenhook;
1952   link->restricthook = restricthook;
1953   link->ctx = ctx;
1954   link->next = PETSC_NULL;
1955   *p = link;
1956   PetscFunctionReturn(0);
1957 }
1958 
1959 #undef __FUNCT__
1960 #define __FUNCT__ "DMRestrict"
1961 /*@
1962    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1963 
1964    Collective if any hooks are
1965 
1966    Input Arguments:
1967 +  fine - finer DM to use as a base
1968 .  restrct - restriction matrix, apply using MatRestrict()
1969 .  inject - injection matrix, also use MatRestrict()
1970 -  coarse - coarer DM to update
1971 
1972    Level: developer
1973 
1974 .seealso: DMCoarsenHookAdd(), MatRestrict()
1975 @*/
1976 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1977 {
1978   PetscErrorCode ierr;
1979   DMCoarsenHookLink link;
1980 
1981   PetscFunctionBegin;
1982   for (link=fine->coarsenhook; link; link=link->next) {
1983     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1984   }
1985   PetscFunctionReturn(0);
1986 }
1987 
1988 #undef __FUNCT__
1989 #define __FUNCT__ "DMSubDomainHookAdd"
1990 /*@
1991    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1992 
1993    Logically Collective
1994 
1995    Input Arguments:
1996 +  global - global DM
1997 .
1998 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1999 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
2000 
2001    Calling sequence for restricthook:
2002 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
2003 
2004 +  global - global DM
2005 .  out    - scatter to the outer (with ghost and overlap points) block vector
2006 .  in     - scatter to block vector values only owned locally
2007 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
2008 -  ctx - optional user-defined function context
2009 
2010    Level: advanced
2011 
2012    Notes:
2013    This function is only needed if auxiliary data needs to be set up on coarse grids.
2014 
2015    If this function is called multiple times, the hooks will be run in the order they are added.
2016 
2017    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2018    extract the finest level information from its context (instead of from the SNES).
2019 
2020 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2021 @*/
2022 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2023 {
2024   PetscErrorCode ierr;
2025   DMSubDomainHookLink link,*p;
2026 
2027   PetscFunctionBegin;
2028   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2029   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2030   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2031   link->restricthook = restricthook;
2032   link->ddhook = ddhook;
2033   link->ctx = ctx;
2034   link->next = PETSC_NULL;
2035   *p = link;
2036   PetscFunctionReturn(0);
2037 }
2038 
2039 #undef __FUNCT__
2040 #define __FUNCT__ "DMSubDomainRestrict"
2041 /*@
2042    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2043 
2044    Collective if any hooks are
2045 
2046    Input Arguments:
2047 +  fine - finer DM to use as a base
2048 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2049 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2050 -  coarse - coarer DM to update
2051 
2052    Level: developer
2053 
2054 .seealso: DMCoarsenHookAdd(), MatRestrict()
2055 @*/
2056 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2057 {
2058   PetscErrorCode ierr;
2059   DMSubDomainHookLink link;
2060 
2061   PetscFunctionBegin;
2062   for (link=global->subdomainhook; link; link=link->next) {
2063     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2064   }
2065   PetscFunctionReturn(0);
2066 }
2067 
2068 #undef __FUNCT__
2069 #define __FUNCT__ "DMGetCoarsenLevel"
2070 /*@
2071     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2072 
2073     Not Collective
2074 
2075     Input Parameter:
2076 .   dm - the DM object
2077 
2078     Output Parameter:
2079 .   level - number of coarsenings
2080 
2081     Level: developer
2082 
2083 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2084 
2085 @*/
2086 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2087 {
2088   PetscFunctionBegin;
2089   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2090   *level = dm->leveldown;
2091   PetscFunctionReturn(0);
2092 }
2093 
2094 
2095 
2096 #undef __FUNCT__
2097 #define __FUNCT__ "DMRefineHierarchy"
2098 /*@C
2099     DMRefineHierarchy - Refines a DM object, all levels at once
2100 
2101     Collective on DM
2102 
2103     Input Parameter:
2104 +   dm - the DM object
2105 -   nlevels - the number of levels of refinement
2106 
2107     Output Parameter:
2108 .   dmf - the refined DM hierarchy
2109 
2110     Level: developer
2111 
2112 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2113 
2114 @*/
2115 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2116 {
2117   PetscErrorCode ierr;
2118 
2119   PetscFunctionBegin;
2120   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2121   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2122   if (nlevels == 0) PetscFunctionReturn(0);
2123   if (dm->ops->refinehierarchy) {
2124     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2125   } else if (dm->ops->refine) {
2126     PetscInt i;
2127 
2128     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2129     for (i=1; i<nlevels; i++) {
2130       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2131     }
2132   } else {
2133     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2134   }
2135   PetscFunctionReturn(0);
2136 }
2137 
2138 #undef __FUNCT__
2139 #define __FUNCT__ "DMCoarsenHierarchy"
2140 /*@C
2141     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2142 
2143     Collective on DM
2144 
2145     Input Parameter:
2146 +   dm - the DM object
2147 -   nlevels - the number of levels of coarsening
2148 
2149     Output Parameter:
2150 .   dmc - the coarsened DM hierarchy
2151 
2152     Level: developer
2153 
2154 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2155 
2156 @*/
2157 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2158 {
2159   PetscErrorCode ierr;
2160 
2161   PetscFunctionBegin;
2162   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2163   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2164   if (nlevels == 0) PetscFunctionReturn(0);
2165   PetscValidPointer(dmc,3);
2166   if (dm->ops->coarsenhierarchy) {
2167     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2168   } else if (dm->ops->coarsen) {
2169     PetscInt i;
2170 
2171     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2172     for (i=1; i<nlevels; i++) {
2173       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2174     }
2175   } else {
2176     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2177   }
2178   PetscFunctionReturn(0);
2179 }
2180 
2181 #undef __FUNCT__
2182 #define __FUNCT__ "DMCreateAggregates"
2183 /*@
2184    DMCreateAggregates - Gets the aggregates that map between
2185    grids associated with two DMs.
2186 
2187    Collective on DM
2188 
2189    Input Parameters:
2190 +  dmc - the coarse grid DM
2191 -  dmf - the fine grid DM
2192 
2193    Output Parameters:
2194 .  rest - the restriction matrix (transpose of the projection matrix)
2195 
2196    Level: intermediate
2197 
2198 .keywords: interpolation, restriction, multigrid
2199 
2200 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2201 @*/
2202 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2203 {
2204   PetscErrorCode ierr;
2205 
2206   PetscFunctionBegin;
2207   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2208   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2209   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2210   PetscFunctionReturn(0);
2211 }
2212 
2213 #undef __FUNCT__
2214 #define __FUNCT__ "DMSetApplicationContextDestroy"
2215 /*@C
2216     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2217 
2218     Not Collective
2219 
2220     Input Parameters:
2221 +   dm - the DM object
2222 -   destroy - the destroy function
2223 
2224     Level: intermediate
2225 
2226 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2227 
2228 @*/
2229 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2230 {
2231   PetscFunctionBegin;
2232   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2233   dm->ctxdestroy = destroy;
2234   PetscFunctionReturn(0);
2235 }
2236 
2237 #undef __FUNCT__
2238 #define __FUNCT__ "DMSetApplicationContext"
2239 /*@
2240     DMSetApplicationContext - Set a user context into a DM object
2241 
2242     Not Collective
2243 
2244     Input Parameters:
2245 +   dm - the DM object
2246 -   ctx - the user context
2247 
2248     Level: intermediate
2249 
2250 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2251 
2252 @*/
2253 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2254 {
2255   PetscFunctionBegin;
2256   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2257   dm->ctx = ctx;
2258   PetscFunctionReturn(0);
2259 }
2260 
2261 #undef __FUNCT__
2262 #define __FUNCT__ "DMGetApplicationContext"
2263 /*@
2264     DMGetApplicationContext - Gets a user context from a DM object
2265 
2266     Not Collective
2267 
2268     Input Parameter:
2269 .   dm - the DM object
2270 
2271     Output Parameter:
2272 .   ctx - the user context
2273 
2274     Level: intermediate
2275 
2276 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2277 
2278 @*/
2279 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2280 {
2281   PetscFunctionBegin;
2282   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2283   *(void**)ctx = dm->ctx;
2284   PetscFunctionReturn(0);
2285 }
2286 
2287 #undef __FUNCT__
2288 #define __FUNCT__ "DMSetVariableBounds"
2289 /*@C
2290     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2291 
2292     Logically Collective on DM
2293 
2294     Input Parameter:
2295 +   dm - the DM object
2296 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2297 
2298     Level: intermediate
2299 
2300 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2301          DMSetJacobian()
2302 
2303 @*/
2304 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2305 {
2306   PetscFunctionBegin;
2307   dm->ops->computevariablebounds = f;
2308   PetscFunctionReturn(0);
2309 }
2310 
2311 #undef __FUNCT__
2312 #define __FUNCT__ "DMHasVariableBounds"
2313 /*@
2314     DMHasVariableBounds - does the DM object have a variable bounds function?
2315 
2316     Not Collective
2317 
2318     Input Parameter:
2319 .   dm - the DM object to destroy
2320 
2321     Output Parameter:
2322 .   flg - PETSC_TRUE if the variable bounds function exists
2323 
2324     Level: developer
2325 
2326 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2327 
2328 @*/
2329 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2330 {
2331   PetscFunctionBegin;
2332   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2333   PetscFunctionReturn(0);
2334 }
2335 
2336 #undef __FUNCT__
2337 #define __FUNCT__ "DMComputeVariableBounds"
2338 /*@C
2339     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2340 
2341     Logically Collective on DM
2342 
2343     Input Parameters:
2344 +   dm - the DM object to destroy
2345 -   x  - current solution at which the bounds are computed
2346 
2347     Output parameters:
2348 +   xl - lower bound
2349 -   xu - upper bound
2350 
2351     Level: intermediate
2352 
2353 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2354 
2355 @*/
2356 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2357 {
2358   PetscErrorCode ierr;
2359   PetscFunctionBegin;
2360   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2361   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2362   if (dm->ops->computevariablebounds) {
2363     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2364   }
2365   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2366   PetscFunctionReturn(0);
2367 }
2368 
2369 #undef __FUNCT__
2370 #define __FUNCT__ "DMHasColoring"
2371 /*@
2372     DMHasColoring - does the DM object have a method of providing a coloring?
2373 
2374     Not Collective
2375 
2376     Input Parameter:
2377 .   dm - the DM object
2378 
2379     Output Parameter:
2380 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2381 
2382     Level: developer
2383 
2384 .seealso DMHasFunction(), DMCreateColoring()
2385 
2386 @*/
2387 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2388 {
2389   PetscFunctionBegin;
2390   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2391   PetscFunctionReturn(0);
2392 }
2393 
2394 #undef  __FUNCT__
2395 #define __FUNCT__ "DMSetVec"
2396 /*@C
2397     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2398 
2399     Collective on DM
2400 
2401     Input Parameter:
2402 +   dm - the DM object
2403 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2404 
2405     Level: developer
2406 
2407 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2408 
2409 @*/
2410 PetscErrorCode  DMSetVec(DM dm,Vec x)
2411 {
2412   PetscErrorCode ierr;
2413   PetscFunctionBegin;
2414   if (x) {
2415     if (!dm->x) {
2416       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2417     }
2418     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2419   }
2420   else if (dm->x) {
2421     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2422   }
2423   PetscFunctionReturn(0);
2424 }
2425 
PetscFList DMList                       = PETSC_NULL;   /* list of registered DM types; populated by DMRegister()/DMRegisterAll() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;  /* set once DMRegisterAll() has run; reset by DMRegisterDestroy() */
2428 
2429 #undef __FUNCT__
2430 #define __FUNCT__ "DMSetType"
2431 /*@C
2432   DMSetType - Builds a DM, for a particular DM implementation.
2433 
2434   Collective on DM
2435 
2436   Input Parameters:
2437 + dm     - The DM object
2438 - method - The name of the DM type
2439 
2440   Options Database Key:
2441 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2442 
2443   Notes:
2444   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2445 
2446   Level: intermediate
2447 
2448 .keywords: DM, set, type
2449 .seealso: DMGetType(), DMCreate()
2450 @*/
2451 PetscErrorCode  DMSetType(DM dm, DMType method)
2452 {
2453   PetscErrorCode (*r)(DM);
2454   PetscBool      match;
2455   PetscErrorCode ierr;
2456 
2457   PetscFunctionBegin;
2458   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2459   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2460   if (match) PetscFunctionReturn(0);
2461 
2462   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2463   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2464   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2465 
2466   if (dm->ops->destroy) {
2467     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2468     dm->ops->destroy = PETSC_NULL;
2469   }
2470   ierr = (*r)(dm);CHKERRQ(ierr);
2471   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2472   PetscFunctionReturn(0);
2473 }
2474 
2475 #undef __FUNCT__
2476 #define __FUNCT__ "DMGetType"
2477 /*@C
2478   DMGetType - Gets the DM type name (as a string) from the DM.
2479 
2480   Not Collective
2481 
2482   Input Parameter:
2483 . dm  - The DM
2484 
2485   Output Parameter:
2486 . type - The DM type name
2487 
2488   Level: intermediate
2489 
2490 .keywords: DM, get, type, name
2491 .seealso: DMSetType(), DMCreate()
2492 @*/
2493 PetscErrorCode  DMGetType(DM dm, DMType *type)
2494 {
2495   PetscErrorCode ierr;
2496 
2497   PetscFunctionBegin;
2498   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2499   PetscValidCharPointer(type,2);
2500   if (!DMRegisterAllCalled) {
2501     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2502   }
2503   *type = ((PetscObject)dm)->type_name;
2504   PetscFunctionReturn(0);
2505 }
2506 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed here but never consulted below;
     presumably a "same type" fast path was intended — confirm against upstream history */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<from>_<to>_C" and query it on dm */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created only to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      /* Converter found on the target class; the probe DM is no longer needed */
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No converter was found; fail with both type names in the message */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    /* All successful lookup paths converge here to perform (and log) the conversion */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2602 
2603 /*--------------------------------------------------------------------------------------------------------------------*/
2604 
2605 #undef __FUNCT__
2606 #define __FUNCT__ "DMRegister"
2607 /*@C
2608   DMRegister - See DMRegisterDynamic()
2609 
2610   Level: advanced
2611 @*/
2612 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2613 {
2614   char fullname[PETSC_MAX_PATH_LEN];
2615   PetscErrorCode ierr;
2616 
2617   PetscFunctionBegin;
2618   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2619   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2620   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2621   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2622   PetscFunctionReturn(0);
2623 }
2624 
2625 
2626 /*--------------------------------------------------------------------------------------------------------------------*/
2627 #undef __FUNCT__
2628 #define __FUNCT__ "DMRegisterDestroy"
2629 /*@C
2630    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2631 
2632    Not Collective
2633 
2634    Level: advanced
2635 
2636 .keywords: DM, register, destroy
2637 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2638 @*/
2639 PetscErrorCode  DMRegisterDestroy(void)
2640 {
2641   PetscErrorCode ierr;
2642 
2643   PetscFunctionBegin;
2644   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2645   DMRegisterAllCalled = PETSC_FALSE;
2646   PetscFunctionReturn(0);
2647 }
2648 
2649 #undef __FUNCT__
2650 #define __FUNCT__ "DMLoad"
2651 /*@C
2652   DMLoad - Loads a DM that has been stored in binary  with DMView().
2653 
2654   Collective on PetscViewer
2655 
2656   Input Parameters:
2657 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2658            some related function before a call to DMLoad().
2659 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2660            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2661 
2662    Level: intermediate
2663 
2664   Notes:
2665    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2666 
2667   Notes for advanced users:
2668   Most users should not need to know the details of the binary storage
2669   format, since DMLoad() and DMView() completely hide these details.
2670   But for anyone who's interested, the standard binary matrix storage
2671   format is
2672 .vb
2673      has not yet been determined
2674 .ve
2675 
2676 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2677 @*/
2678 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2679 {
2680   PetscErrorCode ierr;
2681   PetscBool      isbinary;
2682   PetscInt       classid;
2683   char           type[256];
2684 
2685   PetscFunctionBegin;
2686   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2687   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2688   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2689   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2690 
2691   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2692   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2693   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2694   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2695   if (newdm->ops->load) {
2696     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2697   }
2698   PetscFunctionReturn(0);
2699 }
2700 
2701 /******************************** FEM Support **********************************/
2702 
2703 #undef __FUNCT__
2704 #define __FUNCT__ "DMPrintCellVector"
2705 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2706   PetscInt       f;
2707   PetscErrorCode ierr;
2708 
2709   PetscFunctionBegin;
2710   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2711   for (f = 0; f < len; ++f) {
2712     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2713   }
2714   PetscFunctionReturn(0);
2715 }
2716 
2717 #undef __FUNCT__
2718 #define __FUNCT__ "DMPrintCellMatrix"
2719 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2720   PetscInt       f, g;
2721   PetscErrorCode ierr;
2722 
2723   PetscFunctionBegin;
2724   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2725   for (f = 0; f < rows; ++f) {
2726     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2727     for (g = 0; g < cols; ++g) {
2728       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2729     }
2730     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2731   }
2732   PetscFunctionReturn(0);
2733 }
2734 
2735 #undef __FUNCT__
2736 #define __FUNCT__ "DMGetDefaultSection"
2737 /*@
2738   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2739 
2740   Input Parameter:
2741 . dm - The DM
2742 
2743   Output Parameter:
2744 . section - The PetscSection
2745 
2746   Level: intermediate
2747 
2748   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2749 
2750 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2751 @*/
2752 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2753   PetscFunctionBegin;
2754   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2755   PetscValidPointer(section, 2);
2756   *section = dm->defaultSection;
2757   PetscFunctionReturn(0);
2758 }
2759 
2760 #undef __FUNCT__
2761 #define __FUNCT__ "DMSetDefaultSection"
2762 /*@
2763   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2764 
2765   Input Parameters:
2766 + dm - The DM
2767 - section - The PetscSection
2768 
2769   Level: intermediate
2770 
2771   Note: Any existing Section will be destroyed
2772 
2773 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2774 @*/
2775 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2776   PetscInt       numFields;
2777   PetscInt       f;
2778   PetscErrorCode ierr;
2779 
2780   PetscFunctionBegin;
2781   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2782   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2783   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2784   dm->defaultSection = section;
2785   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2786   if (numFields) {
2787     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2788     for (f = 0; f < numFields; ++f) {
2789       const char *name;
2790 
2791       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2792       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2793     }
2794   }
2795   PetscFunctionReturn(0);
2796 }
2797 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazily derive the global section; this requires both the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Cache the PetscLayout of global values alongside the section */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: the DM keeps ownership of the cached section */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2831 
2832 #undef __FUNCT__
2833 #define __FUNCT__ "DMSetDefaultGlobalSection"
2834 /*@
2835   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2836 
2837   Input Parameters:
2838 + dm - The DM
2839 - section - The PetscSection
2840 
2841   Level: intermediate
2842 
2843   Note: Any existing Section will be destroyed
2844 
2845 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2846 @*/
2847 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2848   PetscErrorCode ierr;
2849 
2850   PetscFunctionBegin;
2851   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2852   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2853   dm->defaultGlobalSection = section;
2854   PetscFunctionReturn(0);
2855 }
2856 
2857 #undef __FUNCT__
2858 #define __FUNCT__ "DMGetDefaultSF"
2859 /*@
2860   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2861   it is created from the default PetscSection layouts in the DM.
2862 
2863   Input Parameter:
2864 . dm - The DM
2865 
2866   Output Parameter:
2867 . sf - The PetscSF
2868 
2869   Level: intermediate
2870 
2871   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2872 
2873 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2874 @*/
2875 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2876   PetscInt       nroots;
2877   PetscErrorCode ierr;
2878 
2879   PetscFunctionBegin;
2880   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2881   PetscValidPointer(sf, 2);
2882   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2883   if (nroots < 0) {
2884     PetscSection section, gSection;
2885 
2886     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2887     if (section) {
2888       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2889       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2890     } else {
2891       *sf = PETSC_NULL;
2892       PetscFunctionReturn(0);
2893     }
2894   }
2895   *sf = dm->defaultSF;
2896   PetscFunctionReturn(0);
2897 }
2898 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  /* NOTE(review): unlike DMSetPointSF(), no PetscObjectReference() is taken here, so the DM
     assumes ownership of the caller's reference to sf — confirm this asymmetry is intended */
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2924 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the globally-owned (unconstrained) values so remote offsets can be
     translated into (rank, index) pairs via the ownership ranges */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a remotely-owned point as -(gdof+1);
     constrained dofs (gcdof) are excluded in both cases */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local indices and remote (rank,index) targets; l counts leaves written */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* Global size may alternatively equal the full local dof count, in which case the
         point's constraints are not reflected globally and are ignored here */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local storage indices, skipping constrained dofs (cind is sorted; c tracks
       how many constraints have been skipped so leaves stay densely packed) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the true global offset from -(goff+1) and locate the
         owning rank by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insertion point+1) when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: target is this rank, index relative to our range start */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3013 
3014 #undef __FUNCT__
3015 #define __FUNCT__ "DMGetPointSF"
3016 /*@
3017   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3018 
3019   Input Parameter:
3020 . dm - The DM
3021 
3022   Output Parameter:
3023 . sf - The PetscSF
3024 
3025   Level: intermediate
3026 
3027   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3028 
3029 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3030 @*/
3031 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3032   PetscFunctionBegin;
3033   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3034   PetscValidPointer(sf, 2);
3035   *sf = dm->sf;
3036   PetscFunctionReturn(0);
3037 }
3038 
3039 #undef __FUNCT__
3040 #define __FUNCT__ "DMSetPointSF"
3041 /*@
3042   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3043 
3044   Input Parameters:
3045 + dm - The DM
3046 - sf - The PetscSF
3047 
3048   Level: intermediate
3049 
3050 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3051 @*/
3052 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3053   PetscErrorCode ierr;
3054 
3055   PetscFunctionBegin;
3056   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3057   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3058   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3059   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3060   dm->sf = sf;
3061   PetscFunctionReturn(0);
3062 }
3063 
3064 #undef __FUNCT__
3065 #define __FUNCT__ "DMGetNumFields"
3066 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3067 {
3068   PetscFunctionBegin;
3069   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3070   PetscValidPointer(numFields, 2);
3071   *numFields = dm->numFields;
3072   PetscFunctionReturn(0);
3073 }
3074 
3075 #undef __FUNCT__
3076 #define __FUNCT__ "DMSetNumFields"
3077 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3078 {
3079   PetscInt       f;
3080   PetscErrorCode ierr;
3081 
3082   PetscFunctionBegin;
3083   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3084   for (f = 0; f < dm->numFields; ++f) {
3085     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3086   }
3087   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3088   dm->numFields = numFields;
3089   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3090   for (f = 0; f < dm->numFields; ++f) {
3091     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3092   }
3093   PetscFunctionReturn(0);
3094 }
3095 
3096 #undef __FUNCT__
3097 #define __FUNCT__ "DMGetField"
3098 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3099 {
3100   PetscFunctionBegin;
3101   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3102   PetscValidPointer(field, 2);
3103   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3104   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3105   *field = dm->fields[f];
3106   PetscFunctionReturn(0);
3107 }
3108 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so passing the currently-held vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; DMGetCoordinatesLocal() rebuilds them on demand */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3141 
3142 #undef __FUNCT__
3143 #define __FUNCT__ "DMSetCoordinatesLocal"
3144 /*@
3145   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3146 
3147   Collective on DM
3148 
3149    Input Parameters:
3150 +  dm - the DM
3151 -  c - coordinate vector
3152 
3153   Note:
3154   The coordinates of ghost points can be set using DMSetCoordinates()
3155   followed by DMGetCoordinatesLocal(). This is intended to enable the
3156   setting of ghost coordinates outside of the domain.
3157 
3158   Level: intermediate
3159 
3160 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3161 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3162 @*/
3163 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3164 {
3165   PetscErrorCode ierr;
3166 
3167   PetscFunctionBegin;
3168   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3169   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3170   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3171   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3172   dm->coordinatesLocal = c;
3173   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3174   PetscFunctionReturn(0);
3175 }
3176 
3177 #undef __FUNCT__
3178 #define __FUNCT__ "DMGetCoordinates"
3179 /*@
3180   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3181 
3182   Not Collective
3183 
3184   Input Parameter:
3185 . dm - the DM
3186 
3187   Output Parameter:
3188 . c - global coordinate vector
3189 
3190   Note:
3191   This is a borrowed reference, so the user should NOT destroy this vector
3192 
3193   Each process has only the local coordinates (does NOT have the ghost coordinates).
3194 
3195   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3196   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3197 
3198   Level: intermediate
3199 
3200 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3201 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3202 @*/
3203 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3204 {
3205   PetscErrorCode ierr;
3206 
3207   PetscFunctionBegin;
3208   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3209   PetscValidPointer(c,2);
3210   if (!dm->coordinates && dm->coordinatesLocal) {
3211     DM cdm = PETSC_NULL;
3212 
3213     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3214     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3215     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3216     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3217     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3218   }
3219   *c = dm->coordinates;
3220   PetscFunctionReturn(0);
3221 }
3222 
3223 #undef __FUNCT__
3224 #define __FUNCT__ "DMGetCoordinatesLocal"
3225 /*@
3226   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3227 
3228   Collective on DM
3229 
3230   Input Parameter:
3231 . dm - the DM
3232 
3233   Output Parameter:
3234 . c - coordinate vector
3235 
3236   Note:
3237   This is a borrowed reference, so the user should NOT destroy this vector
3238 
3239   Each process has the local and ghost coordinates
3240 
3241   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3242   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3243 
3244   Level: intermediate
3245 
3246 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3247 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3248 @*/
3249 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3250 {
3251   PetscErrorCode ierr;
3252 
3253   PetscFunctionBegin;
3254   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3255   PetscValidPointer(c,2);
3256   if (!dm->coordinatesLocal && dm->coordinates) {
3257     DM cdm = PETSC_NULL;
3258 
3259     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3260     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3261     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3262     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3263     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3264   }
3265   *c = dm->coordinatesLocal;
3266   PetscFunctionReturn(0);
3267 }
3268 
3269 #undef __FUNCT__
3270 #define __FUNCT__ "DMGetCoordinateDM"
3271 /*@
3272   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3273 
3274   Collective on DM
3275 
3276   Input Parameter:
3277 . dm - the DM
3278 
3279   Output Parameter:
3280 . cdm - coordinate DM
3281 
3282   Level: intermediate
3283 
3284 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3285 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3286 @*/
3287 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3288 {
3289   PetscErrorCode ierr;
3290 
3291   PetscFunctionBegin;
3292   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3293   PetscValidPointer(cdm,2);
3294   if (!dm->coordinateDM) {
3295     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3296     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3297   }
3298   *cdm = dm->coordinateDM;
3299   PetscFunctionReturn(0);
3300 }
3301