xref: /petsc/src/dm/interface/dm.c (revision f3f0eb19a322e1923f66aeb6cc7ffacd1eb54b2a)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never  call DMSetType()  it will generate an
38    error when you try to use the vector.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
52 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
53 {
54   DM             v;
55   PetscErrorCode ierr;
56 
57   PetscFunctionBegin;
58   PetscValidPointer(dm,2);
59   *dm = PETSC_NULL;
60 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
61   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
62   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
63   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
64 #endif
65 
66   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
67   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
68 
69 
70   v->ltogmap      = PETSC_NULL;
71   v->ltogmapb     = PETSC_NULL;
72   v->bs           = 1;
73   v->coloringtype = IS_COLORING_GLOBAL;
74   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
75   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
76   v->defaultSection       = PETSC_NULL;
77   v->defaultGlobalSection = PETSC_NULL;
78   {
79     PetscInt i;
80     for (i = 0; i < 10; ++i) {
81       v->nullspaceConstructors[i] = PETSC_NULL;
82     }
83   }
84   v->numFields = 0;
85   v->fields    = PETSC_NULL;
86 
87   *dm = v;
88   PetscFunctionReturn(0);
89 }
90 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Notes:
   Any previously set vector type name is freed; a private copy of ctype is stored.

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);                      /* drop any previous type name */
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr); /* store our own copy */
  PetscFunctionReturn(0);
}
119 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or PETSC_NULL if none has been attached with VecSetDM()

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored in the Vec's composed-object list under this fixed key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Notes:
   Any previously set matrix type name is freed; a private copy of ctype is stored.

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);                      /* drop any previous type name */
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr); /* store our own copy */
  PetscFunctionReturn(0);
}
201 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or PETSC_NULL if none has been attached with MatSetDM()

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored in the Mat's composed-object list under this fixed key */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
229 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any previously attached DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be PETSC_NULL, which clears the association */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
255 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
287 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy (set to PETSC_NULL on return)

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* the cached solution vector counts only if it points back at this DM */
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* if non-circular references remain, just release the caller's handle */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* destroy the cached local work vectors; checked-out ones are an error */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* attached solver contexts */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* user context is destroyed only if a destroy callback was supplied */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  /* coordinate data */
  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* implementation-specific destruction runs last, before the header itself */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
428 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Notes:
    This routine is idempotent; calling it again after a successful setup is a no-op.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);
  /* type-specific setup is optional */
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  /* marked even when there is no setup op, so we never retry */
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
457 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;  /* default shown when no mat type was set */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
    /* give the concrete DM type a chance to process its own options */
    if (dm->ops->setfromoptions) {
      ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
    }
    /* process any options handlers added with PetscObjectAddOptionsHandler() */
    ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* honor -dm_view after all options have been applied */
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
507 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer, or PETSC_NULL for the communicator's stdout viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* binary files start with the DM class id followed by a fixed 256-byte type name */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* delegate the body of the output to the concrete DM type */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
548 
549 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
550 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "DMCreateGlobalVector"
554 /*@
555     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
556 
557     Collective on DM
558 
559     Input Parameter:
560 .   dm - the DM object
561 
562     Output Parameter:
563 .   vec - the global vector
564 
565     Level: beginner
566 
567 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
568 
569 @*/
570 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
571 {
572   PetscErrorCode ierr;
573 
574   PetscFunctionBegin;
575   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
576   if (dm->defaultSection) {
577     PetscSection gSection;
578     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
579 
580     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
581     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
582     for(p = pStart; p < pEnd; ++p) {
583       PetscInt dof, cdof;
584 
585       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
586       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
587       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
588       if ((dof > 0) && (dof-cdof != blockSize)) {
589         blockSize = 1;
590         break;
591       }
592     }
593     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
594     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
595     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
596     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
597     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
598     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
599     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
600     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
601     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
602     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
603     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
604     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
605   } else {
606     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
607   }
608   PetscFunctionReturn(0);
609 }
610 
611 #undef __FUNCT__
612 #define __FUNCT__ "DMCreateLocalVector"
613 /*@
614     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
615 
616     Not Collective
617 
618     Input Parameter:
619 .   dm - the DM object
620 
621     Output Parameter:
622 .   vec - the local vector
623 
624     Level: beginner
625 
626 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
627 
628 @*/
629 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
630 {
631   PetscErrorCode ierr;
632 
633   PetscFunctionBegin;
634   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
635   if (dm->defaultSection) {
636     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
637 
638     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
639     for(p = pStart; p < pEnd; ++p) {
640       PetscInt dof;
641 
642       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
643       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
644       if ((dof > 0) && (dof != blockSize)) {
645         blockSize = 1;
646         break;
647       }
648     }
649     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
650     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
651     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
652     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
653     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
654     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
655     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
656   } else {
657     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
658   }
659   PetscFunctionReturn(0);
660 }
661 
662 #undef __FUNCT__
663 #define __FUNCT__ "DMGetLocalToGlobalMapping"
664 /*@
665    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
666 
667    Collective on DM
668 
669    Input Parameter:
670 .  dm - the DM that provides the mapping
671 
672    Output Parameter:
673 .  ltog - the mapping
674 
675    Level: intermediate
676 
677    Notes:
678    This mapping can then be used by VecSetLocalToGlobalMapping() or
679    MatSetLocalToGlobalMapping().
680 
681 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
682 @*/
683 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
684 {
685   PetscErrorCode ierr;
686 
687   PetscFunctionBegin;
688   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
689   PetscValidPointer(ltog,2);
690   if (!dm->ltogmap) {
691     PetscSection section, sectionGlobal;
692 
693     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
694     if (section) {
695       PetscInt      *ltog;
696       PetscInt       pStart, pEnd, size, p, l;
697 
698       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
699       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
700       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
701       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
702       for (p = pStart, l = 0; p < pEnd; ++p) {
703         PetscInt dof, off, c;
704 
705         /* Should probably use constrained dofs */
706         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
707         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
708         for (c = 0; c < dof; ++c, ++l) {
709           ltog[l] = off+c;
710         }
711       }
712       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
713       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
714     } else {
715       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
716       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
717     }
718   }
719   *ltog = dm->ltogmap;
720   PetscFunctionReturn(0);
721 }
722 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

   When the block size is 1 the point mapping from DMGetLocalToGlobalMapping() is reused.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the blocked mapping is the point mapping; take an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
765 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Notes:
   Raises an error if the DM has not yet determined its block size (dm->bs < 1).

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
792 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* NOTE(review): no check that createinterpolation is implemented, unlike DMCreateColoring() */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
830 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* NOTE(review): no check that getinjection is implemented, unlike DMCreateColoring() */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
863 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
894 
895 #undef __FUNCT__
896 #define __FUNCT__ "DMCreateMatrix"
897 /*@C
898     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
899 
900     Collective on DM
901 
902     Input Parameter:
903 +   dm - the DM object
904 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
905             any type which inherits from one of these (such as MATAIJ)
906 
907     Output Parameter:
908 .   mat - the empty Jacobian
909 
910     Level: beginner
911 
912     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
913        do not need to do it yourself.
914 
915        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
916        the nonzero pattern call DMDASetMatPreallocateOnly()
917 
918        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
919        internally by PETSc.
920 
921        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
922        the indices for the global numbering for DMDAs which is complicated.
923 
924 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
925 
926 @*/
927 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
928 {
929   PetscErrorCode ierr;
930 
931   PetscFunctionBegin;
932   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
933 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
934   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
935 #endif
936   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
937   PetscValidPointer(mat,3);
938   if (dm->mattype) {
939     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
940   } else {
941     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
942   }
943   PetscFunctionReturn(0);
944 }
945 
946 #undef __FUNCT__
947 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
948 /*@
949   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
950     preallocated but the nonzero structure and zero values will not be set.
951 
952   Logically Collective on DMDA
953 
954   Input Parameter:
955 + dm - the DM
956 - only - PETSC_TRUE if only want preallocation
957 
958   Level: developer
959 .seealso DMCreateMatrix()
960 @*/
961 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
962 {
963   PetscFunctionBegin;
964   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
965   dm->prealloc_only = only;
966   PetscFunctionReturn(0);
967 }
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "DMGetWorkArray"
971 /*@C
972   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
973 
974   Not Collective
975 
976   Input Parameters:
977 + dm - the DM object
978 . count - The minium size
979 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
980 
981   Output Parameter:
982 . array - the work array
983 
984   Level: developer
985 
986 .seealso DMDestroy(), DMCreate()
987 @*/
988 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
989 {
990   PetscErrorCode ierr;
991   DMWorkLink link;
992   size_t size;
993 
994   PetscFunctionBegin;
995   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
996   PetscValidPointer(mem,4);
997   if (dm->workin) {
998     link = dm->workin;
999     dm->workin = dm->workin->next;
1000   } else {
1001     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1002   }
1003   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1004   if (size*count > link->bytes) {
1005     ierr = PetscFree(link->mem);CHKERRQ(ierr);
1006     ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1007     link->bytes = size*count;
1008   }
1009   link->next = dm->workout;
1010   dm->workout = link;
1011   *(void**)mem = link->mem;
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 #undef __FUNCT__
1016 #define __FUNCT__ "DMRestoreWorkArray"
1017 /*@C
1018   DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
1019 
1020   Not Collective
1021 
1022   Input Parameters:
1023 + dm - the DM object
1024 . count - The minium size
1025 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1026 
1027   Output Parameter:
1028 . array - the work array
1029 
1030   Level: developer
1031 
1032 .seealso DMDestroy(), DMCreate()
1033 @*/
1034 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1035 {
1036   DMWorkLink *p,link;
1037 
1038   PetscFunctionBegin;
1039   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1040   PetscValidPointer(mem,4);
1041   for (p=&dm->workout; (link=*p); p=&link->next) {
1042     if (link->mem == *(void**)mem) {
1043       *p = link->next;
1044       link->next = dm->workin;
1045       dm->workin = link;
1046       *(void**)mem = PETSC_NULL;
1047       PetscFunctionReturn(0);
1048     }
1049   }
1050   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 #undef __FUNCT__
1055 #define __FUNCT__ "DMSetNullSpaceConstructor"
1056 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1057 {
1058   PetscFunctionBegin;
1059   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1060   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1061   dm->nullspaceConstructors[field] = nullsp;
1062   PetscFunctionReturn(0);
1063 }
1064 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Every output is optional; validate and zero only the ones requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* With a default section present, build the field ISs directly from the
       local section (field layout) and the global section (ownership/offsets) */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count owned (gdof > 0), unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reset: reused as a fill cursor in the second pass */
    }
    /* Second pass: record the global index of each owned, unconstrained field dof;
       goff advances through each point's global offsets field by field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: each IS takes ownership of its fieldIndices[f] array */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: defer to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1177 
1178 #undef __FUNCT__
1179 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1180 /*@C
1181   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1182 
1183   Not Collective
1184 
1185   Input Parameters:
1186 + dm   - the DM object
1187 - name - the name of the field decomposition
1188 
1189   Output Parameter:
1190 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1191 
1192   Level: advanced
1193 
1194 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1195 @*/
1196 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1197 {
1198   PetscErrorCode ierr;
1199 
1200   PetscFunctionBegin;
1201   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1202   PetscValidCharPointer(name,2);
1203   PetscValidPointer(ddm,3);
1204   *ddm = PETSC_NULL;
1205   if (dm->ops->createfielddecompositiondm) {
1206     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1207   }
1208   PetscFunctionReturn(0);
1209 }
1210 
1211 #undef __FUNCT__
1212 #define __FUNCT__ "DMCreateFieldDecomposition"
1213 /*@C
1214   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1215                           corresponding to different fields: each IS contains the global indices of the dofs of the
1216                           corresponding field. The optional list of DMs define the DM for each subproblem.
1217                           Generalizes DMCreateFieldIS().
1218 
1219   Not collective
1220 
1221   Input Parameter:
1222 . dm - the DM object
1223 
1224   Output Parameters:
1225 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1226 . namelist  - The name for each field (or PETSC_NULL if not requested)
1227 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1228 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1229 
1230   Level: intermediate
1231 
1232   Notes:
1233   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1234   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1235   and all of the arrays should be freed with PetscFree().
1236 
1237 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1238 @*/
1239 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1240 {
1241   PetscErrorCode ierr;
1242 
1243   PetscFunctionBegin;
1244   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1245   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1246   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1247   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1248   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1249   if (!dm->ops->createfielddecomposition) {
1250     PetscSection section;
1251     PetscInt     numFields, f;
1252 
1253     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1254     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1255     if (section && numFields && dm->ops->createsubdm) {
1256       *len = numFields;
1257       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1258       for (f = 0; f < numFields; ++f) {
1259         const char *fieldName;
1260 
1261         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1262         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1263         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1264       }
1265     } else {
1266       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1267       /* By default there are no DMs associated with subproblems. */
1268       if (dmlist) *dmlist = PETSC_NULL;
1269     }
1270   }
1271   else {
1272     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1273   }
1274   PetscFunctionReturn(0);
1275 }
1276 
1277 #undef __FUNCT__
1278 #define __FUNCT__ "DMCreateSubDM"
1279 /*@C
1280   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1281                   The fields are defined by DMCreateFieldIS().
1282 
1283   Not collective
1284 
1285   Input Parameters:
1286 + dm - the DM object
1287 . numFields - number of fields in this subproblem
1288 - len       - The number of subproblems in the decomposition (or PETSC_NULL if not requested)
1289 
1290   Output Parameters:
1291 . is - The global indices for the subproblem
1292 - dm - The DM for the subproblem
1293 
1294   Level: intermediate
1295 
1296 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1297 @*/
1298 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1299 {
1300   PetscErrorCode ierr;
1301 
1302   PetscFunctionBegin;
1303   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1304   PetscValidPointer(fields,3);
1305   if (is) {PetscValidPointer(is,4);}
1306   if (subdm) {PetscValidPointer(subdm,5);}
1307   if (dm->ops->createsubdm) {
1308     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1309   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1310   PetscFunctionReturn(0);
1311 }
1312 
1313 #undef __FUNCT__
1314 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1315 /*@C
1316   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1317 
1318   Not Collective
1319 
1320   Input Parameters:
1321 + dm   - the DM object
1322 - name - the name of the subdomain decomposition
1323 
1324   Output Parameter:
1325 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1326 
1327   Level: advanced
1328 
1329 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1330 @*/
1331 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1332 {
1333   PetscErrorCode ierr;
1334 
1335   PetscFunctionBegin;
1336   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1337   PetscValidCharPointer(name,2);
1338   PetscValidPointer(ddm,3);
1339   *ddm = PETSC_NULL;
1340   if (dm->ops->createdomaindecompositiondm) {
1341     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1342   }
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "DMCreateDomainDecomposition"
1348 /*@C
1349   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1350                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1351                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1352                           define a nonoverlapping covering, while outer subdomains can overlap.
1353                           The optional list of DMs define the DM for each subproblem.
1354 
1355   Not collective
1356 
1357   Input Parameter:
1358 . dm - the DM object
1359 
1360   Output Parameters:
1361 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1362 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1363 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1364 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1365 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1366 
1367   Level: intermediate
1368 
1369   Notes:
1370   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1371   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1372   and all of the arrays should be freed with PetscFree().
1373 
1374 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1375 @*/
1376 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1377 {
1378   PetscErrorCode ierr;
1379   DMSubDomainHookLink link;
1380   PetscInt            i,l;
1381 
1382   PetscFunctionBegin;
1383   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1384   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1385   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1386   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1387   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1388   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1389   if (dm->ops->createdomaindecomposition) {
1390     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1391   } else {
1392     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1393   }
1394   if (len) *len = l;
1395   for (i = 0; i < l; i++) {
1396     for (link=dm->subdomainhook; link; link=link->next) {
1397       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1398     }
1399     (*dmlist)[i]->ctx = dm->ctx;
1400   }
1401   PetscFunctionReturn(0);
1402 }
1403 
1404 
1405 #undef __FUNCT__
1406 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1407 /*@C
1408   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1409 
1410   Not collective
1411 
1412   Input Parameters:
1413 + dm - the DM object
1414 . n  - the number of subdomain scatters
1415 - subdms - the local subdomains
1416 
1417   Output Parameters:
1418 + n     - the number of scatters returned
1419 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1420 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1421 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1422 
1423   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1424   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1425   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1426   solution and residual data.
1427 
1428   Level: developer
1429 
1430 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1431 @*/
1432 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1433 {
1434   PetscErrorCode ierr;
1435 
1436   PetscFunctionBegin;
1437   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1438   PetscValidPointer(subdms,3);
1439   PetscValidPointer(iscat,4);
1440   PetscValidPointer(oscat,5);
1441   PetscValidPointer(gscat,6);
1442   if (dm->ops->createddscatters) {
1443     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1444   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1445   PetscFunctionReturn(0);
1446 }
1447 
1448 #undef __FUNCT__
1449 #define __FUNCT__ "DMRefine"
1450 /*@
1451   DMRefine - Refines a DM object
1452 
1453   Collective on DM
1454 
1455   Input Parameter:
1456 + dm   - the DM object
1457 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1458 
1459   Output Parameter:
1460 . dmf - the refined DM, or PETSC_NULL
1461 
1462   Note: If no refinement was done, the return value is PETSC_NULL
1463 
1464   Level: developer
1465 
1466 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1467 @*/
1468 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1469 {
1470   PetscErrorCode ierr;
1471   DMRefineHookLink link;
1472 
1473   PetscFunctionBegin;
1474   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1475   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1476   if (*dmf) {
1477     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1478     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1479     (*dmf)->ctx       = dm->ctx;
1480     (*dmf)->leveldown = dm->leveldown;
1481     (*dmf)->levelup   = dm->levelup + 1;
1482     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1483     for (link=dm->refinehook; link; link=link->next) {
1484       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1485     }
1486   }
1487   PetscFunctionReturn(0);
1488 }
1489 
1490 #undef __FUNCT__
1491 #define __FUNCT__ "DMRefineHookAdd"
1492 /*@
1493    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1494 
1495    Logically Collective
1496 
1497    Input Arguments:
1498 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1499 .  refinehook - function to run when setting up a coarser level
1500 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1501 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1502 
1503    Calling sequence of refinehook:
1504 $    refinehook(DM coarse,DM fine,void *ctx);
1505 
1506 +  coarse - coarse level DM
1507 .  fine - fine level DM to interpolate problem to
1508 -  ctx - optional user-defined function context
1509 
1510    Calling sequence for interphook:
1511 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1512 
1513 +  coarse - coarse level DM
1514 .  interp - matrix interpolating a coarse-level solution to the finer grid
1515 .  fine - fine level DM to update
1516 -  ctx - optional user-defined function context
1517 
1518    Level: advanced
1519 
1520    Notes:
1521    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1522 
1523    If this function is called multiple times, the hooks will be run in the order they are added.
1524 
1525 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1526 @*/
1527 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1528 {
1529   PetscErrorCode ierr;
1530   DMRefineHookLink link,*p;
1531 
1532   PetscFunctionBegin;
1533   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1534   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1535   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1536   link->refinehook = refinehook;
1537   link->interphook = interphook;
1538   link->ctx = ctx;
1539   link->next = PETSC_NULL;
1540   *p = link;
1541   PetscFunctionReturn(0);
1542 }
1543 
1544 #undef __FUNCT__
1545 #define __FUNCT__ "DMInterpolate"
1546 /*@
1547    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1548 
1549    Collective if any hooks are
1550 
1551    Input Arguments:
1552 +  coarse - coarser DM to use as a base
1553 .  restrct - interpolation matrix, apply using MatInterpolate()
1554 -  fine - finer DM to update
1555 
1556    Level: developer
1557 
1558 .seealso: DMRefineHookAdd(), MatInterpolate()
1559 @*/
1560 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1561 {
1562   PetscErrorCode ierr;
1563   DMRefineHookLink link;
1564 
1565   PetscFunctionBegin;
1566   for (link=fine->refinehook; link; link=link->next) {
1567     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1568   }
1569   PetscFunctionReturn(0);
1570 }
1571 
1572 #undef __FUNCT__
1573 #define __FUNCT__ "DMGetRefineLevel"
1574 /*@
1575     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1576 
1577     Not Collective
1578 
1579     Input Parameter:
1580 .   dm - the DM object
1581 
1582     Output Parameter:
1583 .   level - number of refinements
1584 
1585     Level: developer
1586 
1587 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1588 
1589 @*/
1590 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1591 {
1592   PetscFunctionBegin;
1593   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1594   *level = dm->levelup;
1595   PetscFunctionReturn(0);
1596 }
1597 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append the new hook at the tail so hooks run in the order they were added */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1655 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (registered with DMGlobalToLocalHookAdd()) before any communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  /* If the DM carries a default star forest, communicate through it; otherwise
     fall back to the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The *_ALL_VALUES modes are mapped to their plain counterparts for the implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1701 
1702 #undef __FUNCT__
1703 #define __FUNCT__ "DMGlobalToLocalEnd"
1704 /*@
1705     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1706 
1707     Neighbor-wise Collective on DM
1708 
1709     Input Parameters:
1710 +   dm - the DM object
1711 .   g - the global vector
1712 .   mode - INSERT_VALUES or ADD_VALUES
1713 -   l - the local vector
1714 
1715 
1716     Level: beginner
1717 
1718 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1719 
1720 @*/
1721 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1722 {
1723   PetscSF        sf;
1724   PetscErrorCode ierr;
1725   PetscScalar    *lArray, *gArray;
1726   DMGlobalToLocalHookLink link;
1727 
1728   PetscFunctionBegin;
1729   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1730   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1731   if (sf) {
1732   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1733 
1734     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1735     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1736     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1737     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1738     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1739   } else {
1740     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1741   }
1742   for (link=dm->gtolhook; link; link=link->next) {
1743     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1744   }
1745   PetscFunctionReturn(0);
1746 }
1747 
1748 #undef __FUNCT__
1749 #define __FUNCT__ "DMLocalToGlobalBegin"
1750 /*@
1751     DMLocalToGlobalBegin - updates global vectors from local vectors
1752 
1753     Neighbor-wise Collective on DM
1754 
1755     Input Parameters:
1756 +   dm - the DM object
1757 .   l - the local vector
1758 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1759            base point.
1760 - - the global vector
1761 
1762     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1763            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1764            global array to the final global array with VecAXPY().
1765 
1766     Level: beginner
1767 
1768 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1769 
1770 @*/
1771 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1772 {
1773   PetscSF        sf;
1774   PetscErrorCode ierr;
1775 
1776   PetscFunctionBegin;
1777   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1778   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1779   if (sf) {
1780     MPI_Op       op;
1781     PetscScalar *lArray, *gArray;
1782 
1783     switch(mode) {
1784     case INSERT_VALUES:
1785     case INSERT_ALL_VALUES:
1786 #if defined(PETSC_HAVE_MPI_REPLACE)
1787       op = MPI_REPLACE; break;
1788 #else
1789       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1790 #endif
1791     case ADD_VALUES:
1792     case ADD_ALL_VALUES:
1793       op = MPI_SUM; break;
1794   default:
1795     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1796     }
1797     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1798     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1799     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1800     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1801     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1802   } else {
1803     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1804   }
1805   PetscFunctionReturn(0);
1806 }
1807 
1808 #undef __FUNCT__
1809 #define __FUNCT__ "DMLocalToGlobalEnd"
1810 /*@
1811     DMLocalToGlobalEnd - updates global vectors from local vectors
1812 
1813     Neighbor-wise Collective on DM
1814 
1815     Input Parameters:
1816 +   dm - the DM object
1817 .   l - the local vector
1818 .   mode - INSERT_VALUES or ADD_VALUES
1819 -   g - the global vector
1820 
1821 
1822     Level: beginner
1823 
1824 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1825 
1826 @*/
1827 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1828 {
1829   PetscSF        sf;
1830   PetscErrorCode ierr;
1831 
1832   PetscFunctionBegin;
1833   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1834   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1835   if (sf) {
1836     MPI_Op       op;
1837     PetscScalar *lArray, *gArray;
1838 
1839     switch(mode) {
1840     case INSERT_VALUES:
1841     case INSERT_ALL_VALUES:
1842 #if defined(PETSC_HAVE_MPI_REPLACE)
1843       op = MPI_REPLACE; break;
1844 #else
1845       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1846 #endif
1847     case ADD_VALUES:
1848     case ADD_ALL_VALUES:
1849       op = MPI_SUM; break;
1850     default:
1851       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1852     }
1853     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1854     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1855     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1856     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1857     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1858   } else {
1859     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1860   }
1861   PetscFunctionReturn(0);
1862 }
1863 
1864 #undef __FUNCT__
1865 #define __FUNCT__ "DMCoarsen"
1866 /*@
1867     DMCoarsen - Coarsens a DM object
1868 
1869     Collective on DM
1870 
1871     Input Parameter:
1872 +   dm - the DM object
1873 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1874 
1875     Output Parameter:
1876 .   dmc - the coarsened DM
1877 
1878     Level: developer
1879 
1880 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1881 
1882 @*/
1883 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1884 {
1885   PetscErrorCode ierr;
1886   DMCoarsenHookLink link;
1887 
1888   PetscFunctionBegin;
1889   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1890   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1891   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1892   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1893   (*dmc)->ctx       = dm->ctx;
1894   (*dmc)->levelup   = dm->levelup;
1895   (*dmc)->leveldown = dm->leveldown + 1;
1896   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1897   for (link=dm->coarsenhook; link; link=link->next) {
1898     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1899   }
1900   PetscFunctionReturn(0);
1901 }
1902 
1903 #undef __FUNCT__
1904 #define __FUNCT__ "DMCoarsenHookAdd"
1905 /*@
1906    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1907 
1908    Logically Collective
1909 
1910    Input Arguments:
1911 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1912 .  coarsenhook - function to run when setting up a coarser level
1913 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1914 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1915 
1916    Calling sequence of coarsenhook:
1917 $    coarsenhook(DM fine,DM coarse,void *ctx);
1918 
1919 +  fine - fine level DM
1920 .  coarse - coarse level DM to restrict problem to
1921 -  ctx - optional user-defined function context
1922 
1923    Calling sequence for restricthook:
1924 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1925 
1926 +  fine - fine level DM
1927 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1928 .  rscale - scaling vector for restriction
1929 .  inject - matrix restricting by injection
1930 .  coarse - coarse level DM to update
1931 -  ctx - optional user-defined function context
1932 
1933    Level: advanced
1934 
1935    Notes:
1936    This function is only needed if auxiliary data needs to be set up on coarse grids.
1937 
1938    If this function is called multiple times, the hooks will be run in the order they are added.
1939 
1940    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1941    extract the finest level information from its context (instead of from the SNES).
1942 
1943 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1944 @*/
1945 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1946 {
1947   PetscErrorCode ierr;
1948   DMCoarsenHookLink link,*p;
1949 
1950   PetscFunctionBegin;
1951   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1952   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1953   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1954   link->coarsenhook = coarsenhook;
1955   link->restricthook = restricthook;
1956   link->ctx = ctx;
1957   link->next = PETSC_NULL;
1958   *p = link;
1959   PetscFunctionReturn(0);
1960 }
1961 
1962 #undef __FUNCT__
1963 #define __FUNCT__ "DMRestrict"
1964 /*@
1965    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1966 
1967    Collective if any hooks are
1968 
1969    Input Arguments:
1970 +  fine - finer DM to use as a base
1971 .  restrct - restriction matrix, apply using MatRestrict()
1972 .  inject - injection matrix, also use MatRestrict()
1973 -  coarse - coarer DM to update
1974 
1975    Level: developer
1976 
1977 .seealso: DMCoarsenHookAdd(), MatRestrict()
1978 @*/
1979 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1980 {
1981   PetscErrorCode ierr;
1982   DMCoarsenHookLink link;
1983 
1984   PetscFunctionBegin;
1985   for (link=fine->coarsenhook; link; link=link->next) {
1986     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1987   }
1988   PetscFunctionReturn(0);
1989 }
1990 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@
   DMSubDomainHookAdd - adds a callback to be run when restricting a problem to a subdomain (block)

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run when setting up the subdomain problem
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomains.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMCoarsenHookAdd(), DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append the new hook at the tail so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook = ddhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
2041 
2042 #undef __FUNCT__
2043 #define __FUNCT__ "DMSubDomainRestrict"
2044 /*@
2045    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2046 
2047    Collective if any hooks are
2048 
2049    Input Arguments:
2050 +  fine - finer DM to use as a base
2051 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2052 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2053 -  coarse - coarer DM to update
2054 
2055    Level: developer
2056 
2057 .seealso: DMCoarsenHookAdd(), MatRestrict()
2058 @*/
2059 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2060 {
2061   PetscErrorCode ierr;
2062   DMSubDomainHookLink link;
2063 
2064   PetscFunctionBegin;
2065   for (link=global->subdomainhook; link; link=link->next) {
2066     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2067   }
2068   PetscFunctionReturn(0);
2069 }
2070 
2071 #undef __FUNCT__
2072 #define __FUNCT__ "DMGetCoarsenLevel"
2073 /*@
2074     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2075 
2076     Not Collective
2077 
2078     Input Parameter:
2079 .   dm - the DM object
2080 
2081     Output Parameter:
2082 .   level - number of coarsenings
2083 
2084     Level: developer
2085 
2086 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2087 
2088 @*/
2089 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2090 {
2091   PetscFunctionBegin;
2092   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2093   *level = dm->leveldown;
2094   PetscFunctionReturn(0);
2095 }
2096 
2097 
2098 
2099 #undef __FUNCT__
2100 #define __FUNCT__ "DMRefineHierarchy"
2101 /*@C
2102     DMRefineHierarchy - Refines a DM object, all levels at once
2103 
2104     Collective on DM
2105 
2106     Input Parameter:
2107 +   dm - the DM object
2108 -   nlevels - the number of levels of refinement
2109 
2110     Output Parameter:
2111 .   dmf - the refined DM hierarchy
2112 
2113     Level: developer
2114 
2115 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2116 
2117 @*/
2118 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2119 {
2120   PetscErrorCode ierr;
2121 
2122   PetscFunctionBegin;
2123   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2124   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2125   if (nlevels == 0) PetscFunctionReturn(0);
2126   if (dm->ops->refinehierarchy) {
2127     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2128   } else if (dm->ops->refine) {
2129     PetscInt i;
2130 
2131     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2132     for (i=1; i<nlevels; i++) {
2133       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2134     }
2135   } else {
2136     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2137   }
2138   PetscFunctionReturn(0);
2139 }
2140 
2141 #undef __FUNCT__
2142 #define __FUNCT__ "DMCoarsenHierarchy"
2143 /*@C
2144     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2145 
2146     Collective on DM
2147 
2148     Input Parameter:
2149 +   dm - the DM object
2150 -   nlevels - the number of levels of coarsening
2151 
2152     Output Parameter:
2153 .   dmc - the coarsened DM hierarchy
2154 
2155     Level: developer
2156 
2157 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2158 
2159 @*/
2160 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2161 {
2162   PetscErrorCode ierr;
2163 
2164   PetscFunctionBegin;
2165   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2166   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2167   if (nlevels == 0) PetscFunctionReturn(0);
2168   PetscValidPointer(dmc,3);
2169   if (dm->ops->coarsenhierarchy) {
2170     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2171   } else if (dm->ops->coarsen) {
2172     PetscInt i;
2173 
2174     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2175     for (i=1; i<nlevels; i++) {
2176       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2177     }
2178   } else {
2179     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2180   }
2181   PetscFunctionReturn(0);
2182 }
2183 
2184 #undef __FUNCT__
2185 #define __FUNCT__ "DMCreateAggregates"
2186 /*@
2187    DMCreateAggregates - Gets the aggregates that map between
2188    grids associated with two DMs.
2189 
2190    Collective on DM
2191 
2192    Input Parameters:
2193 +  dmc - the coarse grid DM
2194 -  dmf - the fine grid DM
2195 
2196    Output Parameters:
2197 .  rest - the restriction matrix (transpose of the projection matrix)
2198 
2199    Level: intermediate
2200 
2201 .keywords: interpolation, restriction, multigrid
2202 
2203 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2204 @*/
2205 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2206 {
2207   PetscErrorCode ierr;
2208 
2209   PetscFunctionBegin;
2210   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2211   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2212   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2213   PetscFunctionReturn(0);
2214 }
2215 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked when the DM itself is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2239 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Notes: The DM does not copy the context; the caller retains ownership unless a destroy
           function was supplied with DMSetApplicationContextDestroy().

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2263 
2264 #undef __FUNCT__
2265 #define __FUNCT__ "DMGetApplicationContext"
2266 /*@
2267     DMGetApplicationContext - Gets a user context from a DM object
2268 
2269     Not Collective
2270 
2271     Input Parameter:
2272 .   dm - the DM object
2273 
2274     Output Parameter:
2275 .   ctx - the user context
2276 
2277     Level: intermediate
2278 
2279 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2280 
2281 @*/
2282 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2283 {
2284   PetscFunctionBegin;
2285   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2286   *(void**)ctx = dm->ctx;
2287   PetscFunctionReturn(0);
2288 }
2289 
2290 #undef __FUNCT__
2291 #define __FUNCT__ "DMSetVariableBounds"
2292 /*@C
2293     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2294 
2295     Logically Collective on DM
2296 
2297     Input Parameter:
2298 +   dm - the DM object
2299 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2300 
2301     Level: intermediate
2302 
2303 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2304          DMSetJacobian()
2305 
2306 @*/
2307 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2308 {
2309   PetscFunctionBegin;
2310   dm->ops->computevariablebounds = f;
2311   PetscFunctionReturn(0);
2312 }
2313 
2314 #undef __FUNCT__
2315 #define __FUNCT__ "DMHasVariableBounds"
2316 /*@
2317     DMHasVariableBounds - does the DM object have a variable bounds function?
2318 
2319     Not Collective
2320 
2321     Input Parameter:
2322 .   dm - the DM object to destroy
2323 
2324     Output Parameter:
2325 .   flg - PETSC_TRUE if the variable bounds function exists
2326 
2327     Level: developer
2328 
2329 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2330 
2331 @*/
2332 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2333 {
2334   PetscFunctionBegin;
2335   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2336   PetscFunctionReturn(0);
2337 }
2338 
2339 #undef __FUNCT__
2340 #define __FUNCT__ "DMComputeVariableBounds"
2341 /*@C
2342     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2343 
2344     Logically Collective on DM
2345 
2346     Input Parameters:
2347 +   dm - the DM object to destroy
2348 -   x  - current solution at which the bounds are computed
2349 
2350     Output parameters:
2351 +   xl - lower bound
2352 -   xu - upper bound
2353 
2354     Level: intermediate
2355 
2356 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2357 
2358 @*/
2359 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2360 {
2361   PetscErrorCode ierr;
2362   PetscFunctionBegin;
2363   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2364   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2365   if (dm->ops->computevariablebounds) {
2366     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2367   }
2368   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2369   PetscFunctionReturn(0);
2370 }
2371 
2372 #undef __FUNCT__
2373 #define __FUNCT__ "DMHasColoring"
2374 /*@
2375     DMHasColoring - does the DM object have a method of providing a coloring?
2376 
2377     Not Collective
2378 
2379     Input Parameter:
2380 .   dm - the DM object
2381 
2382     Output Parameter:
2383 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2384 
2385     Level: developer
2386 
2387 .seealso DMHasFunction(), DMCreateColoring()
2388 
2389 @*/
2390 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2391 {
2392   PetscFunctionBegin;
2393   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2394   PetscFunctionReturn(0);
2395 }
2396 
2397 #undef  __FUNCT__
2398 #define __FUNCT__ "DMSetVec"
2399 /*@C
2400     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2401 
2402     Collective on DM
2403 
2404     Input Parameter:
2405 +   dm - the DM object
2406 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2407 
2408     Level: developer
2409 
2410 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2411 
2412 @*/
2413 PetscErrorCode  DMSetVec(DM dm,Vec x)
2414 {
2415   PetscErrorCode ierr;
2416   PetscFunctionBegin;
2417   if (x) {
2418     if (!dm->x) {
2419       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2420     }
2421     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2422   }
2423   else if (dm->x) {
2424     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2425   }
2426   PetscFunctionReturn(0);
2427 }
2428 
/* Registry of DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
/* Guards against repeated registration of the built-in DM types */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2431 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* Nothing to do if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the type's constructor in the registry, registering built-in types first if needed */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation before running the new type's constructor */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2477 
#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name (not a copy; owned by the DM)

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode  DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  /* Ensure the registry is initialized so the returned name is meaningful */
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}
2509 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below — presumably a
     same-type fast path was intended; confirm against upstream history */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Converter functions are composed on the object under names "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with the #if 0 block disabled, B created in step 2 is not destroyed before
       this error return — a leak on the no-converter error path; confirm before fixing */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2605 
2606 /*--------------------------------------------------------------------------------------------------------------------*/
2607 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Level: advanced
@*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Registry entries store the constructor location as "path:name" */
  /* NOTE(review): no bound check against PETSC_MAX_PATH_LEN on the concatenation; presumably
     callers pass short built-in paths — confirm */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2627 
2628 
2629 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow the built-in types to be re-registered later */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2651 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen(). Only
           binary viewers are currently supported; any other viewer type
           (including HDF5) produces an error.

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];  /* must match the 256-char read below */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer format is implemented here */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The stream starts with a class id marker followed by the DM type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the type-specific payload to the implementation, if it provides one */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2703 
2704 /******************************** FEM Support **********************************/
2705 
2706 #undef __FUNCT__
2707 #define __FUNCT__ "DMPrintCellVector"
2708 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2709   PetscInt       f;
2710   PetscErrorCode ierr;
2711 
2712   PetscFunctionBegin;
2713   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2714   for (f = 0; f < len; ++f) {
2715     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2716   }
2717   PetscFunctionReturn(0);
2718 }
2719 
2720 #undef __FUNCT__
2721 #define __FUNCT__ "DMPrintCellMatrix"
2722 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2723   PetscInt       f, g;
2724   PetscErrorCode ierr;
2725 
2726   PetscFunctionBegin;
2727   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2728   for (f = 0; f < rows; ++f) {
2729     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2730     for (g = 0; g < cols; ++g) {
2731       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2732     }
2733     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2734   }
2735   PetscFunctionReturn(0);
2736 }
2737 
2738 #undef __FUNCT__
2739 #define __FUNCT__ "DMGetDefaultSection"
2740 /*@
2741   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2742 
2743   Input Parameter:
2744 . dm - The DM
2745 
2746   Output Parameter:
2747 . section - The PetscSection
2748 
2749   Level: intermediate
2750 
2751   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2752 
2753 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2754 @*/
2755 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2756   PetscFunctionBegin;
2757   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2758   PetscValidPointer(section, 2);
2759   *section = dm->defaultSection;
2760   PetscFunctionReturn(0);
2761 }
2762 
2763 #undef __FUNCT__
2764 #define __FUNCT__ "DMSetDefaultSection"
2765 /*@
2766   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2767 
2768   Input Parameters:
2769 + dm - The DM
2770 - section - The PetscSection
2771 
2772   Level: intermediate
2773 
2774   Note: Any existing Section will be destroyed
2775 
2776 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2777 @*/
2778 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2779   PetscInt       numFields;
2780   PetscInt       f;
2781   PetscErrorCode ierr;
2782 
2783   PetscFunctionBegin;
2784   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2785   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2786   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2787   dm->defaultSection = section;
2788   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2789   if (numFields) {
2790     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2791     for (f = 0; f < numFields; ++f) {
2792       const char *name;
2793 
2794       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2795       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2796     }
2797   }
2798   PetscFunctionReturn(0);
2799 }
2800 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

  The global section is created lazily on first request from the default
  (local) PetscSection and the point PetscSF; both must already be set on
  the DM or an error is raised.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazy creation: derive the global layout from the local section plus
       the point SF describing parallel overlap */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also cache the matching PetscLayout of owned values in dm->map */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2834 
2835 #undef __FUNCT__
2836 #define __FUNCT__ "DMSetDefaultGlobalSection"
2837 /*@
2838   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2839 
2840   Input Parameters:
2841 + dm - The DM
2842 - section - The PetscSection
2843 
2844   Level: intermediate
2845 
2846   Note: Any existing Section will be destroyed
2847 
2848 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2849 @*/
2850 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2851   PetscErrorCode ierr;
2852 
2853   PetscFunctionBegin;
2854   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2855   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2856   dm->defaultGlobalSection = section;
2857   PetscFunctionReturn(0);
2858 }
2859 
2860 #undef __FUNCT__
2861 #define __FUNCT__ "DMGetDefaultSF"
2862 /*@
2863   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2864   it is created from the default PetscSection layouts in the DM.
2865 
2866   Input Parameter:
2867 . dm - The DM
2868 
2869   Output Parameter:
2870 . sf - The PetscSF
2871 
2872   Level: intermediate
2873 
2874   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2875 
2876 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2877 @*/
2878 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2879   PetscInt       nroots;
2880   PetscErrorCode ierr;
2881 
2882   PetscFunctionBegin;
2883   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2884   PetscValidPointer(sf, 2);
2885   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2886   if (nroots < 0) {
2887     PetscSection section, gSection;
2888 
2889     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2890     if (section) {
2891       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2892       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2893     } else {
2894       *sf = PETSC_NULL;
2895       PetscFunctionReturn(0);
2896     }
2897   }
2898   *sf = dm->defaultSF;
2899   PetscFunctionReturn(0);
2900 }
2901 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  /* NOTE(review): unlike DMSetPointSF(), no PetscObjectReference() is taken
     here, so the DM silently assumes ownership of sf; the caller must not
     destroy it afterwards. Confirm whether this asymmetry is intentional. */
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2927 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Notes: Leaves of the SF are the unconstrained local dofs; roots are the
  globally owned dofs laid out by a PetscLayout built from the constrained
  storage size of globalSection on each process.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build the layout of owned (root) dofs so remote offsets can be mapped to
     (rank, local index) pairs via its ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative global dof appears to encode a
     remotely owned point as -(dof+1); constrained dofs are excluded. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local leaf indices and their remote (rank,index) roots */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* Allow the global size to match the full local size, in which case
         the local constraints are not reflected globally */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of unconstrained dofs; c counts constraints skipped */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset from -(goff+1)
         and locate the owning rank by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt encodes the insert location as -(loc+1) */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: roots live on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3016 
3017 #undef __FUNCT__
3018 #define __FUNCT__ "DMGetPointSF"
3019 /*@
3020   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3021 
3022   Input Parameter:
3023 . dm - The DM
3024 
3025   Output Parameter:
3026 . sf - The PetscSF
3027 
3028   Level: intermediate
3029 
3030   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3031 
3032 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3033 @*/
3034 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3035   PetscFunctionBegin;
3036   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3037   PetscValidPointer(sf, 2);
3038   *sf = dm->sf;
3039   PetscFunctionReturn(0);
3040 }
3041 
3042 #undef __FUNCT__
3043 #define __FUNCT__ "DMSetPointSF"
3044 /*@
3045   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3046 
3047   Input Parameters:
3048 + dm - The DM
3049 - sf - The PetscSF
3050 
3051   Level: intermediate
3052 
3053 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3054 @*/
3055 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3056   PetscErrorCode ierr;
3057 
3058   PetscFunctionBegin;
3059   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3060   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3061   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3062   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3063   dm->sf = sf;
3064   PetscFunctionReturn(0);
3065 }
3066 
3067 #undef __FUNCT__
3068 #define __FUNCT__ "DMGetNumFields"
3069 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3070 {
3071   PetscFunctionBegin;
3072   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3073   PetscValidPointer(numFields, 2);
3074   *numFields = dm->numFields;
3075   PetscFunctionReturn(0);
3076 }
3077 
3078 #undef __FUNCT__
3079 #define __FUNCT__ "DMSetNumFields"
3080 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3081 {
3082   PetscInt       f;
3083   PetscErrorCode ierr;
3084 
3085   PetscFunctionBegin;
3086   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3087   for (f = 0; f < dm->numFields; ++f) {
3088     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3089   }
3090   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3091   dm->numFields = numFields;
3092   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3093   for (f = 0; f < dm->numFields; ++f) {
3094     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3095   }
3096   PetscFunctionReturn(0);
3097 }
3098 
3099 #undef __FUNCT__
3100 #define __FUNCT__ "DMGetField"
3101 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3102 {
3103   PetscFunctionBegin;
3104   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3105   PetscValidPointer(field, 2);
3106   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3107   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3108   *field = dm->fields[f];
3109   PetscFunctionReturn(0);
3110 }
3111 
3112 #undef __FUNCT__
3113 #define __FUNCT__ "DMSetCoordinates"
3114 /*@
3115   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3116 
3117   Collective on DM
3118 
3119   Input Parameters:
3120 + dm - the DM
3121 - c - coordinate vector
3122 
3123   Note:
3124   The coordinates do include those for ghost points, which are in the local vector
3125 
3126   Level: intermediate
3127 
3128 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3129 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3130 @*/
3131 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3132 {
3133   PetscErrorCode ierr;
3134 
3135   PetscFunctionBegin;
3136   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3137   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3138   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3139   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3140   dm->coordinates = c;
3141   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3142   PetscFunctionReturn(0);
3143 }
3144 
3145 #undef __FUNCT__
3146 #define __FUNCT__ "DMSetCoordinatesLocal"
3147 /*@
3148   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3149 
3150   Collective on DM
3151 
3152    Input Parameters:
3153 +  dm - the DM
3154 -  c - coordinate vector
3155 
3156   Note:
3157   The coordinates of ghost points can be set using DMSetCoordinates()
3158   followed by DMGetCoordinatesLocal(). This is intended to enable the
3159   setting of ghost coordinates outside of the domain.
3160 
3161   Level: intermediate
3162 
3163 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3164 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3165 @*/
3166 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3167 {
3168   PetscErrorCode ierr;
3169 
3170   PetscFunctionBegin;
3171   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3172   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3173   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3174   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3175   dm->coordinatesLocal = c;
3176   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3177   PetscFunctionReturn(0);
3178 }
3179 
3180 #undef __FUNCT__
3181 #define __FUNCT__ "DMGetCoordinates"
3182 /*@
3183   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3184 
3185   Not Collective
3186 
3187   Input Parameter:
3188 . dm - the DM
3189 
3190   Output Parameter:
3191 . c - global coordinate vector
3192 
3193   Note:
3194   This is a borrowed reference, so the user should NOT destroy this vector
3195 
3196   Each process has only the local coordinates (does NOT have the ghost coordinates).
3197 
3198   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3199   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3200 
3201   Level: intermediate
3202 
3203 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3204 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3205 @*/
3206 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3207 {
3208   PetscErrorCode ierr;
3209 
3210   PetscFunctionBegin;
3211   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3212   PetscValidPointer(c,2);
3213   if (!dm->coordinates && dm->coordinatesLocal) {
3214     DM cdm = PETSC_NULL;
3215 
3216     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3217     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3218     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3219     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3220     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3221   }
3222   *c = dm->coordinates;
3223   PetscFunctionReturn(0);
3224 }
3225 
3226 #undef __FUNCT__
3227 #define __FUNCT__ "DMGetCoordinatesLocal"
3228 /*@
3229   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3230 
3231   Collective on DM
3232 
3233   Input Parameter:
3234 . dm - the DM
3235 
3236   Output Parameter:
3237 . c - coordinate vector
3238 
3239   Note:
3240   This is a borrowed reference, so the user should NOT destroy this vector
3241 
3242   Each process has the local and ghost coordinates
3243 
3244   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3245   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3246 
3247   Level: intermediate
3248 
3249 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3250 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3251 @*/
3252 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3253 {
3254   PetscErrorCode ierr;
3255 
3256   PetscFunctionBegin;
3257   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3258   PetscValidPointer(c,2);
3259   if (!dm->coordinatesLocal && dm->coordinates) {
3260     DM cdm = PETSC_NULL;
3261 
3262     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3263     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3264     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3265     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3266     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3267   }
3268   *c = dm->coordinatesLocal;
3269   PetscFunctionReturn(0);
3270 }
3271 
3272 #undef __FUNCT__
3273 #define __FUNCT__ "DMGetCoordinateDM"
3274 /*@
3275   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3276 
3277   Collective on DM
3278 
3279   Input Parameter:
3280 . dm - the DM
3281 
3282   Output Parameter:
3283 . cdm - coordinate DM
3284 
3285   Level: intermediate
3286 
3287 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3288 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3289 @*/
3290 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3291 {
3292   PetscErrorCode ierr;
3293 
3294   PetscFunctionBegin;
3295   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3296   PetscValidPointer(cdm,2);
3297   if (!dm->coordinateDM) {
3298     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3299     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3300   }
3301   *cdm = dm->coordinateDM;
3302   PetscFunctionReturn(0);
3303 }
3304