xref: /petsc/src/dm/interface/dm.c (revision 2a34c10ce821da49e32a0f08e4bd95dc44083eea)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the DM.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
  /* With static libraries the packages this DM depends on must be initialized explicitly */
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* Allocate the header + ops table, then zero the ops so an unimplemented
     operation is a NULL pointer rather than garbage */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 presumably matches the declared size of nullspaceConstructors
       in dmimpl.h -- confirm against the struct definition */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
64 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - the DM context
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Drop any previously stored type name (PetscFree of PETSC_NULL is a no-op),
     then keep a private copy of the new name */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
93 
94 #undef __FUNCT__
95 #define __FUNCT__ "VecGetDM"
96 /*@
97   VecGetDM - Gets the DM defining the data layout of the vector
98 
99   Not collective
100 
101   Input Parameter:
102 . v - The Vec
103 
104   Output Parameter:
105 . dm - The DM
106 
107   Level: intermediate
108 
109 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
110 @*/
111 PetscErrorCode VecGetDM(Vec v, DM *dm)
112 {
113   PetscErrorCode ierr;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
117   PetscValidPointer(dm,2);
118   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "DMSetMatType"
150 /*@C
151        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
152 
153    Logically Collective on DM
154 
155    Input Parameter:
156 +  dm - the DM context
157 .  ctype - the matrix type
158 
159    Options Database:
160 .   -dm_mat_type ctype
161 
162    Level: intermediate
163 
164 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
165 @*/
166 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
167 {
168   PetscErrorCode ierr;
169   PetscFunctionBegin;
170   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
171   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
172   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatGetDM"
178 /*@
179   MatGetDM - Gets the DM defining the data layout of the matrix
180 
181   Not collective
182 
183   Input Parameter:
184 . A - The Mat
185 
186   Output Parameter:
187 . dm - The DM
188 
189   Level: intermediate
190 
191 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
192 @*/
193 PetscErrorCode MatGetDM(Mat A, DM *dm)
194 {
195   PetscErrorCode ierr;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
199   PetscValidPointer(dm,2);
200   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
201   PetscFunctionReturn(0);
202 }
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "MatSetDM"
206 /*@
207   MatSetDM - Sets the DM defining the data layout of the matrix
208 
209   Not collective
210 
211   Input Parameters:
212 + A - The Mat
213 - dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatSetDM(Mat A, DM dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
226   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegate to the generic object prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
261 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to PETSC_NULL on return

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only proceed with destruction when the remaining references are exactly
     the circular ones counted above; otherwise just drop one reference */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* Cached local work vectors: error if any is still checked out, free the rest */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User context: only destroyed if a destroy callback was supplied */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific destruction runs last, before the header itself is freed */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
386 
387 #undef __FUNCT__
388 #define __FUNCT__ "DMSetUp"
389 /*@
390     DMSetUp - sets up the data structures inside a DM object
391 
392     Collective on DM
393 
394     Input Parameter:
395 .   dm - the DM object to setup
396 
397     Level: developer
398 
399 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
400 
401 @*/
402 PetscErrorCode  DMSetUp(DM dm)
403 {
404   PetscErrorCode ierr;
405 
406   PetscFunctionBegin;
407   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
408   if (dm->setupcalled) PetscFunctionReturn(0);
409   if (dm->ops->setup) {
410     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
411   }
412   dm->setupcalled = PETSC_TRUE;
413   PetscFunctionReturn(0);
414 }
415 
416 #undef __FUNCT__
417 #define __FUNCT__ "DMSetFromOptions"
418 /*@
419     DMSetFromOptions - sets parameters in a DM from the options database
420 
421     Collective on DM
422 
423     Input Parameter:
424 .   dm - the DM object to set options for
425 
426     Options Database:
427 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
428 .   -dm_vec_type <type>  type of vector to create inside DM
429 .   -dm_mat_type <type>  type of matrix to create inside DM
430 -   -dm_coloring_type <global or ghosted>
431 
432     Level: developer
433 
434 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
435 
436 @*/
437 PetscErrorCode  DMSetFromOptions(DM dm)
438 {
439   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
440   PetscErrorCode ierr;
441   char           typeName[256] = MATAIJ;
442 
443   PetscFunctionBegin;
444   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
445   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
448     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
449     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
450     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
451     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
452     if (flg) {
453       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
454     }
455     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
456     if (flg) {
457       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
458     }
459     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
460     if (dm->ops->setfromoptions) {
461       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
462     }
463     /* process any options handlers added with PetscObjectAddOptionsHandler() */
464     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
465   ierr = PetscOptionsEnd();CHKERRQ(ierr);
466   if (flg1) {
467     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
468   }
469   if (flg2) {
470     PetscViewer viewer;
471 
472     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
473     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
474     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
475     ierr = DMView(dm, viewer);CHKERRQ(ierr);
476     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
477   }
478   if (flg3) {
479     PetscViewer viewer;
480 
481     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
482     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
483     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
484     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
485     ierr = DMView(dm, viewer);CHKERRQ(ierr);
486     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
487   }
488   if (flg4) {
489     PetscViewer viewer;
490 
491     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
492     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
493     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
494     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
495     ierr = DMView(dm, viewer);CHKERRQ(ierr);
496     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
497   }
498   PetscFunctionReturn(0);
499 }
500 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; if PETSC_NULL the stdout ASCII viewer on the DM's communicator is used

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: rank 0 writes a file header (classid followed by a
       fixed-width 256-byte type name) before the type-specific data */
    PetscInt         classid = DM_FILE_CLASSID;
    MPI_Comm         comm;
    PetscMPIInt      rank;
    char             type[256];

    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* Delegate the actual content to the DM implementation, if it provides a viewer */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
547 
548 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
549 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
550 
551 #undef __FUNCT__
552 #define __FUNCT__ "DMCreateGlobalVector"
553 /*@
554     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
555 
556     Collective on DM
557 
558     Input Parameter:
559 .   dm - the DM object
560 
561     Output Parameter:
562 .   vec - the global vector
563 
564     Level: beginner
565 
566 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
567 
568 @*/
569 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
570 {
571   PetscErrorCode ierr;
572 
573   PetscFunctionBegin;
574   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
575   if (dm->defaultSection) {
576     PetscSection gSection;
577     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
578 
579     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
580     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
581     for(p = pStart; p < pEnd; ++p) {
582       PetscInt dof, cdof;
583 
584       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
585       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
586       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
587       if ((dof > 0) && (dof-cdof != blockSize)) {
588         blockSize = 1;
589         break;
590       }
591     }
592     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
593     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
594     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
595     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
596     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
597     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
598     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
599     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
600     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
601     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
602     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
603     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
604   } else {
605     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
606   }
607   PetscFunctionReturn(0);
608 }
609 
610 #undef __FUNCT__
611 #define __FUNCT__ "DMCreateLocalVector"
612 /*@
613     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
614 
615     Not Collective
616 
617     Input Parameter:
618 .   dm - the DM object
619 
620     Output Parameter:
621 .   vec - the local vector
622 
623     Level: beginner
624 
625 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
626 
627 @*/
628 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
629 {
630   PetscErrorCode ierr;
631 
632   PetscFunctionBegin;
633   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
634   if (dm->defaultSection) {
635     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
636 
637     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
638     for(p = pStart; p < pEnd; ++p) {
639       PetscInt dof;
640 
641       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
642       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
643       if ((dof > 0) && (dof != blockSize)) {
644         blockSize = 1;
645         break;
646       }
647     }
648     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
649     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
650     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
651     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
652     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
653     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
654     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
655   } else {
656     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
657   }
658   PetscFunctionReturn(0);
659 }
660 
661 #undef __FUNCT__
662 #define __FUNCT__ "DMGetLocalToGlobalMapping"
663 /*@
664    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
665 
666    Collective on DM
667 
668    Input Parameter:
669 .  dm - the DM that provides the mapping
670 
671    Output Parameter:
672 .  ltog - the mapping
673 
674    Level: intermediate
675 
676    Notes:
677    This mapping can then be used by VecSetLocalToGlobalMapping() or
678    MatSetLocalToGlobalMapping().
679 
680 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
681 @*/
682 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
683 {
684   PetscErrorCode ierr;
685 
686   PetscFunctionBegin;
687   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
688   PetscValidPointer(ltog,2);
689   if (!dm->ltogmap) {
690     PetscSection section, sectionGlobal;
691 
692     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
693     if (section) {
694       PetscInt      *ltog;
695       PetscInt       pStart, pEnd, size, p, l;
696 
697       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
698       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
699       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
700       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
701       for (p = pStart, l = 0; p < pEnd; ++p) {
702         PetscInt dof, off, c;
703 
704         /* Should probably use constrained dofs */
705         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
706         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
707         for (c = 0; c < dof; ++c, ++l) {
708           ltog[l] = off+c;
709         }
710       }
711       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
712       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
713     } else {
714       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
715       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
716     }
717   }
718   *ltog = dm->ltogmap;
719   PetscFunctionReturn(0);
720 }
721 
722 #undef __FUNCT__
723 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
724 /*@
725    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
726 
727    Collective on DM
728 
729    Input Parameter:
730 .  da - the distributed array that provides the mapping
731 
732    Output Parameter:
733 .  ltog - the block mapping
734 
735    Level: intermediate
736 
737    Notes:
738    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
739    MatSetLocalToGlobalMappingBlock().
740 
741 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
742 @*/
743 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
744 {
745   PetscErrorCode ierr;
746 
747   PetscFunctionBegin;
748   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
749   PetscValidPointer(ltog,2);
750   if (!dm->ltogmapb) {
751     PetscInt bs;
752     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
753     if (bs > 1) {
754       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
755       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
756     } else {
757       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
758       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
759     }
760   }
761   *ltog = dm->ltogmapb;
762   PetscFunctionReturn(0);
763 }
764 
765 #undef __FUNCT__
766 #define __FUNCT__ "DMGetBlockSize"
767 /*@
768    DMGetBlockSize - Gets the inherent block size associated with a DM
769 
770    Not Collective
771 
772    Input Parameter:
773 .  dm - the DM with block structure
774 
775    Output Parameter:
776 .  bs - the block size, 1 implies no exploitable block structure
777 
778    Level: intermediate
779 
780 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
781 @*/
782 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
783 {
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
786   PetscValidPointer(bs,2);
787   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
788   *bs = dm->bs;
789   PetscFunctionReturn(0);
790 }
791 
792 #undef __FUNCT__
793 #define __FUNCT__ "DMCreateInterpolation"
794 /*@
795     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
796 
797     Collective on DM
798 
799     Input Parameter:
800 +   dm1 - the DM object
801 -   dm2 - the second, finer DM object
802 
803     Output Parameter:
804 +  mat - the interpolation
805 -  vec - the scaling (optional)
806 
807     Level: developer
808 
809     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
810         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
811 
812         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
813         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
814 
815 
816 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
817 
818 @*/
819 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
820 {
821   PetscErrorCode ierr;
822 
823   PetscFunctionBegin;
824   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
825   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
826   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
827   PetscFunctionReturn(0);
828 }
829 
830 #undef __FUNCT__
831 #define __FUNCT__ "DMCreateInjection"
832 /*@
833     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
834 
835     Collective on DM
836 
837     Input Parameter:
838 +   dm1 - the DM object
839 -   dm2 - the second, finer DM object
840 
841     Output Parameter:
842 .   ctx - the injection
843 
844     Level: developer
845 
846    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
847         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
848 
849 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
850 
851 @*/
852 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
853 {
854   PetscErrorCode ierr;
855 
856   PetscFunctionBegin;
857   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
858   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
859   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
860   PetscFunctionReturn(0);
861 }
862 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The coloring operation is optional for a DM implementation */
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
893 
894 #undef __FUNCT__
895 #define __FUNCT__ "DMCreateMatrix"
896 /*@C
897     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
898 
899     Collective on DM
900 
901     Input Parameter:
902 +   dm - the DM object
903 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
904             any type which inherits from one of these (such as MATAIJ)
905 
906     Output Parameter:
907 .   mat - the empty Jacobian
908 
909     Level: beginner
910 
911     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
912        do not need to do it yourself.
913 
914        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
915        the nonzero pattern call DMDASetMatPreallocateOnly()
916 
917        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
918        internally by PETSc.
919 
920        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
921        the indices for the global numbering for DMDAs which is complicated.
922 
923 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
924 
925 @*/
926 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
927 {
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
932 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
933   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
934 #endif
935   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
936   PetscValidPointer(mat,3);
937   if (dm->mattype) {
938     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
939   } else {
940     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
941   }
942   PetscFunctionReturn(0);
943 }
944 
945 #undef __FUNCT__
946 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
947 /*@
948   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
949     preallocated but the nonzero structure and zero values will not be set.
950 
951   Logically Collective on DMDA
952 
953   Input Parameter:
954 + dm - the DM
955 - only - PETSC_TRUE if only want preallocation
956 
957   Level: developer
958 .seealso DMCreateMatrix()
959 @*/
960 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
961 {
962   PetscFunctionBegin;
963   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
964   dm->prealloc_only = only;
965   PetscFunctionReturn(0);
966 }
967 
968 #undef __FUNCT__
969 #define __FUNCT__ "DMGetWorkArray"
970 /*@C
971   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
972 
973   Not Collective
974 
975   Input Parameters:
976 + dm - the DM object
977 . count - The minium size
978 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
979 
980   Output Parameter:
981 . array - the work array
982 
983   Level: developer
984 
985 .seealso DMDestroy(), DMCreate()
986 @*/
987 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
988 {
989   PetscErrorCode ierr;
990   DMWorkLink link;
991   size_t size;
992 
993   PetscFunctionBegin;
994   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
995   PetscValidPointer(mem,4);
996   if (dm->workin) {
997     link = dm->workin;
998     dm->workin = dm->workin->next;
999   } else {
1000     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1001   }
1002   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1003   if (size*count > link->bytes) {
1004     ierr = PetscFree(link->mem);CHKERRQ(ierr);
1005     ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1006     link->bytes = size*count;
1007   }
1008   link->next = dm->workout;
1009   dm->workout = link;
1010   *(void**)mem = link->mem;
1011   PetscFunctionReturn(0);
1012 }
1013 
1014 #undef __FUNCT__
1015 #define __FUNCT__ "DMRestoreWorkArray"
1016 /*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()
1018 
1019   Not Collective
1020 
1021   Input Parameters:
1022 + dm - the DM object
1023 . count - The minium size
1024 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1025 
1026   Output Parameter:
1027 . array - the work array
1028 
1029   Level: developer
1030 
1031 .seealso DMDestroy(), DMCreate()
1032 @*/
1033 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1034 {
1035   DMWorkLink *p,link;
1036 
1037   PetscFunctionBegin;
1038   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1039   PetscValidPointer(mem,4);
1040   for (p=&dm->workout; (link=*p); p=&link->next) {
1041     if (link->mem == *(void**)mem) {
1042       *p = link->next;
1043       link->next = dm->workin;
1044       dm->workin = link;
1045       *(void**)mem = PETSC_NULL;
1046       PetscFunctionReturn(0);
1047     }
1048   }
1049   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1050   PetscFunctionReturn(0);
1051 }
1052 
1053 #undef __FUNCT__
1054 #define __FUNCT__ "DMSetNullSpaceConstructor"
1055 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1056 {
1057   PetscFunctionBegin;
1058   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1059   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1060   dm->nullspaceConstructors[field] = nullsp;
1061   PetscFunctionReturn(0);
1062 }
1063 
1064 #undef __FUNCT__
1065 #define __FUNCT__ "DMCreateFieldIS"
1066 /*@C
1067   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1068 
1069   Not collective
1070 
1071   Input Parameter:
1072 . dm - the DM object
1073 
1074   Output Parameters:
1075 + numFields  - The number of fields (or PETSC_NULL if not requested)
1076 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1077 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1078 
1079   Level: intermediate
1080 
1081   Notes:
1082   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1083   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1084   PetscFree().
1085 
1086 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1087 @*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All three outputs are optional; initialize whichever were requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Build the per-field index sets directly from the default PetscSection */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the unconstrained dofs of each field on locally-owned
       points (gdof > 0 in the global section marks local ownership) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reused below as a running fill position */
    }
    /* Second pass: record the global index of each unconstrained dof; dofs of a
       point are numbered consecutively from the point's global offset, field by field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* PETSC_OWN_POINTER: each IS takes ownership of fieldIndices[f], so the
         index arrays must not be freed here.
         NOTE(review): if fields is PETSC_NULL the fieldIndices[f] arrays appear
         to be leaked (only the fieldIndices table itself is freed below) -- confirm */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: defer to the implementation, if it provides one */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1176 
1177 #undef __FUNCT__
1178 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1179 /*@C
1180   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1181 
1182   Not Collective
1183 
1184   Input Parameters:
1185 + dm   - the DM object
1186 - name - the name of the field decomposition
1187 
1188   Output Parameter:
1189 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1190 
1191   Level: advanced
1192 
1193 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1194 @*/
1195 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1196 {
1197   PetscErrorCode ierr;
1198 
1199   PetscFunctionBegin;
1200   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1201   PetscValidCharPointer(name,2);
1202   PetscValidPointer(ddm,3);
1203   *ddm = PETSC_NULL;
1204   if (dm->ops->createfielddecompositiondm) {
1205     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1206   }
1207   PetscFunctionReturn(0);
1208 }
1209 
1210 #undef __FUNCT__
1211 #define __FUNCT__ "DMCreateFieldDecomposition"
1212 /*@C
1213   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1214                           corresponding to different fields: each IS contains the global indices of the dofs of the
1215                           corresponding field. The optional list of DMs define the DM for each subproblem.
1216                           Generalizes DMCreateFieldIS().
1217 
1218   Not collective
1219 
1220   Input Parameter:
1221 . dm - the DM object
1222 
1223   Output Parameters:
1224 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1225 . namelist  - The name for each field (or PETSC_NULL if not requested)
1226 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1227 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1228 
1229   Level: intermediate
1230 
1231   Notes:
1232   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1233   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1234   and all of the arrays should be freed with PetscFree().
1235 
1236 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1237 @*/
1238 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1239 {
1240   PetscErrorCode ierr;
1241 
1242   PetscFunctionBegin;
1243   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1244   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1245   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1246   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1247   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1248   if (!dm->ops->createfielddecomposition) {
1249     PetscSection section;
1250     PetscInt     numFields, f;
1251 
1252     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1253     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1254     if (section && numFields && dm->ops->createsubdm) {
1255       *len = numFields;
1256       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1257       for (f = 0; f < numFields; ++f) {
1258         const char *fieldName;
1259 
1260         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1261         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1262         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1263       }
1264     } else {
1265       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1266       /* By default there are no DMs associated with subproblems. */
1267       if (dmlist) *dmlist = PETSC_NULL;
1268     }
1269   }
1270   else {
1271     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1272   }
1273   PetscFunctionReturn(0);
1274 }
1275 
1276 #undef __FUNCT__
1277 #define __FUNCT__ "DMCreateSubDM"
1278 /*@C
1279   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1280                   The fields are defined by DMCreateFieldIS().
1281 
1282   Not collective
1283 
  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the indices of the fields defining the subproblem

  Output Parameters:
+ is - The global indices for the subproblem
- subdm - The DM for the subproblem
1292 
1293   Level: intermediate
1294 
1295 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1296 @*/
1297 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1298 {
1299   PetscErrorCode ierr;
1300 
1301   PetscFunctionBegin;
1302   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1303   PetscValidPointer(fields,3);
1304   if (is) {PetscValidPointer(is,4);}
1305   if (subdm) {PetscValidPointer(subdm,5);}
1306   if (dm->ops->createsubdm) {
1307     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1308   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1309   PetscFunctionReturn(0);
1310 }
1311 
1312 #undef __FUNCT__
1313 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1314 /*@C
1315   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1316 
1317   Not Collective
1318 
1319   Input Parameters:
1320 + dm   - the DM object
1321 - name - the name of the subdomain decomposition
1322 
1323   Output Parameter:
1324 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1325 
1326   Level: advanced
1327 
1328 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1329 @*/
1330 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1331 {
1332   PetscErrorCode ierr;
1333 
1334   PetscFunctionBegin;
1335   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1336   PetscValidCharPointer(name,2);
1337   PetscValidPointer(ddm,3);
1338   *ddm = PETSC_NULL;
1339   if (dm->ops->createdomaindecompositiondm) {
1340     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1341   }
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 #undef __FUNCT__
1346 #define __FUNCT__ "DMCreateDomainDecomposition"
1347 /*@C
1348   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1349                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1350                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1351                           define a nonoverlapping covering, while outer subdomains can overlap.
1352                           The optional list of DMs define the DM for each subproblem.
1353 
1354   Not collective
1355 
1356   Input Parameter:
1357 . dm - the DM object
1358 
1359   Output Parameters:
1360 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1361 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1362 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1363 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1364 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1365 
1366   Level: intermediate
1367 
1368   Notes:
1369   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1370   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1371   and all of the arrays should be freed with PetscFree().
1372 
1373 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1374 @*/
1375 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1376 {
1377   PetscErrorCode ierr;
1378 
1379   PetscFunctionBegin;
1380   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1381   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1382   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1383   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1384   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1385   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1386   if (dm->ops->createdomaindecomposition) {
1387     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1388   }
1389   PetscFunctionReturn(0);
1390 }
1391 
1392 #undef __FUNCT__
1393 #define __FUNCT__ "DMRefine"
1394 /*@
1395   DMRefine - Refines a DM object
1396 
1397   Collective on DM
1398 
1399   Input Parameter:
1400 + dm   - the DM object
1401 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1402 
1403   Output Parameter:
1404 . dmf - the refined DM, or PETSC_NULL
1405 
1406   Note: If no refinement was done, the return value is PETSC_NULL
1407 
1408   Level: developer
1409 
1410 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1411 @*/
1412 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1413 {
1414   PetscErrorCode ierr;
1415   DMRefineHookLink link;
1416 
1417   PetscFunctionBegin;
1418   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1419   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1420   if (*dmf) {
1421     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1422     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1423     (*dmf)->ctx       = dm->ctx;
1424     (*dmf)->leveldown = dm->leveldown;
1425     (*dmf)->levelup   = dm->levelup + 1;
1426     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1427     for (link=dm->refinehook; link; link=link->next) {
1428       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1429     }
1430   }
1431   PetscFunctionReturn(0);
1432 }
1433 
1434 #undef __FUNCT__
1435 #define __FUNCT__ "DMRefineHookAdd"
1436 /*@
1437    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1438 
1439    Logically Collective
1440 
1441    Input Arguments:
1442 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1443 .  refinehook - function to run when setting up a coarser level
1444 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1445 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1446 
1447    Calling sequence of refinehook:
1448 $    refinehook(DM coarse,DM fine,void *ctx);
1449 
1450 +  coarse - coarse level DM
1451 .  fine - fine level DM to interpolate problem to
1452 -  ctx - optional user-defined function context
1453 
1454    Calling sequence for interphook:
1455 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1456 
1457 +  coarse - coarse level DM
1458 .  interp - matrix interpolating a coarse-level solution to the finer grid
1459 .  fine - fine level DM to update
1460 -  ctx - optional user-defined function context
1461 
1462    Level: advanced
1463 
1464    Notes:
1465    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1466 
1467    If this function is called multiple times, the hooks will be run in the order they are added.
1468 
1469 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1470 @*/
1471 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1472 {
1473   PetscErrorCode ierr;
1474   DMRefineHookLink link,*p;
1475 
1476   PetscFunctionBegin;
1477   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1478   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1479   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1480   link->refinehook = refinehook;
1481   link->interphook = interphook;
1482   link->ctx = ctx;
1483   link->next = PETSC_NULL;
1484   *p = link;
1485   PetscFunctionReturn(0);
1486 }
1487 
1488 #undef __FUNCT__
1489 #define __FUNCT__ "DMInterpolate"
1490 /*@
1491    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1492 
1493    Collective if any hooks are
1494 
1495    Input Arguments:
1496 +  coarse - coarser DM to use as a base
.  interp - interpolation matrix, apply using MatInterpolate()
1498 -  fine - finer DM to update
1499 
1500    Level: developer
1501 
1502 .seealso: DMRefineHookAdd(), MatInterpolate()
1503 @*/
1504 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1505 {
1506   PetscErrorCode ierr;
1507   DMRefineHookLink link;
1508 
1509   PetscFunctionBegin;
1510   for (link=fine->refinehook; link; link=link->next) {
1511     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1512   }
1513   PetscFunctionReturn(0);
1514 }
1515 
1516 #undef __FUNCT__
1517 #define __FUNCT__ "DMGetRefineLevel"
1518 /*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.
1520 
1521     Not Collective
1522 
1523     Input Parameter:
1524 .   dm - the DM object
1525 
1526     Output Parameter:
1527 .   level - number of refinements
1528 
1529     Level: developer
1530 
1531 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1532 
1533 @*/
1534 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1535 {
1536   PetscFunctionBegin;
1537   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1538   *level = dm->levelup;
1539   PetscFunctionReturn(0);
1540 }
1541 
1542 #undef __FUNCT__
1543 #define __FUNCT__ "DMGlobalToLocalBegin"
1544 /*@
1545     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1546 
1547     Neighbor-wise Collective on DM
1548 
1549     Input Parameters:
1550 +   dm - the DM object
1551 .   g - the global vector
1552 .   mode - INSERT_VALUES or ADD_VALUES
1553 -   l - the local vector
1554 
1555 
1556     Level: beginner
1557 
1558 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1559 
1560 @*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Communicate through the DM's default PetscSF: start broadcasting owned
       (global) values to their copies in the local vector.  Only insertion is
       supported on this path. */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): the arrays are restored before PetscSFBcastEnd() runs in
       DMGlobalToLocalEnd() -- presumably the SF retains what it needs across
       the split phases; confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; the *_ALL_VALUES modes are mapped to their plain equivalents */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1583 
1584 #undef __FUNCT__
1585 #define __FUNCT__ "DMGlobalToLocalEnd"
1586 /*@
1587     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1588 
1589     Neighbor-wise Collective on DM
1590 
1591     Input Parameters:
1592 +   dm - the DM object
1593 .   g - the global vector
1594 .   mode - INSERT_VALUES or ADD_VALUES
1595 -   l - the local vector
1596 
1597 
1598     Level: beginner
1599 
1600 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1601 
1602 @*/
1603 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1604 {
1605   PetscSF        sf;
1606   PetscErrorCode ierr;
1607   PetscScalar    *lArray, *gArray;
1608 
1609   PetscFunctionBegin;
1610   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1611   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1612   if (sf) {
1613   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1614 
1615     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1616     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1617     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1618     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1619     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1620   } else {
1621     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1622   }
1623   PetscFunctionReturn(0);
1624 }
1625 
1626 #undef __FUNCT__
1627 #define __FUNCT__ "DMLocalToGlobalBegin"
1628 /*@
1629     DMLocalToGlobalBegin - updates global vectors from local vectors
1630 
1631     Neighbor-wise Collective on DM
1632 
1633     Input Parameters:
1634 +   dm - the DM object
1635 .   l - the local vector
1636 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1637            base point.
-   g - the global vector
1639 
1640     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1641            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1642            global array to the final global array with VecAXPY().
1643 
1644     Level: beginner
1645 
1646 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1647 
1648 @*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Communicate through the DM's default PetscSF: start reducing local
       (ghosted) values onto their owning entries in the global vector */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Choose the reduction operation from the insert mode.  Note SETERRQ
       returns from the function, so the #else branch cannot fall through
       into the ADD cases. */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
  default:
    SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before PetscSFReduceEnd() completes in
       DMLocalToGlobalEnd() -- presumably the SF retains what it needs; confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; the *_ALL_VALUES modes are mapped to their plain equivalents */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1685 
1686 #undef __FUNCT__
1687 #define __FUNCT__ "DMLocalToGlobalEnd"
1688 /*@
1689     DMLocalToGlobalEnd - updates global vectors from local vectors
1690 
1691     Neighbor-wise Collective on DM
1692 
1693     Input Parameters:
1694 +   dm - the DM object
1695 .   l - the local vector
1696 .   mode - INSERT_VALUES or ADD_VALUES
1697 -   g - the global vector
1698 
1699 
1700     Level: beginner
1701 
1702 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1703 
1704 @*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Complete the PetscSF reduction started in DMLocalToGlobalBegin() */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Recompute the MPI_Op from the mode; it must match the one used in
       DMLocalToGlobalBegin().  SETERRQ returns, so the #else branch cannot
       fall through into the ADD cases. */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the implementation; the *_ALL_VALUES modes are mapped to their plain equivalents */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1741 
1742 #undef __FUNCT__
1743 #define __FUNCT__ "DMCoarsen"
1744 /*@
1745     DMCoarsen - Coarsens a DM object
1746 
1747     Collective on DM
1748 
1749     Input Parameter:
1750 +   dm - the DM object
1751 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1752 
1753     Output Parameter:
1754 .   dmc - the coarsened DM
1755 
1756     Level: developer
1757 
1758 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1759 
1760 @*/
1761 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1762 {
1763   PetscErrorCode ierr;
1764   DMCoarsenHookLink link;
1765 
1766   PetscFunctionBegin;
1767   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1768   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1769   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1770   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1771   (*dmc)->ctx       = dm->ctx;
1772   (*dmc)->levelup   = dm->levelup;
1773   (*dmc)->leveldown = dm->leveldown + 1;
1774   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1775   for (link=dm->coarsenhook; link; link=link->next) {
1776     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1777   }
1778   PetscFunctionReturn(0);
1779 }
1780 
1781 #undef __FUNCT__
1782 #define __FUNCT__ "DMCoarsenHookAdd"
1783 /*@
1784    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1785 
1786    Logically Collective
1787 
1788    Input Arguments:
1789 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1790 .  coarsenhook - function to run when setting up a coarser level
1791 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1792 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1793 
1794    Calling sequence of coarsenhook:
1795 $    coarsenhook(DM fine,DM coarse,void *ctx);
1796 
1797 +  fine - fine level DM
1798 .  coarse - coarse level DM to restrict problem to
1799 -  ctx - optional user-defined function context
1800 
1801    Calling sequence for restricthook:
1802 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1803 
1804 +  fine - fine level DM
1805 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1806 .  rscale - scaling vector for restriction
1807 .  inject - matrix restricting by injection
1808 .  coarse - coarse level DM to update
1809 -  ctx - optional user-defined function context
1810 
1811    Level: advanced
1812 
1813    Notes:
1814    This function is only needed if auxiliary data needs to be set up on coarse grids.
1815 
1816    If this function is called multiple times, the hooks will be run in the order they are added.
1817 
1818    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1819    extract the finest level information from its context (instead of from the SNES).
1820 
1821 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1822 @*/
1823 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1824 {
1825   PetscErrorCode ierr;
1826   DMCoarsenHookLink link,*p;
1827 
1828   PetscFunctionBegin;
1829   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1830   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1831   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1832   link->coarsenhook = coarsenhook;
1833   link->restricthook = restricthook;
1834   link->ctx = ctx;
1835   link->next = PETSC_NULL;
1836   *p = link;
1837   PetscFunctionReturn(0);
1838 }
1839 
1840 #undef __FUNCT__
1841 #define __FUNCT__ "DMRestrict"
1842 /*@
1843    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1844 
1845    Collective if any hooks are
1846 
1847    Input Arguments:
1848 +  fine - finer DM to use as a base
1849 .  restrct - restriction matrix, apply using MatRestrict()
1850 .  inject - injection matrix, also use MatRestrict()
1851 -  coarse - coarer DM to update
1852 
1853    Level: developer
1854 
1855 .seealso: DMCoarsenHookAdd(), MatRestrict()
1856 @*/
1857 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1858 {
1859   PetscErrorCode ierr;
1860   DMCoarsenHookLink link;
1861 
1862   PetscFunctionBegin;
1863   for (link=fine->coarsenhook; link; link=link->next) {
1864     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1865   }
1866   PetscFunctionReturn(0);
1867 }
1868 
1869 #undef __FUNCT__
1870 #define __FUNCT__ "DMBlockRestrictHookAdd"
1871 /*@
1872    DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1873 
1874    Logically Collective
1875 
1876    Input Arguments:
1877 +  global - global DM
1878 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1879 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1880 
1881    Calling sequence for restricthook:
1882 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1883 
1884 +  global - global DM
1885 .  out    - scatter to the outer (with ghost and overlap points) block vector
1886 .  in     - scatter to block vector values only owned locally
1887 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1888 -  ctx - optional user-defined function context
1889 
1890    Level: advanced
1891 
1892    Notes:
1893    This function is only needed if auxiliary data needs to be set up on coarse grids.
1894 
1895    If this function is called multiple times, the hooks will be run in the order they are added.
1896 
1897    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1898    extract the finest level information from its context (instead of from the SNES).
1899 
1900 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1901 @*/
1902 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1903 {
1904   PetscErrorCode ierr;
1905   DMBlockRestrictHookLink link,*p;
1906 
1907   PetscFunctionBegin;
1908   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1909   for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1910   ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
1911   link->restricthook = restricthook;
1912   link->ctx = ctx;
1913   link->next = PETSC_NULL;
1914   *p = link;
1915   PetscFunctionReturn(0);
1916 }
1917 
1918 #undef __FUNCT__
1919 #define __FUNCT__ "DMBlockRestrict"
1920 /*@
1921    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1922 
1923    Collective if any hooks are
1924 
1925    Input Arguments:
1926 +  fine - finer DM to use as a base
1927 .  restrct - restriction matrix, apply using MatRestrict()
1928 .  inject - injection matrix, also use MatRestrict()
1929 -  coarse - coarer DM to update
1930 
1931    Level: developer
1932 
1933 .seealso: DMCoarsenHookAdd(), MatRestrict()
1934 @*/
1935 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1936 {
1937   PetscErrorCode ierr;
1938   DMBlockRestrictHookLink link;
1939 
1940   PetscFunctionBegin;
1941   for (link=global->blockrestricthook; link; link=link->next) {
1942     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1943   }
1944   PetscFunctionReturn(0);
1945 }
1946 
1947 #undef __FUNCT__
1948 #define __FUNCT__ "DMGetCoarsenLevel"
1949 /*@
1950     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
1951 
1952     Not Collective
1953 
1954     Input Parameter:
1955 .   dm - the DM object
1956 
1957     Output Parameter:
1958 .   level - number of coarsenings
1959 
1960     Level: developer
1961 
1962 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1963 
1964 @*/
1965 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
1966 {
1967   PetscFunctionBegin;
1968   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1969   *level = dm->leveldown;
1970   PetscFunctionReturn(0);
1971 }
1972 
1973 
1974 
1975 #undef __FUNCT__
1976 #define __FUNCT__ "DMRefineHierarchy"
1977 /*@C
1978     DMRefineHierarchy - Refines a DM object, all levels at once
1979 
1980     Collective on DM
1981 
1982     Input Parameter:
1983 +   dm - the DM object
1984 -   nlevels - the number of levels of refinement
1985 
1986     Output Parameter:
1987 .   dmf - the refined DM hierarchy
1988 
1989     Level: developer
1990 
1991 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1992 
1993 @*/
1994 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
1995 {
1996   PetscErrorCode ierr;
1997 
1998   PetscFunctionBegin;
1999   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2000   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2001   if (nlevels == 0) PetscFunctionReturn(0);
2002   if (dm->ops->refinehierarchy) {
2003     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2004   } else if (dm->ops->refine) {
2005     PetscInt i;
2006 
2007     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2008     for (i=1; i<nlevels; i++) {
2009       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2010     }
2011   } else {
2012     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2013   }
2014   PetscFunctionReturn(0);
2015 }
2016 
2017 #undef __FUNCT__
2018 #define __FUNCT__ "DMCoarsenHierarchy"
2019 /*@C
2020     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2021 
2022     Collective on DM
2023 
2024     Input Parameter:
2025 +   dm - the DM object
2026 -   nlevels - the number of levels of coarsening
2027 
2028     Output Parameter:
2029 .   dmc - the coarsened DM hierarchy
2030 
2031     Level: developer
2032 
2033 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2034 
2035 @*/
2036 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2037 {
2038   PetscErrorCode ierr;
2039 
2040   PetscFunctionBegin;
2041   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2042   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2043   if (nlevels == 0) PetscFunctionReturn(0);
2044   PetscValidPointer(dmc,3);
2045   if (dm->ops->coarsenhierarchy) {
2046     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2047   } else if (dm->ops->coarsen) {
2048     PetscInt i;
2049 
2050     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2051     for (i=1; i<nlevels; i++) {
2052       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2053     }
2054   } else {
2055     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2056   }
2057   PetscFunctionReturn(0);
2058 }
2059 
2060 #undef __FUNCT__
2061 #define __FUNCT__ "DMCreateAggregates"
2062 /*@
2063    DMCreateAggregates - Gets the aggregates that map between
2064    grids associated with two DMs.
2065 
2066    Collective on DM
2067 
2068    Input Parameters:
2069 +  dmc - the coarse grid DM
2070 -  dmf - the fine grid DM
2071 
2072    Output Parameters:
2073 .  rest - the restriction matrix (transpose of the projection matrix)
2074 
2075    Level: intermediate
2076 
2077 .keywords: interpolation, restriction, multigrid
2078 
2079 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2080 @*/
2081 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2082 {
2083   PetscErrorCode ierr;
2084 
2085   PetscFunctionBegin;
2086   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2087   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2088   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2089   PetscFunctionReturn(0);
2090 }
2091 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Only the function pointer is stored; it replaces any previously set destroy function */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2115 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

    Note: Only the pointer is stored; the context is not copied. A destroy
    function for it may be registered with DMSetApplicationContextDestroy().

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2139 
2140 #undef __FUNCT__
2141 #define __FUNCT__ "DMGetApplicationContext"
2142 /*@
2143     DMGetApplicationContext - Gets a user context from a DM object
2144 
2145     Not Collective
2146 
2147     Input Parameter:
2148 .   dm - the DM object
2149 
2150     Output Parameter:
2151 .   ctx - the user context
2152 
2153     Level: intermediate
2154 
2155 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2156 
2157 @*/
2158 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2159 {
2160   PetscFunctionBegin;
2161   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2162   *(void**)ctx = dm->ctx;
2163   PetscFunctionReturn(0);
2164 }
2165 
2166 #undef __FUNCT__
2167 #define __FUNCT__ "DMSetVariableBounds"
2168 /*@C
2169     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2170 
2171     Logically Collective on DM
2172 
2173     Input Parameter:
2174 +   dm - the DM object
2175 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2176 
2177     Level: intermediate
2178 
2179 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2180          DMSetJacobian()
2181 
2182 @*/
2183 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2184 {
2185   PetscFunctionBegin;
2186   dm->ops->computevariablebounds = f;
2187   PetscFunctionReturn(0);
2188 }
2189 
2190 #undef __FUNCT__
2191 #define __FUNCT__ "DMHasVariableBounds"
2192 /*@
2193     DMHasVariableBounds - does the DM object have a variable bounds function?
2194 
2195     Not Collective
2196 
2197     Input Parameter:
2198 .   dm - the DM object to destroy
2199 
2200     Output Parameter:
2201 .   flg - PETSC_TRUE if the variable bounds function exists
2202 
2203     Level: developer
2204 
2205 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2206 
2207 @*/
2208 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2209 {
2210   PetscFunctionBegin;
2211   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2212   PetscFunctionReturn(0);
2213 }
2214 
2215 #undef __FUNCT__
2216 #define __FUNCT__ "DMComputeVariableBounds"
2217 /*@C
2218     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2219 
2220     Logically Collective on DM
2221 
2222     Input Parameters:
2223 +   dm - the DM object to destroy
2224 -   x  - current solution at which the bounds are computed
2225 
2226     Output parameters:
2227 +   xl - lower bound
2228 -   xu - upper bound
2229 
2230     Level: intermediate
2231 
2232 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2233 
2234 @*/
2235 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2236 {
2237   PetscErrorCode ierr;
2238   PetscFunctionBegin;
2239   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2240   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2241   if (dm->ops->computevariablebounds) {
2242     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2243   }
2244   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2245   PetscFunctionReturn(0);
2246 }
2247 
2248 #undef __FUNCT__
2249 #define __FUNCT__ "DMHasColoring"
2250 /*@
2251     DMHasColoring - does the DM object have a method of providing a coloring?
2252 
2253     Not Collective
2254 
2255     Input Parameter:
2256 .   dm - the DM object
2257 
2258     Output Parameter:
2259 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2260 
2261     Level: developer
2262 
2263 .seealso DMHasFunction(), DMCreateColoring()
2264 
2265 @*/
2266 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2267 {
2268   PetscFunctionBegin;
2269   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2270   PetscFunctionReturn(0);
2271 }
2272 
2273 #undef  __FUNCT__
2274 #define __FUNCT__ "DMSetVec"
2275 /*@C
2276     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2277 
2278     Collective on DM
2279 
2280     Input Parameter:
2281 +   dm - the DM object
2282 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2283 
2284     Level: developer
2285 
2286 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2287 
2288 @*/
2289 PetscErrorCode  DMSetVec(DM dm,Vec x)
2290 {
2291   PetscErrorCode ierr;
2292   PetscFunctionBegin;
2293   if (x) {
2294     if (!dm->x) {
2295       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2296     }
2297     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2298   }
2299   else if (dm->x) {
2300     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2301   }
2302   PetscFunctionReturn(0);
2303 }
2304 
PetscFList DMList                       = PETSC_NULL;  /* list of registered DM types; populated by DMRegister()/DMRegisterAll() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE; /* guards against running DMRegisterAll() more than once */
2307 
2308 #undef __FUNCT__
2309 #define __FUNCT__ "DMSetType"
2310 /*@C
2311   DMSetType - Builds a DM, for a particular DM implementation.
2312 
2313   Collective on DM
2314 
2315   Input Parameters:
2316 + dm     - The DM object
2317 - method - The name of the DM type
2318 
2319   Options Database Key:
2320 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2321 
2322   Notes:
2323   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2324 
2325   Level: intermediate
2326 
2327 .keywords: DM, set, type
2328 .seealso: DMGetType(), DMCreate()
2329 @*/
2330 PetscErrorCode  DMSetType(DM dm, DMType method)
2331 {
2332   PetscErrorCode (*r)(DM);
2333   PetscBool      match;
2334   PetscErrorCode ierr;
2335 
2336   PetscFunctionBegin;
2337   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2338   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2339   if (match) PetscFunctionReturn(0);
2340 
2341   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2342   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2343   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2344 
2345   if (dm->ops->destroy) {
2346     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2347     dm->ops->destroy = PETSC_NULL;
2348   }
2349   ierr = (*r)(dm);CHKERRQ(ierr);
2350   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2351   PetscFunctionReturn(0);
2352 }
2353 
2354 #undef __FUNCT__
2355 #define __FUNCT__ "DMGetType"
2356 /*@C
2357   DMGetType - Gets the DM type name (as a string) from the DM.
2358 
2359   Not Collective
2360 
2361   Input Parameter:
2362 . dm  - The DM
2363 
2364   Output Parameter:
2365 . type - The DM type name
2366 
2367   Level: intermediate
2368 
2369 .keywords: DM, get, type, name
2370 .seealso: DMSetType(), DMCreate()
2371 @*/
2372 PetscErrorCode  DMGetType(DM dm, DMType *type)
2373 {
2374   PetscErrorCode ierr;
2375 
2376   PetscFunctionBegin;
2377   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2378   PetscValidCharPointer(type,2);
2379   if (!DMRegisterAllCalled) {
2380     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2381   }
2382   *type = ((PetscObject)dm)->type_name;
2383   PetscFunctionReturn(0);
2384 }
2385 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed here but never consulted below --
     confirm whether a fast path (e.g. DMDuplicate for the same type) was intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Fail: there is no generic fallback converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class;
          converters are composed functions named "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class.
           A temporary DM of the target type is created only to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) No converter was found: error out (no generic fallback exists) */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2481 
2482 /*--------------------------------------------------------------------------------------------------------------------*/
2483 
2484 #undef __FUNCT__
2485 #define __FUNCT__ "DMRegister"
2486 /*@C
2487   DMRegister - See DMRegisterDynamic()
2488 
2489   Level: advanced
2490 @*/
2491 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2492 {
2493   char fullname[PETSC_MAX_PATH_LEN];
2494   PetscErrorCode ierr;
2495 
2496   PetscFunctionBegin;
2497   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2498   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2499   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2500   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2501   PetscFunctionReturn(0);
2502 }
2503 
2504 
2505 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow DMRegisterAll() to repopulate the list on the next DMSetType()/DMGetType() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2527 
2528 #undef __FUNCT__
2529 #define __FUNCT__ "DMLoad"
2530 /*@C
2531   DMLoad - Loads a DM that has been stored in binary  with DMView().
2532 
2533   Collective on PetscViewer
2534 
2535   Input Parameters:
2536 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2537            some related function before a call to DMLoad().
2538 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2539            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2540 
2541    Level: intermediate
2542 
2543   Notes:
2544    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2545 
2546   Notes for advanced users:
2547   Most users should not need to know the details of the binary storage
2548   format, since DMLoad() and DMView() completely hide these details.
2549   But for anyone who's interested, the standard binary matrix storage
2550   format is
2551 .vb
2552      has not yet been determined
2553 .ve
2554 
2555 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2556 @*/
2557 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2558 {
2559   PetscErrorCode ierr;
2560   PetscBool      isbinary;
2561   PetscInt       classid;
2562   char           type[256];
2563 
2564   PetscFunctionBegin;
2565   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2566   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2567   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2568   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2569 
2570   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2571   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2572   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2573   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2574   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2575   PetscFunctionReturn(0);
2576 }
2577 
2578 /******************************** FEM Support **********************************/
2579 
2580 #undef __FUNCT__
2581 #define __FUNCT__ "DMPrintCellVector"
2582 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2583   PetscInt       f;
2584   PetscErrorCode ierr;
2585 
2586   PetscFunctionBegin;
2587   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2588   for (f = 0; f < len; ++f) {
2589     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2590   }
2591   PetscFunctionReturn(0);
2592 }
2593 
2594 #undef __FUNCT__
2595 #define __FUNCT__ "DMPrintCellMatrix"
2596 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2597   PetscInt       f, g;
2598   PetscErrorCode ierr;
2599 
2600   PetscFunctionBegin;
2601   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2602   for (f = 0; f < rows; ++f) {
2603     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2604     for (g = 0; g < cols; ++g) {
2605       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2606     }
2607     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2608   }
2609   PetscFunctionReturn(0);
2610 }
2611 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection (may be PETSC_NULL if none has been set)

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Return the stored section directly; the DM retains ownership */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2636 
2637 #undef __FUNCT__
2638 #define __FUNCT__ "DMSetDefaultSection"
2639 /*@
2640   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2641 
2642   Input Parameters:
2643 + dm - The DM
2644 - section - The PetscSection
2645 
2646   Level: intermediate
2647 
2648   Note: Any existing Section will be destroyed
2649 
2650 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2651 @*/
2652 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2653   PetscInt       numFields;
2654   PetscInt       f;
2655   PetscErrorCode ierr;
2656 
2657   PetscFunctionBegin;
2658   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2659   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2660   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2661   dm->defaultSection = section;
2662   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2663   if (numFields) {
2664     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2665     for (f = 0; f < numFields; ++f) {
2666       const char *name;
2667 
2668       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2669       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2670     }
2671   }
2672   PetscFunctionReturn(0);
2673 }
2674 
2675 #undef __FUNCT__
2676 #define __FUNCT__ "DMGetDefaultGlobalSection"
2677 /*@
2678   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2679 
2680   Collective on DM
2681 
2682   Input Parameter:
2683 . dm - The DM
2684 
2685   Output Parameter:
2686 . section - The PetscSection
2687 
2688   Level: intermediate
2689 
2690   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2691 
2692 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2693 @*/
2694 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
2695   PetscErrorCode ierr;
2696 
2697   PetscFunctionBegin;
2698   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2699   PetscValidPointer(section, 2);
2700   if (!dm->defaultGlobalSection) {
2701     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2702     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2703     ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2704   }
2705   *section = dm->defaultGlobalSection;
2706   PetscFunctionReturn(0);
2707 }
2708 
2709 #undef __FUNCT__
2710 #define __FUNCT__ "DMSetDefaultGlobalSection"
2711 /*@
2712   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2713 
2714   Input Parameters:
2715 + dm - The DM
2716 - section - The PetscSection
2717 
2718   Level: intermediate
2719 
2720   Note: Any existing Section will be destroyed
2721 
2722 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2723 @*/
2724 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2725   PetscErrorCode ierr;
2726 
2727   PetscFunctionBegin;
2728   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2729   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2730   dm->defaultGlobalSection = section;
2731   PetscFunctionReturn(0);
2732 }
2733 
2734 #undef __FUNCT__
2735 #define __FUNCT__ "DMGetDefaultSF"
2736 /*@
2737   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2738   it is created from the default PetscSection layouts in the DM.
2739 
2740   Input Parameter:
2741 . dm - The DM
2742 
2743   Output Parameter:
2744 . sf - The PetscSF
2745 
2746   Level: intermediate
2747 
2748   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2749 
2750 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2751 @*/
2752 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2753   PetscInt       nroots;
2754   PetscErrorCode ierr;
2755 
2756   PetscFunctionBegin;
2757   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2758   PetscValidPointer(sf, 2);
2759   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2760   if (nroots < 0) {
2761     PetscSection section, gSection;
2762 
2763     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2764     if (section) {
2765       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2766       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2767     } else {
2768       *sf = PETSC_NULL;
2769       PetscFunctionReturn(0);
2770     }
2771   }
2772   *sf = dm->defaultSF;
2773   PetscFunctionReturn(0);
2774 }
2775 
2776 #undef __FUNCT__
2777 #define __FUNCT__ "DMSetDefaultSF"
2778 /*@
2779   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2780 
2781   Input Parameters:
2782 + dm - The DM
2783 - sf - The PetscSF
2784 
2785   Level: intermediate
2786 
2787   Note: Any previous SF is destroyed
2788 
2789 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2790 @*/
2791 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2792   PetscErrorCode ierr;
2793 
2794   PetscFunctionBegin;
2795   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2796   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2797   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2798   dm->defaultSF = sf;
2799   PetscFunctionReturn(0);
2800 }
2801 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Notes: The resulting SF is stored in the DM (dm->defaultSF). Its roots are the
  locally owned global dofs and its leaves are local dof offsets; a negative
  global dof count on a point encodes remote ownership (decoded as -(gdof+1)).

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a PetscLayout over the unconstrained global dofs so that a global
     offset can be mapped to its owning rank via the range array below */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves (unconstrained dofs on every point, owned or not).
     gdof < 0 marks a remotely owned point, encoding the dof count as -(gdof+1). */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local leaf offsets and the (rank, index) of each root.
     l is the running leaf counter across all points. */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* Global and local sizes disagree: accept the unconstrained size and drop
         the local constraints, otherwise the layouts are inconsistent */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs; c counts the constrained
       indices (cind, sorted) skipped so far, so leaves stay densely packed */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset and locate the
         owning rank by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insert location)-1 when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: roots live on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2890 
2891 #undef __FUNCT__
2892 #define __FUNCT__ "DMGetPointSF"
2893 /*@
2894   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2895 
2896   Input Parameter:
2897 . dm - The DM
2898 
2899   Output Parameter:
2900 . sf - The PetscSF
2901 
2902   Level: intermediate
2903 
2904   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2905 
2906 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2907 @*/
2908 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
2909   PetscFunctionBegin;
2910   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2911   PetscValidPointer(sf, 2);
2912   *sf = dm->sf;
2913   PetscFunctionReturn(0);
2914 }
2915 
2916 #undef __FUNCT__
2917 #define __FUNCT__ "DMSetPointSF"
2918 /*@
2919   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2920 
2921   Input Parameters:
2922 + dm - The DM
2923 - sf - The PetscSF
2924 
2925   Level: intermediate
2926 
2927 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2928 @*/
2929 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2930   PetscErrorCode ierr;
2931 
2932   PetscFunctionBegin;
2933   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2934   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2935   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2936   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
2937   dm->sf = sf;
2938   PetscFunctionReturn(0);
2939 }
2940 
2941 #undef __FUNCT__
2942 #define __FUNCT__ "DMGetNumFields"
2943 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
2944 {
2945   PetscFunctionBegin;
2946   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2947   PetscValidPointer(numFields, 2);
2948   *numFields = dm->numFields;
2949   PetscFunctionReturn(0);
2950 }
2951 
2952 #undef __FUNCT__
2953 #define __FUNCT__ "DMSetNumFields"
2954 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
2955 {
2956   PetscInt       f;
2957   PetscErrorCode ierr;
2958 
2959   PetscFunctionBegin;
2960   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2961   for (f = 0; f < dm->numFields; ++f) {
2962     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
2963   }
2964   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
2965   dm->numFields = numFields;
2966   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
2967   for (f = 0; f < dm->numFields; ++f) {
2968     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
2969   }
2970   PetscFunctionReturn(0);
2971 }
2972 
2973 #undef __FUNCT__
2974 #define __FUNCT__ "DMGetField"
2975 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
2976 {
2977   PetscFunctionBegin;
2978   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2979   PetscValidPointer(field, 2);
2980   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
2981   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
2982   *field = dm->fields[f];
2983   PetscFunctionReturn(0);
2984 }
2985 
2986 #undef __FUNCT__
2987 #define __FUNCT__ "DMSetCoordinates"
2988 /*@
2989   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
2990 
2991   Collective on DM
2992 
2993   Input Parameters:
2994 + dm - the DM
2995 - c - coordinate vector
2996 
2997   Note:
2998   The coordinates do include those for ghost points, which are in the local vector
2999 
3000   Level: intermediate
3001 
3002 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3003 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3004 @*/
3005 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3006 {
3007   PetscErrorCode ierr;
3008 
3009   PetscFunctionBegin;
3010   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3011   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3012   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3013   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3014   dm->coordinates = c;
3015   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3016   PetscFunctionReturn(0);
3017 }
3018 
3019 #undef __FUNCT__
3020 #define __FUNCT__ "DMSetCoordinatesLocal"
3021 /*@
3022   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3023 
3024   Collective on DM
3025 
3026    Input Parameters:
3027 +  dm - the DM
3028 -  c - coordinate vector
3029 
3030   Note:
3031   The coordinates of ghost points can be set using DMSetCoordinates()
3032   followed by DMGetCoordinatesLocal(). This is intended to enable the
3033   setting of ghost coordinates outside of the domain.
3034 
3035   Level: intermediate
3036 
3037 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3038 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3039 @*/
3040 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3041 {
3042   PetscErrorCode ierr;
3043 
3044   PetscFunctionBegin;
3045   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3046   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3047   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3048   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3049   dm->coordinatesLocal = c;
3050   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3051   PetscFunctionReturn(0);
3052 }
3053 
3054 #undef __FUNCT__
3055 #define __FUNCT__ "DMGetCoordinates"
3056 /*@
3057   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3058 
3059   Not Collective
3060 
3061   Input Parameter:
3062 . dm - the DM
3063 
3064   Output Parameter:
3065 . c - global coordinate vector
3066 
3067   Note:
3068   This is a borrowed reference, so the user should NOT destroy this vector
3069 
3070   Each process has only the local coordinates (does NOT have the ghost coordinates).
3071 
3072   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3073   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3074 
3075   Level: intermediate
3076 
3077 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3078 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3079 @*/
3080 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3081 {
3082   PetscErrorCode ierr;
3083 
3084   PetscFunctionBegin;
3085   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3086   PetscValidPointer(c,2);
3087   if (!dm->coordinates && dm->coordinatesLocal) {
3088     DM cdm;
3089 
3090     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3091     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3092     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3093     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3094     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3095   }
3096   *c = dm->coordinates;
3097   PetscFunctionReturn(0);
3098 }
3099 
3100 #undef __FUNCT__
3101 #define __FUNCT__ "DMGetCoordinatesLocal"
3102 /*@
3103   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3104 
3105   Collective on DM
3106 
3107   Input Parameter:
3108 . dm - the DM
3109 
3110   Output Parameter:
3111 . c - coordinate vector
3112 
3113   Note:
3114   This is a borrowed reference, so the user should NOT destroy this vector
3115 
3116   Each process has the local and ghost coordinates
3117 
3118   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3119   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3120 
3121   Level: intermediate
3122 
3123 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3124 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3125 @*/
3126 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3127 {
3128   PetscErrorCode ierr;
3129 
3130   PetscFunctionBegin;
3131   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3132   PetscValidPointer(c,2);
3133   if (!dm->coordinatesLocal && dm->coordinates) {
3134     DM cdm;
3135 
3136     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3137     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3138     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3139     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3140     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3141   }
3142   *c = dm->coordinatesLocal;
3143   PetscFunctionReturn(0);
3144 }
3145 
3146 #undef __FUNCT__
3147 #define __FUNCT__ "DMGetCoordinateDM"
3148 /*@
3149   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3150 
3151   Collective on DM
3152 
3153   Input Parameter:
3154 . dm - the DM
3155 
3156   Output Parameter:
3157 . cdm - coordinate DM
3158 
3159   Level: intermediate
3160 
3161 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3162 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3163 @*/
3164 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3165 {
3166   PetscErrorCode ierr;
3167 
3168   PetscFunctionBegin;
3169   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3170   PetscValidPointer(cdm,2);
3171   if (!dm->coordinateDM) {
3172     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3173     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3174   }
3175   *cdm = dm->coordinateDM;
3176   PetscFunctionReturn(0);
3177 }
3178