xref: /petsc/src/dm/interface/dm.c (revision fa59e80594ae4f5b96fb1a0d88f82abc4bbaec69)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the DM.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
  /* With static libraries the Vec, Mat, and DM packages must be registered
     explicitly before their objects can be created */
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* No type has been set yet, so the function table starts out empty */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 is presumably the fixed capacity of the
       nullspaceConstructors array -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
64 
65 #undef __FUNCT__
66 #define __FUNCT__ "DMSetVecType"
67 /*@C
68        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
69 
70    Logically Collective on DMDA
71 
72    Input Parameter:
73 +  da - initial distributed array
74 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
75 
76    Options Database:
77 .   -dm_vec_type ctype
78 
79    Level: intermediate
80 
81 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
82 @*/
83 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
84 {
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   PetscValidHeaderSpecific(da,DM_CLASSID,1);
89   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
90   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
91   PetscFunctionReturn(0);
92 }
93 
94 #undef __FUNCT__
95 #define __FUNCT__ "VecGetDM"
96 /*@
97   VecGetDM - Gets the DM defining the data layout of the vector
98 
99   Not collective
100 
101   Input Parameter:
102 . v - The Vec
103 
104   Output Parameter:
105 . dm - The DM
106 
107   Level: intermediate
108 
109 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
110 @*/
111 PetscErrorCode VecGetDM(Vec v, DM *dm)
112 {
113   PetscErrorCode ierr;
114 
115   PetscFunctionBegin;
116   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
117   PetscValidPointer(dm,2);
118   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
119   PetscFunctionReturn(0);
120 }
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "DMSetMatType"
150 /*@C
151        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
152 
153    Logically Collective on DM
154 
155    Input Parameter:
156 +  dm - the DM context
157 .  ctype - the matrix type
158 
159    Options Database:
160 .   -dm_mat_type ctype
161 
162    Level: intermediate
163 
164 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
165 @*/
166 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
167 {
168   PetscErrorCode ierr;
169   PetscFunctionBegin;
170   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
171   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
172   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
173   PetscFunctionReturn(0);
174 }
175 
176 #undef __FUNCT__
177 #define __FUNCT__ "MatGetDM"
178 /*@
179   MatGetDM - Gets the DM defining the data layout of the matrix
180 
181   Not collective
182 
183   Input Parameter:
184 . A - The Mat
185 
186   Output Parameter:
187 . dm - The DM
188 
189   Level: intermediate
190 
191 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
192 @*/
193 PetscErrorCode MatGetDM(Mat A, DM *dm)
194 {
195   PetscErrorCode ierr;
196 
197   PetscFunctionBegin;
198   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
199   PetscValidPointer(dm,2);
200   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
201   PetscFunctionReturn(0);
202 }
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "MatSetDM"
206 /*@
207   MatSetDM - Sets the DM defining the data layout of the matrix
208 
209   Not collective
210 
211   Input Parameters:
212 + A - The Mat
213 - dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatSetDM(Mat A, DM dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
226   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "DMSetOptionsPrefix"
232 /*@C
233    DMSetOptionsPrefix - Sets the prefix used for searching for all
234    DMDA options in the database.
235 
236    Logically Collective on DMDA
237 
238    Input Parameter:
239 +  da - the DMDA context
240 -  prefix - the prefix to prepend to all option names
241 
242    Notes:
243    A hyphen (-) must NOT be given at the beginning of the prefix name.
244    The first character of all runtime options is AUTOMATICALLY the hyphen.
245 
246    Level: advanced
247 
248 .keywords: DMDA, set, options, prefix, database
249 
250 .seealso: DMSetFromOptions()
251 @*/
252 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
253 {
254   PetscErrorCode ierr;
255 
256   PetscFunctionBegin;
257   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
258   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
259   PetscFunctionReturn(0);
260 }
261 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to PETSC_NULL on return

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* if references other than the circular ones remain, just drop this one and return */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* work vectors checked out with DMGetLocalVector() must be restored before destroy */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* user context is destroyed only when a destroy callback was registered */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* the implementation-specific destroy runs last, after all shared state is released */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
382 
383 #undef __FUNCT__
384 #define __FUNCT__ "DMSetUp"
385 /*@
386     DMSetUp - sets up the data structures inside a DM object
387 
388     Collective on DM
389 
390     Input Parameter:
391 .   dm - the DM object to setup
392 
393     Level: developer
394 
395 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
396 
397 @*/
398 PetscErrorCode  DMSetUp(DM dm)
399 {
400   PetscErrorCode ierr;
401 
402   PetscFunctionBegin;
403   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
404   if (dm->setupcalled) PetscFunctionReturn(0);
405   if (dm->ops->setup) {
406     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
407   }
408   dm->setupcalled = PETSC_TRUE;
409   PetscFunctionReturn(0);
410 }
411 
412 #undef __FUNCT__
413 #define __FUNCT__ "DMSetFromOptions"
414 /*@
415     DMSetFromOptions - sets parameters in a DM from the options database
416 
417     Collective on DM
418 
419     Input Parameter:
420 .   dm - the DM object to set options for
421 
422     Options Database:
423 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
424 .   -dm_vec_type <type>  type of vector to create inside DM
425 .   -dm_mat_type <type>  type of matrix to create inside DM
426 -   -dm_coloring_type <global or ghosted>
427 
428     Level: developer
429 
430 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
431 
432 @*/
433 PetscErrorCode  DMSetFromOptions(DM dm)
434 {
435   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
436   PetscErrorCode ierr;
437   char           typeName[256] = MATAIJ;
438 
439   PetscFunctionBegin;
440   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
441   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
442     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
443     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
444     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
445     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
448     if (flg) {
449       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
450     }
451     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
452     if (flg) {
453       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
454     }
455     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
456     if (dm->ops->setfromoptions) {
457       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
458     }
459     /* process any options handlers added with PetscObjectAddOptionsHandler() */
460     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
461   ierr = PetscOptionsEnd();CHKERRQ(ierr);
462   if (flg1) {
463     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
464   }
465   if (flg2) {
466     PetscViewer viewer;
467 
468     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
469     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
470     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
471     ierr = DMView(dm, viewer);CHKERRQ(ierr);
472     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
473   }
474   if (flg3) {
475     PetscViewer viewer;
476 
477     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
478     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
479     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
480     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
481     ierr = DMView(dm, viewer);CHKERRQ(ierr);
482     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
483   }
484   if (flg4) {
485     PetscViewer viewer;
486 
487     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
488     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
489     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
490     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
491     ierr = DMView(dm, viewer);CHKERRQ(ierr);
492     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
493   }
494   PetscFunctionReturn(0);
495 }
496 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer, or PETSC_NULL for standard output on the DM's communicator

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    /* default viewer: stdout on the DM's communicator */
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt         classid = DM_FILE_CLASSID;
    MPI_Comm         comm;
    PetscMPIInt      rank;
    char             type[256];

    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    /* only rank 0 writes the binary header: the classid followed by the
       type name in a fixed 256-char field */
    if (!rank) {
      ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* the implementation-specific view writes the actual content */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
543 
544 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
545 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "DMCreateGlobalVector"
549 /*@
550     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
551 
552     Collective on DM
553 
554     Input Parameter:
555 .   dm - the DM object
556 
557     Output Parameter:
558 .   vec - the global vector
559 
560     Level: beginner
561 
562 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
563 
564 @*/
565 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
566 {
567   PetscErrorCode ierr;
568 
569   PetscFunctionBegin;
570   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
571   if (dm->defaultSection) {
572     PetscSection gSection;
573     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
574 
575     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
576     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
577     for(p = pStart; p < pEnd; ++p) {
578       PetscInt dof, cdof;
579 
580       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
581       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
582       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
583       if ((dof > 0) && (dof-cdof != blockSize)) {
584         blockSize = 1;
585         break;
586       }
587     }
588     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
589     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
590     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
591     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
592     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
593     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
594     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
595     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
596     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
597     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
598     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
599     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
600   } else {
601     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
602   }
603   PetscFunctionReturn(0);
604 }
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "DMCreateLocalVector"
608 /*@
609     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
610 
611     Not Collective
612 
613     Input Parameter:
614 .   dm - the DM object
615 
616     Output Parameter:
617 .   vec - the local vector
618 
619     Level: beginner
620 
621 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
622 
623 @*/
624 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
625 {
626   PetscErrorCode ierr;
627 
628   PetscFunctionBegin;
629   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
630   if (dm->defaultSection) {
631     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
632 
633     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
634     for(p = pStart; p < pEnd; ++p) {
635       PetscInt dof;
636 
637       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
638       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
639       if ((dof > 0) && (dof != blockSize)) {
640         blockSize = 1;
641         break;
642       }
643     }
644     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
645     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
646     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
647     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
648     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
649     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
650     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
651   } else {
652     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
653   }
654   PetscFunctionReturn(0);
655 }
656 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first call: from the default PetscSection
   when one is set, otherwise by the DM implementation.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      /* local index l enumerates the dofs of each chart point in order; each
         one maps to the point's global offset plus the component index */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
717 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

   When the DM's block size is 1 this returns the same mapping as
   DMGetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* block size 1: the block mapping is just the pointwise mapping,
         with an extra reference since both slots will be destroyed */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
760 
761 #undef __FUNCT__
762 #define __FUNCT__ "DMGetBlockSize"
763 /*@
764    DMGetBlockSize - Gets the inherent block size associated with a DM
765 
766    Not Collective
767 
768    Input Parameter:
769 .  dm - the DM with block structure
770 
771    Output Parameter:
772 .  bs - the block size, 1 implies no exploitable block structure
773 
774    Level: intermediate
775 
776 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
777 @*/
778 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
779 {
780   PetscFunctionBegin;
781   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
782   PetscValidPointer(bs,2);
783   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
784   *bs = dm->bs;
785   PetscFunctionReturn(0);
786 }
787 
788 #undef __FUNCT__
789 #define __FUNCT__ "DMCreateInterpolation"
790 /*@
791     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
792 
793     Collective on DM
794 
795     Input Parameter:
796 +   dm1 - the DM object
797 -   dm2 - the second, finer DM object
798 
799     Output Parameter:
800 +  mat - the interpolation
801 -  vec - the scaling (optional)
802 
803     Level: developer
804 
805     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
806         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
807 
808         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
809         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
810 
811 
812 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
813 
814 @*/
815 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
816 {
817   PetscErrorCode ierr;
818 
819   PetscFunctionBegin;
820   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
821   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
822   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
823   PetscFunctionReturn(0);
824 }
825 
826 #undef __FUNCT__
827 #define __FUNCT__ "DMCreateInjection"
828 /*@
829     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
830 
831     Collective on DM
832 
833     Input Parameter:
834 +   dm1 - the DM object
835 -   dm2 - the second, finer DM object
836 
837     Output Parameter:
838 .   ctx - the injection
839 
840     Level: developer
841 
842    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
843         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
844 
845 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
846 
847 @*/
848 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
849 {
850   PetscErrorCode ierr;
851 
852   PetscFunctionBegin;
853   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
854   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
855   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* coloring is optional for implementations, so check before dispatching */
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
889 
890 #undef __FUNCT__
891 #define __FUNCT__ "DMCreateMatrix"
892 /*@C
893     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
894 
895     Collective on DM
896 
897     Input Parameter:
898 +   dm - the DM object
899 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
900             any type which inherits from one of these (such as MATAIJ)
901 
902     Output Parameter:
903 .   mat - the empty Jacobian
904 
905     Level: beginner
906 
907     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
908        do not need to do it yourself.
909 
910        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
911        the nonzero pattern call DMDASetMatPreallocateOnly()
912 
913        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
914        internally by PETSc.
915 
916        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
917        the indices for the global numbering for DMDAs which is complicated.
918 
919 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
920 
921 @*/
922 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
923 {
924   PetscErrorCode ierr;
925 
926   PetscFunctionBegin;
927   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
928 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
929   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
930 #endif
931   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
932   PetscValidPointer(mat,3);
933   if (dm->mattype) {
934     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
935   } else {
936     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
937   }
938   PetscFunctionReturn(0);
939 }
940 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* flag stored on the DM; presumably consulted by the type-specific DMCreateMatrix()
     implementations -- not visible from this file */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
963 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum number of entries of type dtype
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array; pass the ADDRESS of your typed pointer cast to void*

  Level: developer

  Notes: The array contents are not zeroed; a previously used buffer may be recycled.

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free list (workin) if available, otherwise make a new one.
     NOTE(review): a fresh link's mem/bytes fields are relied upon being zeroed by
     PetscNewLog -- confirm it zero-fills. */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer only when the request exceeds its current capacity */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list (workout); DMRestoreWorkArray() undoes this */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1009 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Returns a work array obtained with DMGetWorkArray() to the DM's pool

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size passed to DMGetWorkArray() (currently unused here)
- dtype - data type passed to DMGetWorkArray() (currently unused here)

  Output Parameter:
. array - the work array; set to PETSC_NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link holding this buffer and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;                 /* unlink from the checked-out (workout) list */
      link->next = dm->workin;         /* push onto the free (workin) list */
      dm->workin = link;
      *(void**)mem = PETSC_NULL;       /* poison the caller's pointer against reuse */
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1048 
1049 #undef __FUNCT__
1050 #define __FUNCT__ "DMSetNullSpaceConstructor"
1051 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1052 {
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1055   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1056   dm->nullspaceConstructors[field] = nullsp;
1057   PetscFunctionReturn(0);
1058 }
1059 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so it is well-defined even on the fallback path */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the unconstrained dofs of each field over locally owned points
       (gdof > 0 marks ownership in the global section) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reset so the second pass can reuse it as a cursor */
    }
    /* Second pass: assign consecutive global indices (starting at the point's global
       offset) to each field's dofs, in field order within each point */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes over fieldIndices[f], so it is not freed here */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: delegate to the DM implementation if it provides one */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1172 
1173 #undef __FUNCT__
1174 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1175 /*@C
1176   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1177 
1178   Not Collective
1179 
1180   Input Parameters:
1181 + dm   - the DM object
1182 - name - the name of the field decomposition
1183 
1184   Output Parameter:
1185 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1186 
1187   Level: advanced
1188 
1189 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1190 @*/
1191 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1192 {
1193   PetscErrorCode ierr;
1194 
1195   PetscFunctionBegin;
1196   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1197   PetscValidCharPointer(name,2);
1198   PetscValidPointer(ddm,3);
1199   *ddm = PETSC_NULL;
1200   if (dm->ops->createfielddecompositiondm) {
1201     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1202   }
1203   PetscFunctionReturn(0);
1204 }
1205 
1206 #undef __FUNCT__
1207 #define __FUNCT__ "DMCreateFieldDecomposition"
1208 /*@C
1209   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1210                           corresponding to different fields: each IS contains the global indices of the dofs of the
1211                           corresponding field. The optional list of DMs define the DM for each subproblem.
1212                           Generalizes DMCreateFieldIS().
1213 
1214   Not collective
1215 
1216   Input Parameter:
1217 . dm - the DM object
1218 
1219   Output Parameters:
1220 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1221 . namelist  - The name for each field (or PETSC_NULL if not requested)
1222 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1223 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1224 
1225   Level: intermediate
1226 
1227   Notes:
1228   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1229   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1230   and all of the arrays should be freed with PetscFree().
1231 
1232 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1233 @*/
1234 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1235 {
1236   PetscErrorCode ierr;
1237 
1238   PetscFunctionBegin;
1239   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1240   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1241   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1242   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1243   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1244   if (!dm->ops->createfielddecomposition) {
1245     PetscSection section;
1246     PetscInt     numFields, f;
1247 
1248     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1249     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1250     if (section && numFields && dm->ops->createsubdm) {
1251       *len = numFields;
1252       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1253       for (f = 0; f < numFields; ++f) {
1254         const char *fieldName;
1255 
1256         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1257         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1258         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1259       }
1260     } else {
1261       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1262       /* By default there are no DMs associated with subproblems. */
1263       if (dmlist) *dmlist = PETSC_NULL;
1264     }
1265   }
1266   else {
1267     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1268   }
1269   PetscFunctionReturn(0);
1270 }
1271 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers defining the subproblem, numbered as in DMCreateFieldIS()

  Output Parameters:
+ is    - The global indices for the subproblem (optional)
- subdm - The DM for the subproblem (optional)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* Both outputs are optional; only validate the ones the caller requested */
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1307 
1308 #undef __FUNCT__
1309 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1310 /*@C
1311   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1312 
1313   Not Collective
1314 
1315   Input Parameters:
1316 + dm   - the DM object
1317 - name - the name of the subdomain decomposition
1318 
1319   Output Parameter:
1320 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1321 
1322   Level: advanced
1323 
1324 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1325 @*/
1326 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1327 {
1328   PetscErrorCode ierr;
1329 
1330   PetscFunctionBegin;
1331   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1332   PetscValidCharPointer(name,2);
1333   PetscValidPointer(ddm,3);
1334   *ddm = PETSC_NULL;
1335   if (dm->ops->createdomaindecompositiondm) {
1336     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1337   }
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "DMCreateDomainDecomposition"
1343 /*@C
1344   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1345                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1346                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1347                           define a nonoverlapping covering, while outer subdomains can overlap.
1348                           The optional list of DMs define the DM for each subproblem.
1349 
1350   Not collective
1351 
1352   Input Parameter:
1353 . dm - the DM object
1354 
1355   Output Parameters:
1356 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1357 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1358 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1359 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1360 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1361 
1362   Level: intermediate
1363 
1364   Notes:
1365   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1366   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1367   and all of the arrays should be freed with PetscFree().
1368 
1369 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1370 @*/
1371 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1372 {
1373   PetscErrorCode ierr;
1374 
1375   PetscFunctionBegin;
1376   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1377   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1378   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1379   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1380   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1381   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1382   if (dm->ops->createdomaindecomposition) {
1383     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1384   }
1385   PetscFunctionReturn(0);
1386 }
1387 
#undef __FUNCT__
#define __FUNCT__ "DMRefine"
/*@
  DMRefine - Refines a DM object

  Collective on DM

  Input Parameter:
+ dm   - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
. dmf - the refined DM, or PETSC_NULL

  Note: If no refinement was done, the return value is PETSC_NULL

  Level: developer

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
@*/
PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): dm->ops->refine is called without a NULL check, unlike
     DMCreateColoring(); a DM type without refine would crash here -- confirm
     whether all registered types implement it */
  ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
  if (*dmf) {
    /* Propagate settings from the coarse DM to the new fine DM */
    (*dmf)->ops->creatematrix = dm->ops->creatematrix;
    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
    (*dmf)->ctx       = dm->ctx;
    (*dmf)->leveldown = dm->leveldown;
    (*dmf)->levelup   = dm->levelup + 1;  /* the new DM is one refinement level up */
    ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
    /* Run user hooks registered with DMRefineHookAdd(), in registration order */
    for (link=dm->refinehook; link; link=link->next) {
      if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
    }
  }
  PetscFunctionReturn(0);
}
1429 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - coarse DM on which to register hooks run when refining to a finer level
.  refinehook - function to run when setting up a finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in the order they were registered */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1483 
1484 #undef __FUNCT__
1485 #define __FUNCT__ "DMInterpolate"
1486 /*@
1487    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1488 
1489    Collective if any hooks are
1490 
1491    Input Arguments:
1492 +  coarse - coarser DM to use as a base
1493 .  restrct - interpolation matrix, apply using MatInterpolate()
1494 -  fine - finer DM to update
1495 
1496    Level: developer
1497 
1498 .seealso: DMRefineHookAdd(), MatInterpolate()
1499 @*/
1500 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1501 {
1502   PetscErrorCode ierr;
1503   DMRefineHookLink link;
1504 
1505   PetscFunctionBegin;
1506   for (link=fine->refinehook; link; link=link->next) {
1507     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1508   }
1509   PetscFunctionReturn(0);
1510 }
1511 
1512 #undef __FUNCT__
1513 #define __FUNCT__ "DMGetRefineLevel"
1514 /*@
1515     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1516 
1517     Not Collective
1518 
1519     Input Parameter:
1520 .   dm - the DM object
1521 
1522     Output Parameter:
1523 .   level - number of refinements
1524 
1525     Level: developer
1526 
1527 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1528 
1529 @*/
1530 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1531 {
1532   PetscFunctionBegin;
1533   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1534   *level = dm->levelup;
1535   PetscFunctionReturn(0);
1536 }
1537 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only supports insertion semantics */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored while the broadcast is still in flight;
       DMGlobalToLocalEnd() re-fetches them -- relies on the raw buffers staying
       valid between Get/Restore pairs, confirm against the Vec implementation */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation, collapsing the *_ALL_VALUES
       modes to their plain equivalents */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1579 
1580 #undef __FUNCT__
1581 #define __FUNCT__ "DMGlobalToLocalEnd"
1582 /*@
1583     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1584 
1585     Neighbor-wise Collective on DM
1586 
1587     Input Parameters:
1588 +   dm - the DM object
1589 .   g - the global vector
1590 .   mode - INSERT_VALUES or ADD_VALUES
1591 -   l - the local vector
1592 
1593 
1594     Level: beginner
1595 
1596 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1597 
1598 @*/
1599 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1600 {
1601   PetscSF        sf;
1602   PetscErrorCode ierr;
1603   PetscScalar    *lArray, *gArray;
1604 
1605   PetscFunctionBegin;
1606   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1607   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1608   if (sf) {
1609   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1610 
1611     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1612     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1613     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1614     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1615     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1616   } else {
1617     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1618   }
1619   PetscFunctionReturn(0);
1620 }
1621 
1622 #undef __FUNCT__
1623 #define __FUNCT__ "DMLocalToGlobalBegin"
1624 /*@
1625     DMLocalToGlobalBegin - updates global vectors from local vectors
1626 
1627     Neighbor-wise Collective on DM
1628 
1629     Input Parameters:
1630 +   dm - the DM object
1631 .   l - the local vector
1632 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1633            base point.
1634 - - the global vector
1635 
1636     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1637            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1638            global array to the final global array with VecAXPY().
1639 
1640     Level: beginner
1641 
1642 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1643 
1644 @*/
1645 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1646 {
1647   PetscSF        sf;
1648   PetscErrorCode ierr;
1649 
1650   PetscFunctionBegin;
1651   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1652   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1653   if (sf) {
1654     MPI_Op       op;
1655     PetscScalar *lArray, *gArray;
1656 
1657     switch(mode) {
1658     case INSERT_VALUES:
1659     case INSERT_ALL_VALUES:
1660 #if defined(PETSC_HAVE_MPI_REPLACE)
1661       op = MPI_REPLACE; break;
1662 #else
1663       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1664 #endif
1665     case ADD_VALUES:
1666     case ADD_ALL_VALUES:
1667       op = MPI_SUM; break;
1668   default:
1669     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1670     }
1671     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1672     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1673     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1674     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1675     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1676   } else {
1677     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1678   }
1679   PetscFunctionReturn(0);
1680 }
1681 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES; must match the mode passed to DMLocalToGlobalBegin()
-   g - the global vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Recompute the MPI reduction from mode; must match DMLocalToGlobalBegin().
       With no MPI_REPLACE the INSERT cases error out (SETERRQ returns) */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation, collapsing the *_ALL_VALUES
       modes to their plain equivalents */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1737 
1738 #undef __FUNCT__
1739 #define __FUNCT__ "DMCoarsen"
1740 /*@
1741     DMCoarsen - Coarsens a DM object
1742 
1743     Collective on DM
1744 
1745     Input Parameter:
1746 +   dm - the DM object
1747 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1748 
1749     Output Parameter:
1750 .   dmc - the coarsened DM
1751 
1752     Level: developer
1753 
1754 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1755 
1756 @*/
1757 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1758 {
1759   PetscErrorCode ierr;
1760   DMCoarsenHookLink link;
1761 
1762   PetscFunctionBegin;
1763   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1764   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1765   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1766   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1767   (*dmc)->ctx       = dm->ctx;
1768   (*dmc)->levelup   = dm->levelup;
1769   (*dmc)->leveldown = dm->leveldown + 1;
1770   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1771   for (link=dm->coarsenhook; link; link=link->next) {
1772     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1773   }
1774   PetscFunctionReturn(0);
1775 }
1776 
1777 #undef __FUNCT__
1778 #define __FUNCT__ "DMCoarsenHookAdd"
1779 /*@
1780    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1781 
1782    Logically Collective
1783 
1784    Input Arguments:
1785 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1786 .  coarsenhook - function to run when setting up a coarser level
1787 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1788 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1789 
1790    Calling sequence of coarsenhook:
1791 $    coarsenhook(DM fine,DM coarse,void *ctx);
1792 
1793 +  fine - fine level DM
1794 .  coarse - coarse level DM to restrict problem to
1795 -  ctx - optional user-defined function context
1796 
1797    Calling sequence for restricthook:
1798 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1799 
1800 +  fine - fine level DM
1801 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1802 .  rscale - scaling vector for restriction
1803 .  inject - matrix restricting by injection
1804 .  coarse - coarse level DM to update
1805 -  ctx - optional user-defined function context
1806 
1807    Level: advanced
1808 
1809    Notes:
1810    This function is only needed if auxiliary data needs to be set up on coarse grids.
1811 
1812    If this function is called multiple times, the hooks will be run in the order they are added.
1813 
1814    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1815    extract the finest level information from its context (instead of from the SNES).
1816 
1817 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1818 @*/
1819 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1820 {
1821   PetscErrorCode ierr;
1822   DMCoarsenHookLink link,*p;
1823 
1824   PetscFunctionBegin;
1825   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1826   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1827   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1828   link->coarsenhook = coarsenhook;
1829   link->restricthook = restricthook;
1830   link->ctx = ctx;
1831   link->next = PETSC_NULL;
1832   *p = link;
1833   PetscFunctionReturn(0);
1834 }
1835 
1836 #undef __FUNCT__
1837 #define __FUNCT__ "DMRestrict"
1838 /*@
1839    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1840 
1841    Collective if any hooks are
1842 
1843    Input Arguments:
1844 +  fine - finer DM to use as a base
1845 .  restrct - restriction matrix, apply using MatRestrict()
1846 .  inject - injection matrix, also use MatRestrict()
1847 -  coarse - coarer DM to update
1848 
1849    Level: developer
1850 
1851 .seealso: DMCoarsenHookAdd(), MatRestrict()
1852 @*/
1853 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1854 {
1855   PetscErrorCode ierr;
1856   DMCoarsenHookLink link;
1857 
1858   PetscFunctionBegin;
1859   for (link=fine->coarsenhook; link; link=link->next) {
1860     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1861   }
1862   PetscFunctionReturn(0);
1863 }
1864 
1865 #undef __FUNCT__
1866 #define __FUNCT__ "DMBlockRestrictHookAdd"
1867 /*@
1868    DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1869 
1870    Logically Collective
1871 
1872    Input Arguments:
1873 +  global - global DM
1874 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1875 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1876 
1877    Calling sequence for restricthook:
1878 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1879 
1880 +  global - global DM
1881 .  out    - scatter to the outer (with ghost and overlap points) block vector
1882 .  in     - scatter to block vector values only owned locally
1883 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1884 -  ctx - optional user-defined function context
1885 
1886    Level: advanced
1887 
1888    Notes:
1889    This function is only needed if auxiliary data needs to be set up on coarse grids.
1890 
1891    If this function is called multiple times, the hooks will be run in the order they are added.
1892 
1893    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1894    extract the finest level information from its context (instead of from the SNES).
1895 
1896 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1897 @*/
1898 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1899 {
1900   PetscErrorCode ierr;
1901   DMBlockRestrictHookLink link,*p;
1902 
1903   PetscFunctionBegin;
1904   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1905   for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1906   ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
1907   link->restricthook = restricthook;
1908   link->ctx = ctx;
1909   link->next = PETSC_NULL;
1910   *p = link;
1911   PetscFunctionReturn(0);
1912 }
1913 
1914 #undef __FUNCT__
1915 #define __FUNCT__ "DMBlockRestrict"
1916 /*@
1917    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1918 
1919    Collective if any hooks are
1920 
1921    Input Arguments:
1922 +  fine - finer DM to use as a base
1923 .  restrct - restriction matrix, apply using MatRestrict()
1924 .  inject - injection matrix, also use MatRestrict()
1925 -  coarse - coarer DM to update
1926 
1927    Level: developer
1928 
1929 .seealso: DMCoarsenHookAdd(), MatRestrict()
1930 @*/
1931 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1932 {
1933   PetscErrorCode ierr;
1934   DMBlockRestrictHookLink link;
1935 
1936   PetscFunctionBegin;
1937   for (link=global->blockrestricthook; link; link=link->next) {
1938     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1939   }
1940   PetscFunctionReturn(0);
1941 }
1942 
1943 #undef __FUNCT__
1944 #define __FUNCT__ "DMGetCoarsenLevel"
1945 /*@
1946     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
1947 
1948     Not Collective
1949 
1950     Input Parameter:
1951 .   dm - the DM object
1952 
1953     Output Parameter:
1954 .   level - number of coarsenings
1955 
1956     Level: developer
1957 
1958 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1959 
1960 @*/
1961 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
1962 {
1963   PetscFunctionBegin;
1964   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1965   *level = dm->leveldown;
1966   PetscFunctionReturn(0);
1967 }
1968 
1969 
1970 
1971 #undef __FUNCT__
1972 #define __FUNCT__ "DMRefineHierarchy"
1973 /*@C
1974     DMRefineHierarchy - Refines a DM object, all levels at once
1975 
1976     Collective on DM
1977 
1978     Input Parameter:
1979 +   dm - the DM object
1980 -   nlevels - the number of levels of refinement
1981 
1982     Output Parameter:
1983 .   dmf - the refined DM hierarchy
1984 
1985     Level: developer
1986 
1987 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1988 
1989 @*/
1990 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
1991 {
1992   PetscErrorCode ierr;
1993 
1994   PetscFunctionBegin;
1995   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1996   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1997   if (nlevels == 0) PetscFunctionReturn(0);
1998   if (dm->ops->refinehierarchy) {
1999     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2000   } else if (dm->ops->refine) {
2001     PetscInt i;
2002 
2003     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2004     for (i=1; i<nlevels; i++) {
2005       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2006     }
2007   } else {
2008     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2009   }
2010   PetscFunctionReturn(0);
2011 }
2012 
2013 #undef __FUNCT__
2014 #define __FUNCT__ "DMCoarsenHierarchy"
2015 /*@C
2016     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2017 
2018     Collective on DM
2019 
2020     Input Parameter:
2021 +   dm - the DM object
2022 -   nlevels - the number of levels of coarsening
2023 
2024     Output Parameter:
2025 .   dmc - the coarsened DM hierarchy
2026 
2027     Level: developer
2028 
2029 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2030 
2031 @*/
2032 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2033 {
2034   PetscErrorCode ierr;
2035 
2036   PetscFunctionBegin;
2037   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2038   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2039   if (nlevels == 0) PetscFunctionReturn(0);
2040   PetscValidPointer(dmc,3);
2041   if (dm->ops->coarsenhierarchy) {
2042     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2043   } else if (dm->ops->coarsen) {
2044     PetscInt i;
2045 
2046     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2047     for (i=1; i<nlevels; i++) {
2048       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2049     }
2050   } else {
2051     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2052   }
2053   PetscFunctionReturn(0);
2054 }
2055 
2056 #undef __FUNCT__
2057 #define __FUNCT__ "DMCreateAggregates"
2058 /*@
2059    DMCreateAggregates - Gets the aggregates that map between
2060    grids associated with two DMs.
2061 
2062    Collective on DM
2063 
2064    Input Parameters:
2065 +  dmc - the coarse grid DM
2066 -  dmf - the fine grid DM
2067 
2068    Output Parameters:
2069 .  rest - the restriction matrix (transpose of the projection matrix)
2070 
2071    Level: intermediate
2072 
2073 .keywords: interpolation, restriction, multigrid
2074 
2075 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2076 @*/
2077 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2078 {
2079   PetscErrorCode ierr;
2080 
2081   PetscFunctionBegin;
2082   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2083   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2084   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2085   PetscFunctionReturn(0);
2086 }
2087 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked when the DM is destroyed to free the user context set by DMSetApplicationContext() */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2111 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores (but does not own) the pointer unless a destroy routine
     is registered with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2135 
2136 #undef __FUNCT__
2137 #define __FUNCT__ "DMGetApplicationContext"
2138 /*@
2139     DMGetApplicationContext - Gets a user context from a DM object
2140 
2141     Not Collective
2142 
2143     Input Parameter:
2144 .   dm - the DM object
2145 
2146     Output Parameter:
2147 .   ctx - the user context
2148 
2149     Level: intermediate
2150 
2151 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2152 
2153 @*/
2154 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2155 {
2156   PetscFunctionBegin;
2157   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2158   *(void**)ctx = dm->ctx;
2159   PetscFunctionReturn(0);
2160 }
2161 
2162 #undef __FUNCT__
2163 #define __FUNCT__ "DMSetVariableBounds"
2164 /*@C
2165     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2166 
2167     Logically Collective on DM
2168 
2169     Input Parameter:
2170 +   dm - the DM object
2171 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2172 
2173     Level: intermediate
2174 
2175 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2176          DMSetJacobian()
2177 
2178 @*/
2179 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2180 {
2181   PetscFunctionBegin;
2182   dm->ops->computevariablebounds = f;
2183   PetscFunctionReturn(0);
2184 }
2185 
2186 #undef __FUNCT__
2187 #define __FUNCT__ "DMHasVariableBounds"
2188 /*@
2189     DMHasVariableBounds - does the DM object have a variable bounds function?
2190 
2191     Not Collective
2192 
2193     Input Parameter:
2194 .   dm - the DM object to destroy
2195 
2196     Output Parameter:
2197 .   flg - PETSC_TRUE if the variable bounds function exists
2198 
2199     Level: developer
2200 
2201 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2202 
2203 @*/
2204 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2205 {
2206   PetscFunctionBegin;
2207   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2208   PetscFunctionReturn(0);
2209 }
2210 
2211 #undef __FUNCT__
2212 #define __FUNCT__ "DMComputeVariableBounds"
2213 /*@C
2214     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2215 
2216     Logically Collective on DM
2217 
2218     Input Parameters:
2219 +   dm - the DM object to destroy
2220 -   x  - current solution at which the bounds are computed
2221 
2222     Output parameters:
2223 +   xl - lower bound
2224 -   xu - upper bound
2225 
2226     Level: intermediate
2227 
2228 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2229 
2230 @*/
2231 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2232 {
2233   PetscErrorCode ierr;
2234   PetscFunctionBegin;
2235   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2236   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2237   if (dm->ops->computevariablebounds) {
2238     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2239   }
2240   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2241   PetscFunctionReturn(0);
2242 }
2243 
2244 #undef __FUNCT__
2245 #define __FUNCT__ "DMHasColoring"
2246 /*@
2247     DMHasColoring - does the DM object have a method of providing a coloring?
2248 
2249     Not Collective
2250 
2251     Input Parameter:
2252 .   dm - the DM object
2253 
2254     Output Parameter:
2255 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2256 
2257     Level: developer
2258 
2259 .seealso DMHasFunction(), DMCreateColoring()
2260 
2261 @*/
2262 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2263 {
2264   PetscFunctionBegin;
2265   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2266   PetscFunctionReturn(0);
2267 }
2268 
2269 #undef  __FUNCT__
2270 #define __FUNCT__ "DMSetVec"
2271 /*@C
2272     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2273 
2274     Collective on DM
2275 
2276     Input Parameter:
2277 +   dm - the DM object
2278 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2279 
2280     Level: developer
2281 
2282 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2283 
2284 @*/
2285 PetscErrorCode  DMSetVec(DM dm,Vec x)
2286 {
2287   PetscErrorCode ierr;
2288   PetscFunctionBegin;
2289   if (x) {
2290     if (!dm->x) {
2291       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2292     }
2293     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2294   }
2295   else if (dm->x) {
2296     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2297   }
2298   PetscFunctionReturn(0);
2299 }
2300 
PetscFList DMList                       = PETSC_NULL;   /* list of registered DM types, searched by DMSetType() and appended to by DMRegister() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;  /* guards lazy one-time registration in DMSetType()/DMGetType(); reset by DMRegisterDestroy() */
2303 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMCOMPOSITE, or DMSLICED).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM is already of the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure the built-in types are registered, then look up the constructor */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any previous implementation before the new constructor runs */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2349 
2350 #undef __FUNCT__
2351 #define __FUNCT__ "DMGetType"
2352 /*@C
2353   DMGetType - Gets the DM type name (as a string) from the DM.
2354 
2355   Not Collective
2356 
2357   Input Parameter:
2358 . dm  - The DM
2359 
2360   Output Parameter:
2361 . type - The DM type name
2362 
2363   Level: intermediate
2364 
2365 .keywords: DM, get, type, name
2366 .seealso: DMSetType(), DMCreate()
2367 @*/
2368 PetscErrorCode  DMGetType(DM dm, DMType *type)
2369 {
2370   PetscErrorCode ierr;
2371 
2372   PetscFunctionBegin;
2373   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2374   PetscValidCharPointer(type,2);
2375   if (!DMRegisterAllCalled) {
2376     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2377   }
2378   *type = ((PetscObject)dm)->type_name;
2379   PetscFunctionReturn(0);
2380 }
2381 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below -- presumably a
     fast path for same-type conversion was intended; confirm against later revisions */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<from>_<to>_C" and query it on dm */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* Instantiate a throwaway DM of the target type and query the same name on it */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No generic fallback exists yet, so failing both lookups is an error */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    /* Run the selected converter under the DM_Convert event for logging */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2477 
2478 /*--------------------------------------------------------------------------------------------------------------------*/
2479 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Level: advanced
@*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Compose "path:name" as the fully qualified symbol location for the function list */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  /* Add the constructor under the type name sname; DMSetType() finds it via PetscFListFind() */
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2499 
2500 
2501 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow DMRegisterAll() to repopulate the list on the next DMSetType()/DMGetType() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2523 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary DM storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only binary viewers are handled here, despite the HDF5 mention above --
     NOTE(review): confirm whether HDF5 support was intended */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File layout: classid marker, then the type name, then type-specific data */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* The concrete implementation reads the remainder of its own data */
  ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2573 
2574 /******************************** FEM Support **********************************/
2575 
2576 #undef __FUNCT__
2577 #define __FUNCT__ "DMPrintCellVector"
2578 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2579   PetscInt       f;
2580   PetscErrorCode ierr;
2581 
2582   PetscFunctionBegin;
2583   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2584   for (f = 0; f < len; ++f) {
2585     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2586   }
2587   PetscFunctionReturn(0);
2588 }
2589 
2590 #undef __FUNCT__
2591 #define __FUNCT__ "DMPrintCellMatrix"
2592 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2593   PetscInt       f, g;
2594   PetscErrorCode ierr;
2595 
2596   PetscFunctionBegin;
2597   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2598   for (f = 0; f < rows; ++f) {
2599     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2600     for (g = 0; g < cols; ++g) {
2601       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2602     }
2603     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2604   }
2605   PetscFunctionReturn(0);
2606 }
2607 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection (may be PETSC_NULL if none has been set)

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2632 
2633 #undef __FUNCT__
2634 #define __FUNCT__ "DMSetDefaultSection"
2635 /*@
2636   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2637 
2638   Input Parameters:
2639 + dm - The DM
2640 - section - The PetscSection
2641 
2642   Level: intermediate
2643 
2644   Note: Any existing Section will be destroyed
2645 
.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2647 @*/
2648 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2649   PetscInt       numFields;
2650   PetscInt       f;
2651   PetscErrorCode ierr;
2652 
2653   PetscFunctionBegin;
2654   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2655   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2656   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2657   dm->defaultSection = section;
2658   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2659   if (numFields) {
2660     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2661     for (f = 0; f < numFields; ++f) {
2662       const char *name;
2663 
2664       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2665       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2666     }
2667   }
2668   PetscFunctionReturn(0);
2669 }
2670 
2671 #undef __FUNCT__
2672 #define __FUNCT__ "DMGetDefaultGlobalSection"
2673 /*@
2674   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2675 
2676   Collective on DM
2677 
2678   Input Parameter:
2679 . dm - The DM
2680 
2681   Output Parameter:
2682 . section - The PetscSection
2683 
2684   Level: intermediate
2685 
2686   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2687 
2688 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2689 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and the point SF;
     the derived PetscLayout for values is cached in dm->map at the same time */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference; do not destroy */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2704 
2705 #undef __FUNCT__
2706 #define __FUNCT__ "DMSetDefaultGlobalSection"
2707 /*@
2708   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2709 
2710   Input Parameters:
2711 + dm - The DM
2712 - section - The PetscSection
2713 
2714   Level: intermediate
2715 
2716   Note: Any existing Section will be destroyed
2717 
2718 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2719 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Destroys any previous global section; the DM takes ownership of section
     (no reference is taken here — callers must not destroy it afterwards) */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2729 
2730 #undef __FUNCT__
2731 #define __FUNCT__ "DMGetDefaultSF"
2732 /*@
2733   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2734   it is created from the default PetscSection layouts in the DM.
2735 
2736   Input Parameter:
2737 . dm - The DM
2738 
2739   Output Parameter:
2740 . sf - The PetscSF
2741 
2742   Level: intermediate
2743 
2744   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2745 
2746 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2747 @*/
2748 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2749   PetscInt       nroots;
2750   PetscErrorCode ierr;
2751 
2752   PetscFunctionBegin;
2753   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2754   PetscValidPointer(sf, 2);
2755   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2756   if (nroots < 0) {
2757     PetscSection section, gSection;
2758 
2759     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2760     if (section) {
2761       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2762       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2763     } else {
2764       *sf = PETSC_NULL;
2765       PetscFunctionReturn(0);
2766     }
2767   }
2768   *sf = dm->defaultSF;
2769   PetscFunctionReturn(0);
2770 }
2771 
2772 #undef __FUNCT__
2773 #define __FUNCT__ "DMSetDefaultSF"
2774 /*@
2775   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2776 
2777   Input Parameters:
2778 + dm - The DM
2779 - sf - The PetscSF
2780 
2781   Level: intermediate
2782 
2783   Note: Any previous SF is destroyed
2784 
2785 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2786 @*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF, no reference is taken here, so the DM
     takes ownership of sf — confirm callers hand over their reference */
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2797 
2798 #undef __FUNCT__
2799 #define __FUNCT__ "DMCreateDefaultSF"
2800 /*@C
2801   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
2802   describing the data layout.
2803 
2804   Input Parameters:
2805 + dm - The DM
2806 . localSection - PetscSection describing the local data layout
2807 - globalSection - PetscSection describing the global data layout
2808 
2809   Level: intermediate
2810 
2811 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
2812 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;          /* parallel layout of the unconstrained global dofs */
  const PetscInt *ranges;          /* per-rank [start,end) ranges of global dof numbers */
  PetscInt       *local;           /* leaf array: local (section) offsets of shared dofs */
  PetscSFNode    *remote;          /* root array: (rank, index) of each leaf's owner */
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout over the locally-owned global dofs so that a global dof
     number can be mapped to the rank owning it */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a dof count of -(gdof+1)
     for points not owned locally; constrained dofs are excluded */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill the leaf (local offset) and root (owner rank/index) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    /* The global size must match the local size either with or without
       constraints; if it matches the unconstrained size, skip constraint
       filtering for this point */
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs; c counts constrained
       dofs skipped so far (cind is assumed sorted ascending) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Point owned by another process: the global offset is encoded as
         -(goff+1); locate the owner rank by searching the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact range start: take the containing interval */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: leaves point at this rank's own roots */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2886 
2887 #undef __FUNCT__
2888 #define __FUNCT__ "DMGetPointSF"
2889 /*@
2890   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2891 
2892   Input Parameter:
2893 . dm - The DM
2894 
2895   Output Parameter:
2896 . sf - The PetscSF
2897 
2898   Level: intermediate
2899 
2900   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2901 
2902 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2903 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference to the section-point overlap SF; do not destroy */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
2911 
2912 #undef __FUNCT__
2913 #define __FUNCT__ "DMSetPointSF"
2914 /*@
2915   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2916 
2917   Input Parameters:
2918 + dm - The DM
2919 - sf - The PetscSF
2920 
2921   Level: intermediate
2922 
2923 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2924 @*/
2925 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2926   PetscErrorCode ierr;
2927 
2928   PetscFunctionBegin;
2929   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2930   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2931   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2932   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
2933   dm->sf = sf;
2934   PetscFunctionReturn(0);
2935 }
2936 
2937 #undef __FUNCT__
2938 #define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - Return the number of fields registered on the DM */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
2947 
2948 #undef __FUNCT__
2949 #define __FUNCT__ "DMSetNumFields"
2950 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
2951 {
2952   PetscInt       f;
2953   PetscErrorCode ierr;
2954 
2955   PetscFunctionBegin;
2956   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2957   for (f = 0; f < dm->numFields; ++f) {
2958     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
2959   }
2960   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
2961   dm->numFields = numFields;
2962   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
2963   for (f = 0; f < dm->numFields; ++f) {
2964     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
2965   }
2966   PetscFunctionReturn(0);
2967 }
2968 
2969 #undef __FUNCT__
2970 #define __FUNCT__ "DMGetField"
/* DMGetField - Return the object associated with field f (borrowed reference).
   Fails if DMSetNumFields() has not been called or f is out of range. */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
2981 
2982 #undef __FUNCT__
2983 #define __FUNCT__ "DMSetCoordinates"
2984 /*@
2985   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
2986 
2987   Collective on DM
2988 
2989   Input Parameters:
2990 + dm - the DM
2991 - c - coordinate vector
2992 
2993   Note:
2994   The coordinates do include those for ghost points, which are in the local vector
2995 
2996   Level: intermediate
2997 
2998 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3000 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so c == dm->coordinates is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; DMGetCoordinatesLocal() rebuilds it lazily */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3014 
3015 #undef __FUNCT__
3016 #define __FUNCT__ "DMSetCoordinatesLocal"
3017 /*@
3018   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3019 
3020   Collective on DM
3021 
3022    Input Parameters:
3023 +  dm - the DM
3024 -  c - coordinate vector
3025 
3026   Note:
3027   The coordinates of ghost points can be set using DMSetCoordinates()
3028   followed by DMGetCoordinatesLocal(). This is intended to enable the
3029   setting of ghost coordinates outside of the domain.
3030 
3031   Level: intermediate
3032 
3033 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3034 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3035 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the cached global form; DMGetCoordinates() rebuilds it lazily */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3049 
3050 #undef __FUNCT__
3051 #define __FUNCT__ "DMGetCoordinates"
3052 /*@
3053   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3054 
3055   Not Collective
3056 
3057   Input Parameter:
3058 . dm - the DM
3059 
3060   Output Parameter:
3061 . c - global coordinate vector
3062 
3063   Note:
3064   This is a borrowed reference, so the user should NOT destroy this vector
3065 
3066   Each process has only the local coordinates (does NOT have the ghost coordinates).
3067 
3068   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3069   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3070 
3071   Level: intermediate
3072 
3073 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3074 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3075 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* If only the local form exists, lazily assemble the global vector from it
     via the coordinate DM's local-to-global scatter */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference; PETSC_NULL if no coordinates have been set */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3095 
3096 #undef __FUNCT__
3097 #define __FUNCT__ "DMGetCoordinatesLocal"
3098 /*@
3099   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3100 
3101   Collective on DM
3102 
3103   Input Parameter:
3104 . dm - the DM
3105 
3106   Output Parameter:
3107 . c - coordinate vector
3108 
3109   Note:
3110   This is a borrowed reference, so the user should NOT destroy this vector
3111 
3112   Each process has the local and ghost coordinates
3113 
3114   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3115   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3116 
3117   Level: intermediate
3118 
3119 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3120 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3121 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* If only the global form exists, lazily assemble the local (ghosted) vector
     from it via the coordinate DM's global-to-local scatter */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference; PETSC_NULL if no coordinates have been set */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3141 
3142 #undef __FUNCT__
3143 #define __FUNCT__ "DMGetCoordinateDM"
3144 /*@
3145   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3146 
3147   Collective on DM
3148 
3149   Input Parameter:
3150 . dm - the DM
3151 
3152   Output Parameter:
3153 . cdm - coordinate DM
3154 
3155   Level: intermediate
3156 
3157 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3158 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3159 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM through the implementation's ops table;
     DM types that do not provide createcoordinatedm cannot carry coordinates */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference; do not destroy */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3174