xref: /petsc/src/dm/interface/dm.c (revision 835c3ec7ce19e5ac5b52b3d452d0b398d46f7605)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
/* Class id and logging events shared by all DM implementations
   (presumably registered during DMInitializePackage() -- verify) */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMCreate"
8 /*@
9   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
10 
11    If you never  call DMSetType()  it will generate an
12    error when you try to use the vector.
13 
14   Collective on MPI_Comm
15 
16   Input Parameter:
17 . comm - The communicator for the DM object
18 
19   Output Parameter:
20 . dm - The DM object
21 
22   Level: beginner
23 
24 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
25 @*/
26 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
27 {
28   DM             v;
29   PetscErrorCode ierr;
30 
31   PetscFunctionBegin;
32   PetscValidPointer(dm,2);
33   *dm = PETSC_NULL;
34 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
35   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
36   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
37   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
38 #endif
39 
40   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
41   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
42 
43 
44   v->ltogmap      = PETSC_NULL;
45   v->ltogmapb     = PETSC_NULL;
46   v->bs           = 1;
47   v->coloringtype = IS_COLORING_GLOBAL;
48   v->lf           = PETSC_NULL;
49   v->lj           = PETSC_NULL;
50   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
51   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
52   v->defaultSection       = PETSC_NULL;
53   v->defaultGlobalSection = PETSC_NULL;
54   {
55     PetscInt i;
56     for (i = 0; i < 10; ++i) {
57       v->nullspaceConstructors[i] = PETSC_NULL;
58     }
59   }
60   v->numFields = 0;
61   v->fields    = PETSC_NULL;
62 
63   *dm = v;
64   PetscFunctionReturn(0);
65 }
66 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Release any previously stored type name before keeping a copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
95 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (presumably PETSC_NULL if none was attached with VecSetDM() -- verify PetscObjectQuery() semantics)

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored as a composed object; VecSetDM() composes under the same key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
123 
124 #undef __FUNCT__
125 #define __FUNCT__ "VecSetDM"
126 /*@
127   VecSetDM - Sets the DM defining the data layout of the vector
128 
129   Not collective
130 
131   Input Parameters:
132 + v - The Vec
133 - dm - The DM
134 
135   Level: intermediate
136 
137 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
138 @*/
139 PetscErrorCode VecSetDM(Vec v, DM dm)
140 {
141   PetscErrorCode ierr;
142 
143   PetscFunctionBegin;
144   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
145   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
146   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
147   PetscFunctionReturn(0);
148 }
149 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Release any previously stored type name before keeping a copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
177 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (presumably PETSC_NULL if none was attached with MatSetDM() -- verify PetscObjectQuery() semantics)

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored as a composed object; MatSetDM() composes under the same key */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
205 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any previously attached DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be PETSC_NULL, which clears the association */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
231 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
263 
264 #undef __FUNCT__
265 #define __FUNCT__ "DMDestroy"
266 /*@
267     DMDestroy - Destroys a vector packer or DMDA.
268 
269     Collective on DM
270 
271     Input Parameter:
272 .   dm - the DM object to destroy
273 
274     Level: developer
275 
276 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
277 
278 @*/
279 PetscErrorCode  DMDestroy(DM *dm)
280 {
281   PetscInt       i, cnt = 0, f;
282   DMNamedVecLink nlink,nnext;
283   PetscErrorCode ierr;
284 
285   PetscFunctionBegin;
286   if (!*dm) PetscFunctionReturn(0);
287   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
288 
289   /* count all the circular references of DM and its contained Vecs */
290   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
291     if ((*dm)->localin[i])  {cnt++;}
292     if ((*dm)->globalin[i]) {cnt++;}
293   }
294   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
295   if ((*dm)->x) {
296     DM obj;
297     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
298     if (obj == *dm) cnt++;
299   }
300 
301   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
302   /*
303      Need this test because the dm references the vectors that
304      reference the dm, so destroying the dm calls destroy on the
305      vectors that cause another destroy on the dm
306   */
307   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
308   ((PetscObject) (*dm))->refct = 0;
309   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
310     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
311     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
312   }
313   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
314     nnext = nlink->next;
315     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
316     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
317     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
318     ierr = PetscFree(nlink);CHKERRQ(ierr);
319   }
320   (*dm)->namedglobal = PETSC_NULL;
321 
322   /* Destroy the list of hooks */
323   {
324     DMCoarsenHookLink link,next;
325     for (link=(*dm)->coarsenhook; link; link=next) {
326       next = link->next;
327       ierr = PetscFree(link);CHKERRQ(ierr);
328     }
329     (*dm)->coarsenhook = PETSC_NULL;
330   }
331   {
332     DMRefineHookLink link,next;
333     for (link=(*dm)->refinehook; link; link=next) {
334       next = link->next;
335       ierr = PetscFree(link);CHKERRQ(ierr);
336     }
337     (*dm)->refinehook = PETSC_NULL;
338   }
339   /* Destroy the work arrays */
340   {
341     DMWorkLink link,next;
342     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
343     for (link=(*dm)->workin; link; link=next) {
344       next = link->next;
345       ierr = PetscFree(link->mem);CHKERRQ(ierr);
346       ierr = PetscFree(link);CHKERRQ(ierr);
347     }
348     (*dm)->workin = PETSC_NULL;
349   }
350 
351   if ((*dm)->ctx && (*dm)->ctxdestroy) {
352     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
353   }
354   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
355   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
356   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
357   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
358   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
359   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
360   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
361 
362   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
363   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
364   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
365   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
366   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
367 
368   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
369   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
370   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
371 
372   for (f = 0; f < (*dm)->numFields; ++f) {
373     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
374   }
375   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
376   /* if memory was published with AMS then destroy it */
377   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
378 
379   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
380   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
381   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
382   PetscFunctionReturn(0);
383 }
384 
385 #undef __FUNCT__
386 #define __FUNCT__ "DMSetUp"
387 /*@
388     DMSetUp - sets up the data structures inside a DM object
389 
390     Collective on DM
391 
392     Input Parameter:
393 .   dm - the DM object to setup
394 
395     Level: developer
396 
397 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
398 
399 @*/
400 PetscErrorCode  DMSetUp(DM dm)
401 {
402   PetscErrorCode ierr;
403 
404   PetscFunctionBegin;
405   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
406   if (dm->setupcalled) PetscFunctionReturn(0);
407   if (dm->ops->setup) {
408     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
409   }
410   dm->setupcalled = PETSC_TRUE;
411   PetscFunctionReturn(0);
412 }
413 
414 #undef __FUNCT__
415 #define __FUNCT__ "DMSetFromOptions"
416 /*@
417     DMSetFromOptions - sets parameters in a DM from the options database
418 
419     Collective on DM
420 
421     Input Parameter:
422 .   dm - the DM object to set options for
423 
424     Options Database:
425 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
426 .   -dm_vec_type <type>  type of vector to create inside DM
427 .   -dm_mat_type <type>  type of matrix to create inside DM
428 -   -dm_coloring_type <global or ghosted>
429 
430     Level: developer
431 
432 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
433 
434 @*/
435 PetscErrorCode  DMSetFromOptions(DM dm)
436 {
437   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
438   PetscErrorCode ierr;
439   char           typeName[256] = MATAIJ;
440 
441   PetscFunctionBegin;
442   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
443   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
444     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
445     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
448     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
449     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
450     if (flg) {
451       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
452     }
453     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
454     if (flg) {
455       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
456     }
457     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
458     if (dm->ops->setfromoptions) {
459       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
460     }
461     /* process any options handlers added with PetscObjectAddOptionsHandler() */
462     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
463   ierr = PetscOptionsEnd();CHKERRQ(ierr);
464   if (flg1) {
465     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
466   }
467   if (flg2) {
468     PetscViewer viewer;
469 
470     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
471     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
472     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
473     ierr = DMView(dm, viewer);CHKERRQ(ierr);
474     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
475   }
476   if (flg3) {
477     PetscViewer viewer;
478 
479     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
480     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
481     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
482     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
483     ierr = DMView(dm, viewer);CHKERRQ(ierr);
484     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
485   }
486   if (flg4) {
487     PetscViewer viewer;
488 
489     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
490     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
491     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
492     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
493     ierr = DMView(dm, viewer);CHKERRQ(ierr);
494     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
495   }
496   PetscFunctionReturn(0);
497 }
498 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; PETSC_NULL selects the standard-output ASCII viewer for the DM's communicator

    Level: developer

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt         classid = DM_FILE_CLASSID;
    MPI_Comm         comm;
    PetscMPIInt      rank;
    char             type[256];

    /* For binary viewers, rank 0 writes a header (the file classid followed by
       a fixed 256-byte type name) before the type-specific view routine runs */
    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* The type-specific view routine is optional */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
545 
546 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
547 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "DMCreateGlobalVector"
551 /*@
552     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
553 
554     Collective on DM
555 
556     Input Parameter:
557 .   dm - the DM object
558 
559     Output Parameter:
560 .   vec - the global vector
561 
562     Level: beginner
563 
564 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
565 
566 @*/
567 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
568 {
569   PetscErrorCode ierr;
570 
571   PetscFunctionBegin;
572   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
573   if (dm->defaultSection) {
574     PetscSection gSection;
575     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
576 
577     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
578     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
579     for(p = pStart; p < pEnd; ++p) {
580       PetscInt dof, cdof;
581 
582       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
583       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
584       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
585       if ((dof > 0) && (dof-cdof != blockSize)) {
586         blockSize = 1;
587         break;
588       }
589     }
590     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
591     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
592     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
593     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
594     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
595     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
596     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
597     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
598     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
599     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
600     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
601     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
602   } else {
603     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
604   }
605   PetscFunctionReturn(0);
606 }
607 
608 #undef __FUNCT__
609 #define __FUNCT__ "DMCreateLocalVector"
610 /*@
611     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
612 
613     Not Collective
614 
615     Input Parameter:
616 .   dm - the DM object
617 
618     Output Parameter:
619 .   vec - the local vector
620 
621     Level: beginner
622 
623 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
624 
625 @*/
626 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
627 {
628   PetscErrorCode ierr;
629 
630   PetscFunctionBegin;
631   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
632   if (dm->defaultSection) {
633     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
634 
635     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
636     for(p = pStart; p < pEnd; ++p) {
637       PetscInt dof;
638 
639       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
640       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
641       if ((dof > 0) && (dof != blockSize)) {
642         blockSize = 1;
643         break;
644       }
645     }
646     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
647     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
648     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
649     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
650     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
651     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
652     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
653   } else {
654     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
655   }
656   PetscFunctionReturn(0);
657 }
658 
659 #undef __FUNCT__
660 #define __FUNCT__ "DMGetLocalToGlobalMapping"
661 /*@
662    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
663 
664    Collective on DM
665 
666    Input Parameter:
667 .  dm - the DM that provides the mapping
668 
669    Output Parameter:
670 .  ltog - the mapping
671 
672    Level: intermediate
673 
674    Notes:
675    This mapping can then be used by VecSetLocalToGlobalMapping() or
676    MatSetLocalToGlobalMapping().
677 
678 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
679 @*/
680 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
681 {
682   PetscErrorCode ierr;
683 
684   PetscFunctionBegin;
685   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
686   PetscValidPointer(ltog,2);
687   if (!dm->ltogmap) {
688     PetscSection section, sectionGlobal;
689 
690     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
691     if (section) {
692       PetscInt      *ltog;
693       PetscInt       pStart, pEnd, size, p, l;
694 
695       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
696       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
697       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
698       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
699       for (p = pStart, l = 0; p < pEnd; ++p) {
700         PetscInt dof, off, c;
701 
702         /* Should probably use constrained dofs */
703         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
704         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
705         for (c = 0; c < dof; ++c, ++l) {
706           ltog[l] = off+c;
707         }
708       }
709       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
710       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
711     } else {
712       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
713       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
714     }
715   }
716   *ltog = dm->ltogmap;
717   PetscFunctionReturn(0);
718 }
719 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

   The mapping is built lazily on first use and cached in the DM; when the
   block size is 1 it is simply the (referenced) point-wise mapping.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the block mapping is the point-wise mapping; take an extra reference since both slots will be destroyed */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
762 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* A stored block size below 1 means it has not been determined yet */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "DMCreateInterpolation"
792 /*@
793     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
794 
795     Collective on DM
796 
797     Input Parameter:
798 +   dm1 - the DM object
799 -   dm2 - the second, finer DM object
800 
801     Output Parameter:
802 +  mat - the interpolation
803 -  vec - the scaling (optional)
804 
805     Level: developer
806 
807     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
808         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
809 
810         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
811         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
812 
813 
814 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
815 
816 @*/
817 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
818 {
819   PetscErrorCode ierr;
820 
821   PetscFunctionBegin;
822   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
823   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
824   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
825   PetscFunctionReturn(0);
826 }
827 
828 #undef __FUNCT__
829 #define __FUNCT__ "DMCreateInjection"
830 /*@
831     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
832 
833     Collective on DM
834 
835     Input Parameter:
836 +   dm1 - the DM object
837 -   dm2 - the second, finer DM object
838 
839     Output Parameter:
840 .   ctx - the injection
841 
842     Level: developer
843 
844    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
845         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
846 
847 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
848 
849 @*/
850 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
851 {
852   PetscErrorCode ierr;
853 
854   PetscFunctionBegin;
855   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
856   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
857   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
860 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
891 
892 #undef __FUNCT__
893 #define __FUNCT__ "DMCreateMatrix"
894 /*@C
895     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
896 
897     Collective on DM
898 
899     Input Parameter:
900 +   dm - the DM object
901 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
902             any type which inherits from one of these (such as MATAIJ)
903 
904     Output Parameter:
905 .   mat - the empty Jacobian
906 
907     Level: beginner
908 
909     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
910        do not need to do it yourself.
911 
912        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
913        the nonzero pattern call DMDASetMatPreallocateOnly()
914 
915        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
916        internally by PETSc.
917 
918        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
919        the indices for the global numbering for DMDAs which is complicated.
920 
921 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
922 
923 @*/
924 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
925 {
926   PetscErrorCode ierr;
927 
928   PetscFunctionBegin;
929   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
930 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
931   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
932 #endif
933   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
934   PetscValidPointer(mat,3);
935   if (dm->mattype) {
936     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
937   } else {
938     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
939   }
940   PetscFunctionReturn(0);
941 }
942 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameter:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the flag; presumably consulted by the type-specific DMCreateMatrix() implementations */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
965 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - the work array

  Level: developer

  Notes: The array is owned by the DM and recycled through an internal free list;
  return it with DMRestoreWorkArray(), do not PetscFree() it.

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free ("in") list if one exists, otherwise allocate a fresh (zeroed) link */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the link's buffer if this request is larger than what it currently holds */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out ("out") list and hand its buffer to the caller */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1011 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - the work array; set to PETSC_NULL on successful return

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link holding this buffer on the checked-out list and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The buffer was not issued by DMGetWorkArray() on this DM (or was restored twice) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1050 
1051 #undef __FUNCT__
1052 #define __FUNCT__ "DMSetNullSpaceConstructor"
1053 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1054 {
1055   PetscFunctionBegin;
1056   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1057   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1058   dm->nullspaceConstructors[field] = nullsp;
1059   PetscFunctionReturn(0);
1060 }
1061 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so it is well-defined on all paths */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    /* First pass: count the locally-owned, unconstrained dofs of each field
       (points with gdof <= 0 are not owned here and are skipped) */
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the index array for each field; fieldSizes is reused as a fill cursor */
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: record the global dof numbers, which run contiguously from
       each point's global offset across its fields */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f], so it is not freed here */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: fall back to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1174 
1175 #undef __FUNCT__
1176 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1177 /*@C
1178   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1179 
1180   Not Collective
1181 
1182   Input Parameters:
1183 + dm   - the DM object
1184 - name - the name of the field decomposition
1185 
1186   Output Parameter:
1187 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1188 
1189   Level: advanced
1190 
1191 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1192 @*/
1193 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1194 {
1195   PetscErrorCode ierr;
1196 
1197   PetscFunctionBegin;
1198   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1199   PetscValidCharPointer(name,2);
1200   PetscValidPointer(ddm,3);
1201   *ddm = PETSC_NULL;
1202   if (dm->ops->createfielddecompositiondm) {
1203     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1204   }
1205   PetscFunctionReturn(0);
1206 }
1207 
1208 #undef __FUNCT__
1209 #define __FUNCT__ "DMCreateFieldDecomposition"
1210 /*@C
1211   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1212                           corresponding to different fields: each IS contains the global indices of the dofs of the
1213                           corresponding field. The optional list of DMs define the DM for each subproblem.
1214                           Generalizes DMCreateFieldIS().
1215 
1216   Not collective
1217 
1218   Input Parameter:
1219 . dm - the DM object
1220 
1221   Output Parameters:
1222 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1223 . namelist  - The name for each field (or PETSC_NULL if not requested)
1224 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1225 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1226 
1227   Level: intermediate
1228 
1229   Notes:
1230   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1231   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1232   and all of the arrays should be freed with PetscFree().
1233 
1234 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1235 @*/
1236 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1237 {
1238   PetscErrorCode ierr;
1239 
1240   PetscFunctionBegin;
1241   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1242   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1243   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1244   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1245   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1246   if (!dm->ops->createfielddecomposition) {
1247     PetscSection section;
1248     PetscInt     numFields, f;
1249 
1250     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1251     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1252     if (section && numFields && dm->ops->createsubdm) {
1253       *len = numFields;
1254       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1255       for (f = 0; f < numFields; ++f) {
1256         const char *fieldName;
1257 
1258         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1259         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1260         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1261       }
1262     } else {
1263       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1264       /* By default there are no DMs associated with subproblems. */
1265       if (dmlist) *dmlist = PETSC_NULL;
1266     }
1267   }
1268   else {
1269     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1270   }
1271   PetscFunctionReturn(0);
1272 }
1273 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers defining this subproblem

  Output Parameters:
+ is - The global indices for the subproblem
- subdm - The DM for the subproblem

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1309 
1310 #undef __FUNCT__
1311 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1312 /*@C
1313   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1314 
1315   Not Collective
1316 
1317   Input Parameters:
1318 + dm   - the DM object
1319 - name - the name of the subdomain decomposition
1320 
1321   Output Parameter:
1322 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1323 
1324   Level: advanced
1325 
1326 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1327 @*/
1328 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1329 {
1330   PetscErrorCode ierr;
1331 
1332   PetscFunctionBegin;
1333   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1334   PetscValidCharPointer(name,2);
1335   PetscValidPointer(ddm,3);
1336   *ddm = PETSC_NULL;
1337   if (dm->ops->createdomaindecompositiondm) {
1338     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1339   }
1340   PetscFunctionReturn(0);
1341 }
1342 
1343 #undef __FUNCT__
1344 #define __FUNCT__ "DMCreateDomainDecomposition"
1345 /*@C
1346   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1347                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1348                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1349                           define a nonoverlapping covering, while outer subdomains can overlap.
1350                           The optional list of DMs define the DM for each subproblem.
1351 
1352   Not collective
1353 
1354   Input Parameter:
1355 . dm - the DM object
1356 
1357   Output Parameters:
1358 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1359 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1360 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1361 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1362 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1363 
1364   Level: intermediate
1365 
1366   Notes:
1367   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1368   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1369   and all of the arrays should be freed with PetscFree().
1370 
1371 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1372 @*/
1373 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1374 {
1375   PetscErrorCode ierr;
1376 
1377   PetscFunctionBegin;
1378   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1379   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1380   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1381   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1382   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1383   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1384   if (dm->ops->createdomaindecomposition) {
1385     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1386   }
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 #undef __FUNCT__
1391 #define __FUNCT__ "DMRefine"
1392 /*@
1393   DMRefine - Refines a DM object
1394 
1395   Collective on DM
1396 
1397   Input Parameter:
1398 + dm   - the DM object
1399 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1400 
1401   Output Parameter:
1402 . dmf - the refined DM, or PETSC_NULL
1403 
1404   Note: If no refinement was done, the return value is PETSC_NULL
1405 
1406   Level: developer
1407 
1408 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1409 @*/
1410 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1411 {
1412   PetscErrorCode ierr;
1413   DMRefineHookLink link;
1414 
1415   PetscFunctionBegin;
1416   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1417   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1418   if (*dmf) {
1419     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1420     (*dmf)->ops->initialguess = dm->ops->initialguess;
1421     (*dmf)->ops->function     = dm->ops->function;
1422     (*dmf)->ops->functionj    = dm->ops->functionj;
1423     if (dm->ops->jacobian != DMComputeJacobianDefault) {
1424       (*dmf)->ops->jacobian     = dm->ops->jacobian;
1425     }
1426     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1427     (*dmf)->ctx       = dm->ctx;
1428     (*dmf)->leveldown = dm->leveldown;
1429     (*dmf)->levelup   = dm->levelup + 1;
1430     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1431     for (link=dm->refinehook; link; link=link->next) {
1432       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1433     }
1434   }
1435   PetscFunctionReturn(0);
1436 }
1437 
1438 #undef __FUNCT__
1439 #define __FUNCT__ "DMRefineHookAdd"
1440 /*@
1441    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1442 
1443    Logically Collective
1444 
1445    Input Arguments:
1446 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1447 .  refinehook - function to run when setting up a coarser level
1448 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1449 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1450 
1451    Calling sequence of refinehook:
1452 $    refinehook(DM coarse,DM fine,void *ctx);
1453 
1454 +  coarse - coarse level DM
1455 .  fine - fine level DM to interpolate problem to
1456 -  ctx - optional user-defined function context
1457 
1458    Calling sequence for interphook:
1459 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1460 
1461 +  coarse - coarse level DM
1462 .  interp - matrix interpolating a coarse-level solution to the finer grid
1463 .  fine - fine level DM to update
1464 -  ctx - optional user-defined function context
1465 
1466    Level: advanced
1467 
1468    Notes:
1469    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1470 
1471    If this function is called multiple times, the hooks will be run in the order they are added.
1472 
1473 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1474 @*/
1475 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1476 {
1477   PetscErrorCode ierr;
1478   DMRefineHookLink link,*p;
1479 
1480   PetscFunctionBegin;
1481   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1482   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1483   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1484   link->refinehook = refinehook;
1485   link->interphook = interphook;
1486   link->ctx = ctx;
1487   link->next = PETSC_NULL;
1488   *p = link;
1489   PetscFunctionReturn(0);
1490 }
1491 
1492 #undef __FUNCT__
1493 #define __FUNCT__ "DMInterpolate"
1494 /*@
1495    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1496 
1497    Collective if any hooks are
1498 
1499    Input Arguments:
1500 +  coarse - coarser DM to use as a base
1501 .  restrct - interpolation matrix, apply using MatInterpolate()
1502 -  fine - finer DM to update
1503 
1504    Level: developer
1505 
1506 .seealso: DMRefineHookAdd(), MatInterpolate()
1507 @*/
1508 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1509 {
1510   PetscErrorCode ierr;
1511   DMRefineHookLink link;
1512 
1513   PetscFunctionBegin;
1514   for (link=fine->refinehook; link; link=link->next) {
1515     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1516   }
1517   PetscFunctionReturn(0);
1518 }
1519 
1520 #undef __FUNCT__
1521 #define __FUNCT__ "DMGetRefineLevel"
1522 /*@
1523     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1524 
1525     Not Collective
1526 
1527     Input Parameter:
1528 .   dm - the DM object
1529 
1530     Output Parameter:
1531 .   level - number of refinements
1532 
1533     Level: developer
1534 
1535 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1536 
1537 @*/
1538 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1539 {
1540   PetscFunctionBegin;
1541   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1542   *level = dm->levelup;
1543   PetscFunctionReturn(0);
1544 }
1545 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Star-forest path: broadcast owned (global) values into the ghosted (local) vector.
       Only insertion makes sense in this direction, so ADD_VALUES is rejected. */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path: map the *_ALL_VALUES modes down to their plain equivalents */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1587 
1588 #undef __FUNCT__
1589 #define __FUNCT__ "DMGlobalToLocalEnd"
1590 /*@
1591     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1592 
1593     Neighbor-wise Collective on DM
1594 
1595     Input Parameters:
1596 +   dm - the DM object
1597 .   g - the global vector
1598 .   mode - INSERT_VALUES or ADD_VALUES
1599 -   l - the local vector
1600 
1601 
1602     Level: beginner
1603 
1604 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1605 
1606 @*/
1607 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1608 {
1609   PetscSF        sf;
1610   PetscErrorCode ierr;
1611   PetscScalar    *lArray, *gArray;
1612 
1613   PetscFunctionBegin;
1614   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1615   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1616   if (sf) {
1617   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1618 
1619     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1620     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1621     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1622     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1623     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1624   } else {
1625     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1626   }
1627   PetscFunctionReturn(0);
1628 }
1629 
1630 #undef __FUNCT__
1631 #define __FUNCT__ "DMLocalToGlobalBegin"
1632 /*@
1633     DMLocalToGlobalBegin - updates global vectors from local vectors
1634 
1635     Neighbor-wise Collective on DM
1636 
1637     Input Parameters:
1638 +   dm - the DM object
1639 .   l - the local vector
1640 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1641            base point.
1642 - - the global vector
1643 
1644     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1645            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1646            global array to the final global array with VecAXPY().
1647 
1648     Level: beginner
1649 
1650 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1651 
1652 @*/
1653 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1654 {
1655   PetscSF        sf;
1656   PetscErrorCode ierr;
1657 
1658   PetscFunctionBegin;
1659   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1660   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1661   if (sf) {
1662     MPI_Op       op;
1663     PetscScalar *lArray, *gArray;
1664 
1665     switch(mode) {
1666     case INSERT_VALUES:
1667     case INSERT_ALL_VALUES:
1668 #if defined(PETSC_HAVE_MPI_REPLACE)
1669       op = MPI_REPLACE; break;
1670 #else
1671       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1672 #endif
1673     case ADD_VALUES:
1674     case ADD_ALL_VALUES:
1675       op = MPI_SUM; break;
1676   default:
1677     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1678     }
1679     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1680     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1681     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1682     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1683     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1684   } else {
1685     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1686   }
1687   PetscFunctionReturn(0);
1688 }
1689 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Star-forest path: complete the reduction started in DMLocalToGlobalBegin().
       The MPI_Op is recomputed from mode so it matches the Begin call. */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path: map the *_ALL_VALUES modes down to their plain equivalents */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1745 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - always set to SAME_NONZERO_PATTERN, since the finite-difference coloring reuses the nonzero structure of B

    Notes: assumes dm->fd (the MatFDColoring) has already been created -- DMComputeJacobian() sets it up before
           installing this routine as the Jacobian callback; TODO confirm no other caller reaches here without it.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* Fill B by finite differencing the DM's function through the stored coloring */
  ierr  = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  if (A != B) {
    /* A was not touched by the coloring; assemble it so it is in a consistent state */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1779 
1780 #undef __FUNCT__
1781 #define __FUNCT__ "DMCoarsen"
1782 /*@
1783     DMCoarsen - Coarsens a DM object
1784 
1785     Collective on DM
1786 
1787     Input Parameter:
1788 +   dm - the DM object
1789 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1790 
1791     Output Parameter:
1792 .   dmc - the coarsened DM
1793 
1794     Level: developer
1795 
1796 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1797 
1798 @*/
1799 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1800 {
1801   PetscErrorCode ierr;
1802   DMCoarsenHookLink link;
1803 
1804   PetscFunctionBegin;
1805   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1806   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1807   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1808   (*dmc)->ops->initialguess = dm->ops->initialguess;
1809   (*dmc)->ops->function     = dm->ops->function;
1810   (*dmc)->ops->functionj    = dm->ops->functionj;
1811   if (dm->ops->jacobian != DMComputeJacobianDefault) {
1812     (*dmc)->ops->jacobian     = dm->ops->jacobian;
1813   }
1814   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1815   (*dmc)->ctx       = dm->ctx;
1816   (*dmc)->levelup   = dm->levelup;
1817   (*dmc)->leveldown = dm->leveldown + 1;
1818   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1819   for (link=dm->coarsenhook; link; link=link->next) {
1820     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1821   }
1822   PetscFunctionReturn(0);
1823 }
1824 
1825 #undef __FUNCT__
1826 #define __FUNCT__ "DMCoarsenHookAdd"
1827 /*@
1828    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1829 
1830    Logically Collective
1831 
1832    Input Arguments:
1833 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1834 .  coarsenhook - function to run when setting up a coarser level
1835 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1836 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1837 
1838    Calling sequence of coarsenhook:
1839 $    coarsenhook(DM fine,DM coarse,void *ctx);
1840 
1841 +  fine - fine level DM
1842 .  coarse - coarse level DM to restrict problem to
1843 -  ctx - optional user-defined function context
1844 
1845    Calling sequence for restricthook:
1846 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1847 
1848 +  fine - fine level DM
1849 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1850 .  rscale - scaling vector for restriction
1851 .  inject - matrix restricting by injection
1852 .  coarse - coarse level DM to update
1853 -  ctx - optional user-defined function context
1854 
1855    Level: advanced
1856 
1857    Notes:
1858    This function is only needed if auxiliary data needs to be set up on coarse grids.
1859 
1860    If this function is called multiple times, the hooks will be run in the order they are added.
1861 
1862    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1863    extract the finest level information from its context (instead of from the SNES).
1864 
1865 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1866 @*/
1867 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1868 {
1869   PetscErrorCode ierr;
1870   DMCoarsenHookLink link,*p;
1871 
1872   PetscFunctionBegin;
1873   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1874   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1875   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1876   link->coarsenhook = coarsenhook;
1877   link->restricthook = restricthook;
1878   link->ctx = ctx;
1879   link->next = PETSC_NULL;
1880   *p = link;
1881   PetscFunctionReturn(0);
1882 }
1883 
1884 #undef __FUNCT__
1885 #define __FUNCT__ "DMRestrict"
1886 /*@
1887    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1888 
1889    Collective if any hooks are
1890 
1891    Input Arguments:
1892 +  fine - finer DM to use as a base
1893 .  restrct - restriction matrix, apply using MatRestrict()
1894 .  inject - injection matrix, also use MatRestrict()
1895 -  coarse - coarer DM to update
1896 
1897    Level: developer
1898 
1899 .seealso: DMCoarsenHookAdd(), MatRestrict()
1900 @*/
1901 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1902 {
1903   PetscErrorCode ierr;
1904   DMCoarsenHookLink link;
1905 
1906   PetscFunctionBegin;
1907   for (link=fine->coarsenhook; link; link=link->next) {
1908     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1909   }
1910   PetscFunctionReturn(0);
1911 }
1912 
1913 #undef __FUNCT__
1914 #define __FUNCT__ "DMBlockRestrictHookAdd"
1915 /*@
1916    DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1917 
1918    Logically Collective
1919 
1920    Input Arguments:
1921 +  global - global DM
1922 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1923 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1924 
1925    Calling sequence for restricthook:
1926 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1927 
1928 +  global - global DM
1929 .  out    - scatter to the outer (with ghost and overlap points) block vector
1930 .  in     - scatter to block vector values only owned locally
1931 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1932 -  ctx - optional user-defined function context
1933 
1934    Level: advanced
1935 
1936    Notes:
1937    This function is only needed if auxiliary data needs to be set up on coarse grids.
1938 
1939    If this function is called multiple times, the hooks will be run in the order they are added.
1940 
1941    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1942    extract the finest level information from its context (instead of from the SNES).
1943 
1944 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1945 @*/
1946 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1947 {
1948   PetscErrorCode ierr;
1949   DMBlockRestrictHookLink link,*p;
1950 
1951   PetscFunctionBegin;
1952   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1953   for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1954   ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
1955   link->restricthook = restricthook;
1956   link->ctx = ctx;
1957   link->next = PETSC_NULL;
1958   *p = link;
1959   PetscFunctionReturn(0);
1960 }
1961 
#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrict"
/*@
   DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to restrict from
.  in     - scatter passed through to the hooks; per DMBlockRestrictHookAdd(), the "in" scatter targets locally
           owned block vector values -- NOTE(review): that doc lists "out" before "in" while this routine passes
           (global,in,out,block); confirm the intended argument order
.  out    - scatter passed through to the hooks (outer block vector, with ghost and overlap points)
-  block  - block DM to update

   Level: developer

.seealso: DMBlockRestrictHookAdd(), DMCoarsenHookAdd(), MatRestrict()
@*/
PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
{
  PetscErrorCode ierr;
  DMBlockRestrictHookLink link;

  PetscFunctionBegin;
  /* Run each registered block-restriction hook in registration order */
  for (link=global->blockrestricthook; link; link=link->next) {
    if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1990 
1991 #undef __FUNCT__
1992 #define __FUNCT__ "DMGetCoarsenLevel"
1993 /*@
1994     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
1995 
1996     Not Collective
1997 
1998     Input Parameter:
1999 .   dm - the DM object
2000 
2001     Output Parameter:
2002 .   level - number of coarsenings
2003 
2004     Level: developer
2005 
2006 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2007 
2008 @*/
2009 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2010 {
2011   PetscFunctionBegin;
2012   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2013   *level = dm->leveldown;
2014   PetscFunctionReturn(0);
2015 }
2016 
2017 
2018 
2019 #undef __FUNCT__
2020 #define __FUNCT__ "DMRefineHierarchy"
2021 /*@C
2022     DMRefineHierarchy - Refines a DM object, all levels at once
2023 
2024     Collective on DM
2025 
2026     Input Parameter:
2027 +   dm - the DM object
2028 -   nlevels - the number of levels of refinement
2029 
2030     Output Parameter:
2031 .   dmf - the refined DM hierarchy
2032 
2033     Level: developer
2034 
2035 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2036 
2037 @*/
2038 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2039 {
2040   PetscErrorCode ierr;
2041 
2042   PetscFunctionBegin;
2043   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2044   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2045   if (nlevels == 0) PetscFunctionReturn(0);
2046   if (dm->ops->refinehierarchy) {
2047     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2048   } else if (dm->ops->refine) {
2049     PetscInt i;
2050 
2051     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2052     for (i=1; i<nlevels; i++) {
2053       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2054     }
2055   } else {
2056     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2057   }
2058   PetscFunctionReturn(0);
2059 }
2060 
2061 #undef __FUNCT__
2062 #define __FUNCT__ "DMCoarsenHierarchy"
2063 /*@C
2064     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2065 
2066     Collective on DM
2067 
2068     Input Parameter:
2069 +   dm - the DM object
2070 -   nlevels - the number of levels of coarsening
2071 
2072     Output Parameter:
2073 .   dmc - the coarsened DM hierarchy
2074 
2075     Level: developer
2076 
2077 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2078 
2079 @*/
2080 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2081 {
2082   PetscErrorCode ierr;
2083 
2084   PetscFunctionBegin;
2085   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2086   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2087   if (nlevels == 0) PetscFunctionReturn(0);
2088   PetscValidPointer(dmc,3);
2089   if (dm->ops->coarsenhierarchy) {
2090     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2091   } else if (dm->ops->coarsen) {
2092     PetscInt i;
2093 
2094     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2095     for (i=1; i<nlevels; i++) {
2096       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2097     }
2098   } else {
2099     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2100   }
2101   PetscFunctionReturn(0);
2102 }
2103 
2104 #undef __FUNCT__
2105 #define __FUNCT__ "DMCreateAggregates"
2106 /*@
2107    DMCreateAggregates - Gets the aggregates that map between
2108    grids associated with two DMs.
2109 
2110    Collective on DM
2111 
2112    Input Parameters:
2113 +  dmc - the coarse grid DM
2114 -  dmf - the fine grid DM
2115 
2116    Output Parameters:
2117 .  rest - the restriction matrix (transpose of the projection matrix)
2118 
2119    Level: intermediate
2120 
2121 .keywords: interpolation, restriction, multigrid
2122 
2123 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2124 @*/
2125 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2126 {
2127   PetscErrorCode ierr;
2128 
2129   PetscFunctionBegin;
2130   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2131   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2132   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2133   PetscFunctionReturn(0);
2134 }
2135 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function; presumably invoked on the context set with DMSetApplicationContext() during DMDestroy() -- confirm against DMDestroy()

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2159 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context; the DM stores the pointer only and does not copy or take ownership of the data
          (unless a destroy routine is registered with DMSetApplicationContextDestroy())

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2183 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; despite the void* declaration this must be the ADDRESS of a pointer
          (e.g. pass &userctx), since the routine writes through it as a void**

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really a void**; store the saved context pointer through it */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2209 
#undef __FUNCT__
#define __FUNCT__ "DMSetFunction"
/*@C
    DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)

    Level: intermediate

    Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite differences if no Jacobian
           computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetJacobian()

@*/
PetscErrorCode  DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  /* functionj (used for finite-difference Jacobians) is only overwritten when a real function is
     supplied, so passing PETSC_NULL cancels the function but keeps the FD Jacobian working */
  if (f) {
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}
2240 
#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetFunction()

@*/
PetscErrorCode  DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}
2265 
2266 #undef __FUNCT__
2267 #define __FUNCT__ "DMSetVariableBounds"
2268 /*@C
2269     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2270 
2271     Logically Collective on DM
2272 
2273     Input Parameter:
2274 +   dm - the DM object
2275 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2276 
2277     Level: intermediate
2278 
2279 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2280          DMSetJacobian()
2281 
2282 @*/
2283 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2284 {
2285   PetscFunctionBegin;
2286   dm->ops->computevariablebounds = f;
2287   PetscFunctionReturn(0);
2288 }
2289 
2290 #undef __FUNCT__
2291 #define __FUNCT__ "DMHasVariableBounds"
2292 /*@
2293     DMHasVariableBounds - does the DM object have a variable bounds function?
2294 
2295     Not Collective
2296 
2297     Input Parameter:
2298 .   dm - the DM object to destroy
2299 
2300     Output Parameter:
2301 .   flg - PETSC_TRUE if the variable bounds function exists
2302 
2303     Level: developer
2304 
2305 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2306 
2307 @*/
2308 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2309 {
2310   PetscFunctionBegin;
2311   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2312   PetscFunctionReturn(0);
2313 }
2314 
2315 #undef __FUNCT__
2316 #define __FUNCT__ "DMComputeVariableBounds"
2317 /*@C
2318     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2319 
2320     Logically Collective on DM
2321 
2322     Input Parameters:
2323 +   dm - the DM object to destroy
2324 -   x  - current solution at which the bounds are computed
2325 
2326     Output parameters:
2327 +   xl - lower bound
2328 -   xu - upper bound
2329 
2330     Level: intermediate
2331 
2332 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2333          DMSetFunction(), DMSetVariableBounds()
2334 
2335 @*/
2336 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2337 {
2338   PetscErrorCode ierr;
2339   PetscFunctionBegin;
2340   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2341   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2342   if (dm->ops->computevariablebounds) {
2343     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2344   }
2345   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2346   PetscFunctionReturn(0);
2347 }
2348 
#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
    DMHasFunction - does the DM object have a function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasFunction(DM dm,PetscBool  *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg =  (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}
2374 
#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
    DMHasJacobian - does the DM object have a matrix function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasJacobian(DM dm,PetscBool  *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg =  (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}
2400 
2401 #undef __FUNCT__
2402 #define __FUNCT__ "DMHasColoring"
2403 /*@
2404     DMHasColoring - does the DM object have a method of providing a coloring?
2405 
2406     Not Collective
2407 
2408     Input Parameter:
2409 .   dm - the DM object
2410 
2411     Output Parameter:
2412 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2413 
2414     Level: developer
2415 
2416 .seealso DMHasFunction(), DMCreateColoring()
2417 
2418 @*/
2419 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2420 {
2421   PetscFunctionBegin;
2422   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2423   PetscFunctionReturn(0);
2424 }
2425 
2426 #undef  __FUNCT__
2427 #define __FUNCT__ "DMSetVec"
2428 /*@C
2429     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2430 
2431     Collective on DM
2432 
2433     Input Parameter:
2434 +   dm - the DM object
2435 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2436 
2437     Level: developer
2438 
2439 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2440          DMSetFunction(), DMSetJacobian(), DMSetVariableBounds()
2441 
2442 @*/
2443 PetscErrorCode  DMSetVec(DM dm,Vec x)
2444 {
2445   PetscErrorCode ierr;
2446   PetscFunctionBegin;
2447   if (x) {
2448     if (!dm->x) {
2449       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2450     }
2451     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2452   }
2453   else if (dm->x) {
2454     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2455   }
2456   PetscFunctionReturn(0);
2457 }
2458 
2459 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction"
/*@
    DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - the location where the function is evaluated, may be ignored for linear problems
-   b - the vector to hold the right hand side entries

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetJacobian()

@*/
PetscErrorCode  DMComputeFunction(DM dm,Vec x,Vec b)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()");
  /* Bracket the user callback with stack markers so errors inside it are attributed correctly */
  PetscStackPush("DM user function");
  ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}
2489 
2490 
2491 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian"
/*@
    DMComputeJacobian - compute the matrix entries for the solver

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - matrix structure flag, set by the Jacobian routine that is invoked

    Notes: If no Jacobian routine was set with DMSetJacobian(), a finite-difference coloring of B is
           built on first use and DMComputeJacobianDefault() is installed as the Jacobian routine.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian: lazily build a finite-difference coloring of B driven by dm->ops->functionj */
    ISColoring     coloring;
    MatFDColoring  fd;
    MatType        mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    /* Cache the coloring on the DM and route future calls through the FD default */
    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed -- NOTE(review): likely compensating for a reference taken by
       MatFDColoringSetFunction() on dm, which would otherwise create a cycle; confirm before changing */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    /* A was not assembled by the Jacobian routine; finalize it */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2552 
2553 
PetscFList DMList                       = PETSC_NULL;   /* registry of DM type constructors, searched by DMSetType() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;  /* guards lazy one-time registration in DMSetType()/DMGetType() */
2556 
2557 #undef __FUNCT__
2558 #define __FUNCT__ "DMSetType"
2559 /*@C
2560   DMSetType - Builds a DM, for a particular DM implementation.
2561 
2562   Collective on DM
2563 
2564   Input Parameters:
2565 + dm     - The DM object
2566 - method - The name of the DM type
2567 
2568   Options Database Key:
2569 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2570 
2571   Notes:
2572   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2573 
2574   Level: intermediate
2575 
2576 .keywords: DM, set, type
2577 .seealso: DMGetType(), DMCreate()
2578 @*/
2579 PetscErrorCode  DMSetType(DM dm, DMType method)
2580 {
2581   PetscErrorCode (*r)(DM);
2582   PetscBool      match;
2583   PetscErrorCode ierr;
2584 
2585   PetscFunctionBegin;
2586   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2587   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2588   if (match) PetscFunctionReturn(0);
2589 
2590   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2591   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2592   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2593 
2594   if (dm->ops->destroy) {
2595     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2596     dm->ops->destroy = PETSC_NULL;
2597   }
2598   ierr = (*r)(dm);CHKERRQ(ierr);
2599   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2600   PetscFunctionReturn(0);
2601 }
2602 
2603 #undef __FUNCT__
2604 #define __FUNCT__ "DMGetType"
2605 /*@C
2606   DMGetType - Gets the DM type name (as a string) from the DM.
2607 
2608   Not Collective
2609 
2610   Input Parameter:
2611 . dm  - The DM
2612 
2613   Output Parameter:
2614 . type - The DM type name
2615 
2616   Level: intermediate
2617 
2618 .keywords: DM, get, type, name
2619 .seealso: DMSetType(), DMCreate()
2620 @*/
2621 PetscErrorCode  DMGetType(DM dm, DMType *type)
2622 {
2623   PetscErrorCode ierr;
2624 
2625   PetscFunctionBegin;
2626   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2627   PetscValidCharPointer(type,2);
2628   if (!DMRegisterAllCalled) {
2629     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2630   }
2631   *type = ((PetscObject)dm)->type_name;
2632   PetscFunctionReturn(0);
2633 }
2634 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below;
     presumably a fast path for same-type conversion was intended — confirm */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<from>_<to>_C" and query it on dm */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* B is created only so the converter registered on the target type can be queried;
       it is destroyed again as soon as the lookup succeeds */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): with steps 3/4 disabled by #if 0, B is not destroyed on this
       error path, leaking the temporary DM — confirm this is acceptable for error exits */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  /* The new DM holds fresh data, so bump its state counter */
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2730 
2731 /*--------------------------------------------------------------------------------------------------------------------*/
2732 
2733 #undef __FUNCT__
2734 #define __FUNCT__ "DMRegister"
2735 /*@C
2736   DMRegister - See DMRegisterDynamic()
2737 
2738   Level: advanced
2739 @*/
2740 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2741 {
2742   char fullname[PETSC_MAX_PATH_LEN];
2743   PetscErrorCode ierr;
2744 
2745   PetscFunctionBegin;
2746   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2747   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2748   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2749   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2750   PetscFunctionReturn(0);
2751 }
2752 
2753 
2754 /*--------------------------------------------------------------------------------------------------------------------*/
2755 #undef __FUNCT__
2756 #define __FUNCT__ "DMRegisterDestroy"
2757 /*@C
2758    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2759 
2760    Not Collective
2761 
2762    Level: advanced
2763 
2764 .keywords: DM, register, destroy
2765 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2766 @*/
2767 PetscErrorCode  DMRegisterDestroy(void)
2768 {
2769   PetscErrorCode ierr;
2770 
2771   PetscFunctionBegin;
2772   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2773   DMRegisterAllCalled = PETSC_FALSE;
2774   PetscFunctionReturn(0);
2775 }
2776 
2777 #if defined(PETSC_HAVE_MATLAB_ENGINE)
2778 #include <mex.h>
2779 
2780 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;
2781 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
                         DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 1,nrhs = 4;
  mxArray           *plhs[1],*prhs[4];
  long long int     lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* Smuggle the PETSc object pointers to MATLAB: copy each pointer's bits into a
     64-bit integer and pass it as a double scalar (x may be NULL -> lx stays 0) */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)ly);
  prhs[3] =  mxCreateString(sctx->funcname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* plhs[0] carries the error code returned by the MATLAB-side wrapper,
     converted here from double back to a PetscErrorCode */
  ierr    =  mxGetScalar(plhs[0]);CHKERRQ(ierr);
  /* Release the mxArray temporaries created above */
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}
2823 
2824 
2825 #undef __FUNCT__
2826 #define __FUNCT__ "DMSetFunctionMatlab"
2827 /*
2828    DMSetFunctionMatlab - Sets the function evaluation routine
2829 
2830 */
2831 PetscErrorCode  DMSetFunctionMatlab(DM dm,const char *func)
2832 {
2833   PetscErrorCode    ierr;
2834   DMMatlabContext   *sctx;
2835 
2836   PetscFunctionBegin;
2837   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2838   /* currently sctx is memory bleed */
2839   ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
2840   if (!sctx) {
2841     ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
2842   }
2843   ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
2844   ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
2845   ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
2846   PetscFunctionReturn(0);
2847 }
2848 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
                         DMSetJacobianMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 2,nrhs = 5;
  mxArray           *plhs[2],*prhs[5];
  long long int     lx = 0,lA = 0,lB = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* Smuggle the PETSc object pointers to MATLAB: copy each pointer's bits into a
     64-bit integer and pass it as a double scalar (x may be NULL -> lx stays 0) */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)lA);
  prhs[3] =  mxCreateDoubleScalar((double)lB);
  prhs[4] =  mxCreateString(sctx->jacname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* First output: the MatStructure flag; second output: the MATLAB-side error code */
  *str    =  (MatStructure) mxGetScalar(plhs[0]);
  ierr    =  (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  /* Release the mxArray temporaries created above */
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}
2894 
2895 
2896 #undef __FUNCT__
2897 #define __FUNCT__ "DMSetJacobianMatlab"
2898 /*
2899    DMSetJacobianMatlab - Sets the Jacobian function evaluation routine
2900 
2901 */
2902 PetscErrorCode  DMSetJacobianMatlab(DM dm,const char *func)
2903 {
2904   PetscErrorCode    ierr;
2905   DMMatlabContext   *sctx;
2906 
2907   PetscFunctionBegin;
2908   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2909   /* currently sctx is memory bleed */
2910   ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
2911   if (!sctx) {
2912     ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
2913   }
2914   ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
2915   ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
2916   ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
2917   PetscFunctionReturn(0);
2918 }
2919 #endif
2920 
2921 #undef __FUNCT__
2922 #define __FUNCT__ "DMLoad"
2923 /*@C
2924   DMLoad - Loads a DM that has been stored in binary  with DMView().
2925 
2926   Collective on PetscViewer
2927 
2928   Input Parameters:
2929 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2930            some related function before a call to DMLoad().
2931 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2932            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2933 
2934    Level: intermediate
2935 
2936   Notes:
2937    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2938 
2939   Notes for advanced users:
2940   Most users should not need to know the details of the binary storage
2941   format, since DMLoad() and DMView() completely hide these details.
2942   But for anyone who's interested, the standard binary matrix storage
2943   format is
2944 .vb
2945      has not yet been determined
2946 .ve
2947 
2948 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2949 @*/
2950 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2951 {
2952   PetscErrorCode ierr;
2953   PetscBool      isbinary;
2954   PetscInt       classid;
2955   char           type[256];
2956 
2957   PetscFunctionBegin;
2958   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2959   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2960   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2961   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2962 
2963   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2964   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2965   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2966   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2967   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2968   PetscFunctionReturn(0);
2969 }
2970 
2971 /******************************** FEM Support **********************************/
2972 
2973 #undef __FUNCT__
2974 #define __FUNCT__ "DMPrintCellVector"
2975 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2976   PetscInt       f;
2977   PetscErrorCode ierr;
2978 
2979   PetscFunctionBegin;
2980   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2981   for (f = 0; f < len; ++f) {
2982     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2983   }
2984   PetscFunctionReturn(0);
2985 }
2986 
2987 #undef __FUNCT__
2988 #define __FUNCT__ "DMPrintCellMatrix"
2989 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2990   PetscInt       f, g;
2991   PetscErrorCode ierr;
2992 
2993   PetscFunctionBegin;
2994   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2995   for (f = 0; f < rows; ++f) {
2996     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2997     for (g = 0; g < cols; ++g) {
2998       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2999     }
3000     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
3001   }
3002   PetscFunctionReturn(0);
3003 }
3004 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalFunction"
/*@C
  DMGetLocalFunction - Get the local residual function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lf - The local residual function

   Calling sequence of lf:
$    lf (DM dm, Vec x, Vec f, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  f - local vector to put residual in
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* lf may be NULL when the caller only wants the validity check */
  if (lf) *lf = dm->lf;
  PetscFunctionReturn(0);
}
3037 
#undef __FUNCT__
#define __FUNCT__ "DMSetLocalFunction"
/*@C
  DMSetLocalFunction - Set the local residual function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lf - The local residual function

   Calling sequence of lf:
$    lf (DM dm, Vec x, Vec f, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  f - local vector to put residual in
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Store the callback; any previously set function is simply overwritten */
  dm->lf = lf;
  PetscFunctionReturn(0);
}
3068 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalJacobian"
/*@C
  DMGetLocalJacobian - Get the local Jacobian function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lj - The local Jacobian function

   Calling sequence of lj:
$    lj (DM dm, Vec x, Mat J, Mat M, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  J - matrix to put Jacobian in
.  M - matrix to use for defining Jacobian preconditioner
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* lj may be NULL when the caller only wants the validity check */
  if (lj) *lj = dm->lj;
  PetscFunctionReturn(0);
}
3102 
#undef __FUNCT__
#define __FUNCT__ "DMSetLocalJacobian"
/*@C
  DMSetLocalJacobian - Set the local Jacobian function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lj - The local Jacobian function

   Calling sequence of lj:
$    lj (DM dm, Vec x, Mat J, Mat M, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  J - matrix to put Jacobian in
.  M - matrix to use for defining Jacobian preconditioner
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat,  Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Store the callback; any previously set Jacobian routine is simply overwritten */
  dm->lj = lj;
  PetscFunctionReturn(0);
}
3134 
3135 #undef __FUNCT__
3136 #define __FUNCT__ "DMGetDefaultSection"
3137 /*@
3138   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
3139 
3140   Input Parameter:
3141 . dm - The DM
3142 
3143   Output Parameter:
3144 . section - The PetscSection
3145 
3146   Level: intermediate
3147 
3148   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
3149 
3150 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
3151 @*/
3152 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
3153   PetscFunctionBegin;
3154   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3155   PetscValidPointer(section, 2);
3156   *section = dm->defaultSection;
3157   PetscFunctionReturn(0);
3158 }
3159 
3160 #undef __FUNCT__
3161 #define __FUNCT__ "DMSetDefaultSection"
3162 /*@
3163   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
3164 
3165   Input Parameters:
3166 + dm - The DM
3167 - section - The PetscSection
3168 
3169   Level: intermediate
3170 
3171   Note: Any existing Section will be destroyed
3172 
3173 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
3174 @*/
3175 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
3176   PetscInt       numFields;
3177   PetscInt       f;
3178   PetscErrorCode ierr;
3179 
3180   PetscFunctionBegin;
3181   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3182   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
3183   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
3184   dm->defaultSection = section;
3185   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
3186   if (numFields) {
3187     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
3188     for (f = 0; f < numFields; ++f) {
3189       const char *name;
3190 
3191       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
3192       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
3193     }
3194   }
3195   PetscFunctionReturn(0);
3196 }
3197 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
  The global section is built lazily on first use from the default local section and
  the point SF, which is why this routine is collective.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Both ingredients are required to derive the global layout */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also cache the matching PetscLayout for global vectors */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
3231 
3232 #undef __FUNCT__
3233 #define __FUNCT__ "DMSetDefaultGlobalSection"
3234 /*@
3235   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
3236 
3237   Input Parameters:
3238 + dm - The DM
3239 - section - The PetscSection
3240 
3241   Level: intermediate
3242 
3243   Note: Any existing Section will be destroyed
3244 
3245 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
3246 @*/
3247 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
3248   PetscErrorCode ierr;
3249 
3250   PetscFunctionBegin;
3251   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3252   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
3253   dm->defaultGlobalSection = section;
3254   PetscFunctionReturn(0);
3255 }
3256 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or PETSC_NULL if no default section has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative root count signals that the SF graph has not been set yet */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    /* Build the SF lazily from the local and global section layouts */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No layout information available: report "no SF" rather than erroring */
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
3298 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  /* NOTE(review): the DM adopts sf without calling PetscObjectReference(),
     unlike DMSetPointSF() which does take a reference — callers must hand over
     ownership here; confirm this asymmetry is intentional */
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3324 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: In the global section, a negative dof count encodes a remotely-owned point
  (true size -(gdof+1)), and a negative offset encodes the remote global offset
  (-(goff+1)); see the per-point loops below.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned (unconstrained) dofs so global offsets can be
     mapped to (rank, local index) pairs via the ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count the leaves (unconstrained dofs of every point, owned or not) */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    /* gdof < 0 marks a remotely-owned point; its true dof count is -(gdof+1) */
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local indices (skipping constrained dofs) and remote targets */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Local leaf indices: the point's local offsets with constrained dofs removed
       (cind is sorted, so a single cursor c suffices) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the true global offset from -(goff+1) and
         locate the owning rank by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt encodes the insertion point as -(loc+1) */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: the leaf maps to this rank's own storage */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3413 
3414 #undef __FUNCT__
3415 #define __FUNCT__ "DMGetPointSF"
3416 /*@
3417   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3418 
3419   Input Parameter:
3420 . dm - The DM
3421 
3422   Output Parameter:
3423 . sf - The PetscSF
3424 
3425   Level: intermediate
3426 
3427   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3428 
3429 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3430 @*/
3431 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3432   PetscFunctionBegin;
3433   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3434   PetscValidPointer(sf, 2);
3435   *sf = dm->sf;
3436   PetscFunctionReturn(0);
3437 }
3438 
3439 #undef __FUNCT__
3440 #define __FUNCT__ "DMSetPointSF"
3441 /*@
3442   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3443 
3444   Input Parameters:
3445 + dm - The DM
3446 - sf - The PetscSF
3447 
3448   Level: intermediate
3449 
3450 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3451 @*/
3452 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3453   PetscErrorCode ierr;
3454 
3455   PetscFunctionBegin;
3456   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3457   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3458   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3459   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3460   dm->sf = sf;
3461   PetscFunctionReturn(0);
3462 }
3463 
3464 #undef __FUNCT__
3465 #define __FUNCT__ "DMGetNumFields"
3466 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3467 {
3468   PetscFunctionBegin;
3469   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3470   PetscValidPointer(numFields, 2);
3471   *numFields = dm->numFields;
3472   PetscFunctionReturn(0);
3473 }
3474 
3475 #undef __FUNCT__
3476 #define __FUNCT__ "DMSetNumFields"
3477 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3478 {
3479   PetscInt       f;
3480   PetscErrorCode ierr;
3481 
3482   PetscFunctionBegin;
3483   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3484   for (f = 0; f < dm->numFields; ++f) {
3485     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3486   }
3487   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3488   dm->numFields = numFields;
3489   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3490   for (f = 0; f < dm->numFields; ++f) {
3491     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3492   }
3493   PetscFunctionReturn(0);
3494 }
3495 
3496 #undef __FUNCT__
3497 #define __FUNCT__ "DMGetField"
3498 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3499 {
3500   PetscFunctionBegin;
3501   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3502   PetscValidPointer(field, 2);
3503   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3504   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3505   *field = dm->fields[f];
3506   PetscFunctionReturn(0);
3507 }
3508 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do not include those for ghost points, which are in the local vector.
  Any cached local coordinate vector is destroyed by this call; it is rebuilt by a
  global-to-local scatter on the next DMGetCoordinatesLocal().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Take a reference on c BEFORE destroying the old vector so c == dm->coordinates is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; it no longer matches the new global coordinates */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3541 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Any cached global coordinate vector is destroyed by this call; it is rebuilt by a
  local-to-global scatter on the next DMGetCoordinates().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Take a reference on c BEFORE destroying the old vector so c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the cached global form; it no longer matches the new local coordinates */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3576 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  If only a local coordinate vector has been set, the global vector is created here by a
  local-to-global scatter; that lazy path involves collective operations, so all ranks
  must be in the same state for it to be safe.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the global vector from the cached local coordinates, if only those exist */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* May be PETSC_NULL if no coordinates (global or local) have ever been set */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3622 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  If only a global coordinate vector has been set, the local vector is created here by a
  global-to-local scatter, which is why this routine is collective.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the local (ghosted) vector from the cached global coordinates, if only those exist */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* May be PETSC_NULL if no coordinates (global or local) have ever been set */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3668 
3669 #undef __FUNCT__
3670 #define __FUNCT__ "DMGetCoordinateDM"
3671 /*@
3672   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3673 
3674   Collective on DM
3675 
3676   Input Parameter:
3677 . dm - the DM
3678 
3679   Output Parameter:
3680 . cdm - coordinate DM
3681 
3682   Level: intermediate
3683 
3684 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3685 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3686 @*/
3687 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3688 {
3689   PetscErrorCode ierr;
3690 
3691   PetscFunctionBegin;
3692   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3693   PetscValidPointer(cdm,2);
3694   if (!dm->coordinateDM) {
3695     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3696     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3697   }
3698   *cdm = dm->coordinateDM;
3699   PetscFunctionReturn(0);
3700 }
3701