xref: /petsc/src/dm/interface/dm.c (revision 5763e96a680df71a0e7a0867db50b84a978e2c5b)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the packages must be registered explicitly before first use */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty ops table; DMSetType() installs the implementation */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 is assumed to match the size of nullspaceConstructors[] in dmimpl.h -- confirm */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
64 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* release any previously stored type name, then keep our own copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
93 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM has been composed with the vector

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* the DM is attached to the Vec under this private key by VecSetDM() */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* release any previously stored type name, then keep our own copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM has been composed with the matrix

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* the DM is attached to the Mat under this private key by MatSetDM() */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
203 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any composed DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* a PETSC_NULL dm clears the composition */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
229 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
261 
262 #undef __FUNCT__
263 #define __FUNCT__ "DMDestroy"
264 /*@
265     DMDestroy - Destroys a vector packer or DMDA.
266 
267     Collective on DM
268 
269     Input Parameter:
270 .   dm - the DM object to destroy
271 
272     Level: developer
273 
274 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
275 
276 @*/
277 PetscErrorCode  DMDestroy(DM *dm)
278 {
279   PetscInt       i, cnt = 0, f;
280   DMNamedVecLink nlink,nnext;
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   if (!*dm) PetscFunctionReturn(0);
285   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
286 
287   /* count all the circular references of DM and its contained Vecs */
288   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
289     if ((*dm)->localin[i])  {cnt++;}
290     if ((*dm)->globalin[i]) {cnt++;}
291   }
292   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
293   if ((*dm)->x) {
294     DM obj;
295     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
296     if (obj == *dm) cnt++;
297   }
298 
299   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
300   /*
301      Need this test because the dm references the vectors that
302      reference the dm, so destroying the dm calls destroy on the
303      vectors that cause another destroy on the dm
304   */
305   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
306   ((PetscObject) (*dm))->refct = 0;
307   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
308     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
309     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
310   }
311   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
312     nnext = nlink->next;
313     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
314     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
315     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
316     ierr = PetscFree(nlink);CHKERRQ(ierr);
317   }
318   (*dm)->namedglobal = PETSC_NULL;
319 
320   /* Destroy the list of hooks */
321   {
322     DMCoarsenHookLink link,next;
323     for (link=(*dm)->coarsenhook; link; link=next) {
324       next = link->next;
325       ierr = PetscFree(link);CHKERRQ(ierr);
326     }
327     (*dm)->coarsenhook = PETSC_NULL;
328   }
329   {
330     DMRefineHookLink link,next;
331     for (link=(*dm)->refinehook; link; link=next) {
332       next = link->next;
333       ierr = PetscFree(link);CHKERRQ(ierr);
334     }
335     (*dm)->refinehook = PETSC_NULL;
336   }
337   /* Destroy the work arrays */
338   {
339     DMWorkLink link,next;
340     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
341     for (link=(*dm)->workin; link; link=next) {
342       next = link->next;
343       ierr = PetscFree(link->mem);CHKERRQ(ierr);
344       ierr = PetscFree(link);CHKERRQ(ierr);
345     }
346     (*dm)->workin = PETSC_NULL;
347   }
348 
349   if ((*dm)->ctx && (*dm)->ctxdestroy) {
350     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
351   }
352   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
353   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
354   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
355   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
356   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
357   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
358   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
359 
360   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
361   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
362   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
363   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
364   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
365 
366   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
367   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
368   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
369 
370   for (f = 0; f < (*dm)->numFields; ++f) {
371     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
372   }
373   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
374   /* if memory was published with AMS then destroy it */
375   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
376 
377   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
378   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
379   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
380   PetscFunctionReturn(0);
381 }
382 
383 #undef __FUNCT__
384 #define __FUNCT__ "DMSetUp"
385 /*@
386     DMSetUp - sets up the data structures inside a DM object
387 
388     Collective on DM
389 
390     Input Parameter:
391 .   dm - the DM object to setup
392 
393     Level: developer
394 
395 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
396 
397 @*/
398 PetscErrorCode  DMSetUp(DM dm)
399 {
400   PetscErrorCode ierr;
401 
402   PetscFunctionBegin;
403   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
404   if (dm->setupcalled) PetscFunctionReturn(0);
405   if (dm->ops->setup) {
406     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
407   }
408   dm->setupcalled = PETSC_TRUE;
409   PetscFunctionReturn(0);
410 }
411 
412 #undef __FUNCT__
413 #define __FUNCT__ "DMSetFromOptions"
414 /*@
415     DMSetFromOptions - sets parameters in a DM from the options database
416 
417     Collective on DM
418 
419     Input Parameter:
420 .   dm - the DM object to set options for
421 
422     Options Database:
423 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
424 .   -dm_vec_type <type>  type of vector to create inside DM
425 .   -dm_mat_type <type>  type of matrix to create inside DM
426 -   -dm_coloring_type <global or ghosted>
427 
428     Level: developer
429 
430 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
431 
432 @*/
433 PetscErrorCode  DMSetFromOptions(DM dm)
434 {
435   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
436   PetscErrorCode ierr;
437   char           typeName[256] = MATAIJ;
438 
439   PetscFunctionBegin;
440   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
441   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
442     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
443     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
444     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
445     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
448     if (flg) {
449       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
450     }
451     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
452     if (flg) {
453       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
454     }
455     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
456     if (dm->ops->setfromoptions) {
457       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
458     }
459     /* process any options handlers added with PetscObjectAddOptionsHandler() */
460     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
461   ierr = PetscOptionsEnd();CHKERRQ(ierr);
462   if (flg1) {
463     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
464   }
465   if (flg2) {
466     PetscViewer viewer;
467 
468     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
469     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
470     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
471     ierr = DMView(dm, viewer);CHKERRQ(ierr);
472     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
473   }
474   if (flg3) {
475     PetscViewer viewer;
476 
477     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
478     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
479     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
480     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
481     ierr = DMView(dm, viewer);CHKERRQ(ierr);
482     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
483   }
484   if (flg4) {
485     PetscViewer viewer;
486 
487     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
488     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
489     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
490     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
491     ierr = DMView(dm, viewer);CHKERRQ(ierr);
492     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
493   }
494   PetscFunctionReturn(0);
495 }
496 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer (PETSC_NULL selects the standard-output viewer for the DM's communicator)

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    PetscInt         classid = DM_FILE_CLASSID;
    MPI_Comm         comm;
    PetscMPIInt      rank;
    char             type[256];

    /* rank 0 writes a header (classid, fixed-width type name) so the DM type can be recovered when reading the file back */
    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  /* delegate the type-specific output */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
543 
544 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
545 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
546 
547 #undef __FUNCT__
548 #define __FUNCT__ "DMCreateGlobalVector"
549 /*@
550     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
551 
552     Collective on DM
553 
554     Input Parameter:
555 .   dm - the DM object
556 
557     Output Parameter:
558 .   vec - the global vector
559 
560     Level: beginner
561 
562 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
563 
564 @*/
565 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
566 {
567   PetscErrorCode ierr;
568 
569   PetscFunctionBegin;
570   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
571   if (dm->defaultSection) {
572     PetscSection gSection;
573     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
574 
575     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
576     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
577     for(p = pStart; p < pEnd; ++p) {
578       PetscInt dof, cdof;
579 
580       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
581       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
582       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
583       if ((dof > 0) && (dof-cdof != blockSize)) {
584         blockSize = 1;
585         break;
586       }
587     }
588     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
589     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
590     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
591     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
592     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
593     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
594     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
595     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
596     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
597     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
598     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
599     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
600   } else {
601     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
602   }
603   PetscFunctionReturn(0);
604 }
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "DMCreateLocalVector"
608 /*@
609     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
610 
611     Not Collective
612 
613     Input Parameter:
614 .   dm - the DM object
615 
616     Output Parameter:
617 .   vec - the local vector
618 
619     Level: beginner
620 
621 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
622 
623 @*/
624 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
625 {
626   PetscErrorCode ierr;
627 
628   PetscFunctionBegin;
629   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
630   if (dm->defaultSection) {
631     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
632 
633     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
634     for(p = pStart; p < pEnd; ++p) {
635       PetscInt dof;
636 
637       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
638       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
639       if ((dof > 0) && (dof != blockSize)) {
640         blockSize = 1;
641         break;
642       }
643     }
644     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
645     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
646     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
647     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
648     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
649     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
650     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
651   } else {
652     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
653   }
654   PetscFunctionReturn(0);
655 }
656 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first call and cached in the DM.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the mapping directly from the default (local) and global sections */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of ltog, no copy is made */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No section available: delegate to the DM implementation */
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
717 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping (owned by the DM; do not destroy)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* block size 1: the blocked mapping is just the point mapping; share it with an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
760 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs starts at 1 in DMCreate(); a value below 1 means it was invalidated/never determined */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
787 
788 #undef __FUNCT__
789 #define __FUNCT__ "DMCreateInterpolation"
790 /*@
791     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
792 
793     Collective on DM
794 
795     Input Parameter:
796 +   dm1 - the DM object
797 -   dm2 - the second, finer DM object
798 
799     Output Parameter:
800 +  mat - the interpolation
801 -  vec - the scaling (optional)
802 
803     Level: developer
804 
805     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
806         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
807 
808         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
809         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
810 
811 
812 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
813 
814 @*/
815 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
816 {
817   PetscErrorCode ierr;
818 
819   PetscFunctionBegin;
820   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
821   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
822   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
823   PetscFunctionReturn(0);
824 }
825 
826 #undef __FUNCT__
827 #define __FUNCT__ "DMCreateInjection"
828 /*@
829     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
830 
831     Collective on DM
832 
833     Input Parameter:
834 +   dm1 - the DM object
835 -   dm2 - the second, finer DM object
836 
837     Output Parameter:
838 .   ctx - the injection
839 
840     Level: developer
841 
842    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
843         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
844 
845 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
846 
847 @*/
848 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
849 {
850   PetscErrorCode ierr;
851 
852   PetscFunctionBegin;
853   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
854   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
855   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
856   PetscFunctionReturn(0);
857 }
858 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
889 
890 #undef __FUNCT__
891 #define __FUNCT__ "DMCreateMatrix"
892 /*@C
893     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
894 
895     Collective on DM
896 
897     Input Parameter:
898 +   dm - the DM object
899 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
900             any type which inherits from one of these (such as MATAIJ)
901 
902     Output Parameter:
903 .   mat - the empty Jacobian
904 
905     Level: beginner
906 
907     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
908        do not need to do it yourself.
909 
910        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
911        the nonzero pattern call DMDASetMatPreallocateOnly()
912 
913        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
914        internally by PETSc.
915 
916        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
917        the indices for the global numbering for DMDAs which is complicated.
918 
919 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
920 
921 @*/
922 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
923 {
924   PetscErrorCode ierr;
925 
926   PetscFunctionBegin;
927   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
928 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
929   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
930 #endif
931   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
932   PetscValidPointer(mat,3);
933   if (dm->mattype) {
934     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
935   } else {
936     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
937   }
938   PetscFunctionReturn(0);
939 }
940 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored flag is consulted by the type-specific matrix creation routines */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
963 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer; on return it points at the work array

  Level: developer

  Notes: The array is owned by the DM and recycled through an internal free list,
  so repeated Get/Restore cycles do not allocate each time.

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse a link from the free ("in") list when one is available */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    /* PetscNewLog() zeroes the link, so link->mem is NULL and link->bytes is 0 below */
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    /* Buffer too small: grow it (PetscFree() of a NULL pointer is a no-op) */
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out ("out") list so DMRestoreWorkArray() can find it */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1009 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (currently unused; kept for symmetry with DMGetWorkArray())
- dtype - data type (currently unused; kept for symmetry with DMGetWorkArray())

  Output Parameter:
. mem - address of the array pointer; set to PETSC_NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the matching link on the checked-out list and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was not handed out by DMGetWorkArray() on this DM */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1048 
1049 #undef __FUNCT__
1050 #define __FUNCT__ "DMSetNullSpaceConstructor"
1051 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1052 {
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1055   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1056   dm->nullspaceConstructors[field] = nullsp;
1057   PetscFunctionReturn(0);
1058 }
1059 
1060 #undef __FUNCT__
1061 #define __FUNCT__ "DMCreateFieldIS"
1062 /*@C
1063   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1064 
1065   Not collective
1066 
1067   Input Parameter:
1068 . dm - the DM object
1069 
1070   Output Parameters:
1071 + numFields  - The number of fields (or PETSC_NULL if not requested)
1072 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1073 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1074 
1075   Level: intermediate
1076 
1077   Notes:
1078   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1079   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1080   PetscFree().
1081 
1082 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1083 @*/
1084 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1085 {
1086   PetscSection   section, sectionGlobal;
1087   PetscErrorCode ierr;
1088 
1089   PetscFunctionBegin;
1090   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1091   if (numFields) {
1092     PetscValidPointer(numFields,2);
1093     *numFields = 0;
1094   }
1095   if (fieldNames) {
1096     PetscValidPointer(fieldNames,3);
1097     *fieldNames = PETSC_NULL;
1098   }
1099   if (fields) {
1100     PetscValidPointer(fields,4);
1101     *fields = PETSC_NULL;
1102   }
1103   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1104   if (section) {
1105     PetscInt *fieldSizes, **fieldIndices;
1106     PetscInt  nF, f, pStart, pEnd, p;
1107 
1108     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1109     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1110     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1111     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1112     for (f = 0; f < nF; ++f) {
1113       fieldSizes[f] = 0;
1114     }
1115     for (p = pStart; p < pEnd; ++p) {
1116       PetscInt gdof;
1117 
1118       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1119       if (gdof > 0) {
1120         for (f = 0; f < nF; ++f) {
1121           PetscInt fdof, fcdof;
1122 
1123           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1124           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1125           fieldSizes[f] += fdof-fcdof;
1126         }
1127       }
1128     }
1129     for (f = 0; f < nF; ++f) {
1130       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1131       fieldSizes[f] = 0;
1132     }
1133     for (p = pStart; p < pEnd; ++p) {
1134       PetscInt gdof, goff;
1135 
1136       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1137       if (gdof > 0) {
1138         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1139         for (f = 0; f < nF; ++f) {
1140           PetscInt fdof, fcdof, fc;
1141 
1142           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1143           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1144           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1145             fieldIndices[f][fieldSizes[f]] = goff++;
1146           }
1147         }
1148       }
1149     }
1150     if (numFields) {*numFields = nF;}
1151     if (fieldNames) {
1152       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1153       for (f = 0; f < nF; ++f) {
1154         const char *fieldName;
1155 
1156         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1157         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1158       }
1159     }
1160     if (fields) {
1161       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1162       for (f = 0; f < nF; ++f) {
1163         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1164       }
1165     }
1166     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1167   } else {
1168     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1169   }
1170   PetscFunctionReturn(0);
1171 }
1172 
1173 #undef __FUNCT__
1174 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1175 /*@C
1176   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1177 
1178   Not Collective
1179 
1180   Input Parameters:
1181 + dm   - the DM object
1182 - name - the name of the field decomposition
1183 
1184   Output Parameter:
1185 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1186 
1187   Level: advanced
1188 
1189 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1190 @*/
1191 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1192 {
1193   PetscErrorCode ierr;
1194 
1195   PetscFunctionBegin;
1196   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1197   PetscValidCharPointer(name,2);
1198   PetscValidPointer(ddm,3);
1199   *ddm = PETSC_NULL;
1200   if (dm->ops->createfielddecompositiondm) {
1201     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1202   }
1203   PetscFunctionReturn(0);
1204 }
1205 
1206 #undef __FUNCT__
1207 #define __FUNCT__ "DMCreateFieldDecomposition"
1208 /*@C
1209   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1210                           corresponding to different fields: each IS contains the global indices of the dofs of the
1211                           corresponding field. The optional list of DMs define the DM for each subproblem.
1212                           Generalizes DMCreateFieldIS().
1213 
1214   Not collective
1215 
1216   Input Parameter:
1217 . dm - the DM object
1218 
1219   Output Parameters:
1220 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1221 . namelist  - The name for each field (or PETSC_NULL if not requested)
1222 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1223 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1224 
1225   Level: intermediate
1226 
1227   Notes:
1228   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1229   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1230   and all of the arrays should be freed with PetscFree().
1231 
1232 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1233 @*/
1234 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1235 {
1236   PetscErrorCode ierr;
1237 
1238   PetscFunctionBegin;
1239   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1240   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1241   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1242   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1243   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1244   if (!dm->ops->createfielddecomposition) {
1245     PetscSection section;
1246     PetscInt     numFields, f;
1247 
1248     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1249     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1250     if (section && numFields && dm->ops->createsubdm) {
1251       *len = numFields;
1252       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1253       for (f = 0; f < numFields; ++f) {
1254         const char *fieldName;
1255 
1256         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1257         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1258         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1259       }
1260     } else {
1261       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1262       /* By default there are no DMs associated with subproblems. */
1263       if (dmlist) *dmlist = PETSC_NULL;
1264     }
1265   }
1266   else {
1267     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1268   }
1269   PetscFunctionReturn(0);
1270 }
1271 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers defining the subproblem, an array of length numFields

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1307 
1308 #undef __FUNCT__
1309 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1310 /*@C
1311   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1312 
1313   Not Collective
1314 
1315   Input Parameters:
1316 + dm   - the DM object
1317 - name - the name of the subdomain decomposition
1318 
1319   Output Parameter:
1320 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1321 
1322   Level: advanced
1323 
1324 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1325 @*/
1326 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1327 {
1328   PetscErrorCode ierr;
1329 
1330   PetscFunctionBegin;
1331   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1332   PetscValidCharPointer(name,2);
1333   PetscValidPointer(ddm,3);
1334   *ddm = PETSC_NULL;
1335   if (dm->ops->createdomaindecompositiondm) {
1336     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1337   }
1338   PetscFunctionReturn(0);
1339 }
1340 
1341 #undef __FUNCT__
1342 #define __FUNCT__ "DMCreateDomainDecomposition"
1343 /*@C
1344   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1345                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1346                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1347                           define a nonoverlapping covering, while outer subdomains can overlap.
1348                           The optional list of DMs define the DM for each subproblem.
1349 
1350   Not collective
1351 
1352   Input Parameter:
1353 . dm - the DM object
1354 
1355   Output Parameters:
1356 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1357 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1358 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1359 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1360 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1361 
1362   Level: intermediate
1363 
1364   Notes:
1365   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1366   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1367   and all of the arrays should be freed with PetscFree().
1368 
1369 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1370 @*/
1371 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1372 {
1373   PetscErrorCode ierr;
1374 
1375   PetscFunctionBegin;
1376   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1377   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1378   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1379   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1380   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1381   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1382   if (dm->ops->createdomaindecomposition) {
1383     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1384   }
1385   PetscFunctionReturn(0);
1386 }
1387 
1388 #undef __FUNCT__
1389 #define __FUNCT__ "DMRefine"
1390 /*@
1391   DMRefine - Refines a DM object
1392 
1393   Collective on DM
1394 
1395   Input Parameter:
1396 + dm   - the DM object
1397 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1398 
1399   Output Parameter:
1400 . dmf - the refined DM, or PETSC_NULL
1401 
1402   Note: If no refinement was done, the return value is PETSC_NULL
1403 
1404   Level: developer
1405 
1406 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1407 @*/
1408 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1409 {
1410   PetscErrorCode ierr;
1411   DMRefineHookLink link;
1412 
1413   PetscFunctionBegin;
1414   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1415   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1416   if (*dmf) {
1417     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1418     (*dmf)->ops->initialguess = dm->ops->initialguess;
1419     (*dmf)->ops->function     = dm->ops->function;
1420     (*dmf)->ops->functionj    = dm->ops->functionj;
1421     if (dm->ops->jacobian != DMComputeJacobianDefault) {
1422       (*dmf)->ops->jacobian     = dm->ops->jacobian;
1423     }
1424     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1425     (*dmf)->ctx       = dm->ctx;
1426     (*dmf)->leveldown = dm->leveldown;
1427     (*dmf)->levelup   = dm->levelup + 1;
1428     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1429     for (link=dm->refinehook; link; link=link->next) {
1430       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1431     }
1432   }
1433   PetscFunctionReturn(0);
1434 }
1435 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - DM on which to run hooks when it is refined or interpolated to a finer level
.  refinehook - function to run when setting up the finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1489 
1490 #undef __FUNCT__
1491 #define __FUNCT__ "DMInterpolate"
1492 /*@
1493    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1494 
1495    Collective if any hooks are
1496 
1497    Input Arguments:
1498 +  coarse - coarser DM to use as a base
1499 .  restrct - interpolation matrix, apply using MatInterpolate()
1500 -  fine - finer DM to update
1501 
1502    Level: developer
1503 
1504 .seealso: DMRefineHookAdd(), MatInterpolate()
1505 @*/
1506 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1507 {
1508   PetscErrorCode ierr;
1509   DMRefineHookLink link;
1510 
1511   PetscFunctionBegin;
1512   for (link=fine->refinehook; link; link=link->next) {
1513     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1514   }
1515   PetscFunctionReturn(0);
1516 }
1517 
1518 #undef __FUNCT__
1519 #define __FUNCT__ "DMGetRefineLevel"
1520 /*@
1521     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1522 
1523     Not Collective
1524 
1525     Input Parameter:
1526 .   dm - the DM object
1527 
1528     Output Parameter:
1529 .   level - number of refinements
1530 
1531     Level: developer
1532 
1533 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1534 
1535 @*/
1536 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1537 {
1538   PetscFunctionBegin;
1539   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1540   *level = dm->levelup;
1541   PetscFunctionReturn(0);
1542 }
1543 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* If the DM carries a default star forest, use it for the communication;
     otherwise defer to the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): the arrays are restored here while the broadcast completes only in
       DMGlobalToLocalEnd(); this assumes the End call's VecGetArray() returns the same
       buffers the SF is operating on -- confirm against the Vec/SF implementations */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The *_ALL_VALUES modes are collapsed to their plain counterparts for the implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1585 
1586 #undef __FUNCT__
1587 #define __FUNCT__ "DMGlobalToLocalEnd"
1588 /*@
1589     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1590 
1591     Neighbor-wise Collective on DM
1592 
1593     Input Parameters:
1594 +   dm - the DM object
1595 .   g - the global vector
1596 .   mode - INSERT_VALUES or ADD_VALUES
1597 -   l - the local vector
1598 
1599 
1600     Level: beginner
1601 
1602 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1603 
1604 @*/
1605 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1606 {
1607   PetscSF        sf;
1608   PetscErrorCode ierr;
1609   PetscScalar    *lArray, *gArray;
1610 
1611   PetscFunctionBegin;
1612   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1613   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1614   if (sf) {
1615   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1616 
1617     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1618     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1619     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1620     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1621     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1622   } else {
1623     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1624   }
1625   PetscFunctionReturn(0);
1626 }
1627 
1628 #undef __FUNCT__
1629 #define __FUNCT__ "DMLocalToGlobalBegin"
1630 /*@
1631     DMLocalToGlobalBegin - updates global vectors from local vectors
1632 
1633     Neighbor-wise Collective on DM
1634 
1635     Input Parameters:
1636 +   dm - the DM object
1637 .   l - the local vector
1638 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1639            base point.
1640 - - the global vector
1641 
1642     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1643            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1644            global array to the final global array with VecAXPY().
1645 
1646     Level: beginner
1647 
1648 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1649 
1650 @*/
1651 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1652 {
1653   PetscSF        sf;
1654   PetscErrorCode ierr;
1655 
1656   PetscFunctionBegin;
1657   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1658   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1659   if (sf) {
1660     MPI_Op       op;
1661     PetscScalar *lArray, *gArray;
1662 
1663     switch(mode) {
1664     case INSERT_VALUES:
1665     case INSERT_ALL_VALUES:
1666 #if defined(PETSC_HAVE_MPI_REPLACE)
1667       op = MPI_REPLACE; break;
1668 #else
1669       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1670 #endif
1671     case ADD_VALUES:
1672     case ADD_ALL_VALUES:
1673       op = MPI_SUM; break;
1674   default:
1675     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1676     }
1677     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1678     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1679     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1680     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1681     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1682   } else {
1683     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1684   }
1685   PetscFunctionReturn(0);
1686 }
1687 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors; completes the communication started with DMLocalToGlobalBegin()

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Section-based path: finish the PetscSF reduction that moves local values
       onto their owning global entries. The MPI_Op must match the one chosen
       in DMLocalToGlobalBegin(). */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it INSERT cannot be expressed as a reduction */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementation-provided path; the *_ALL_VALUES modes are mapped down to the
       plain modes since type-specific implementations only understand those */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1743 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - always set to SAME_NONZERO_PATTERN since finite differencing does not change the sparsity pattern

    Notes:
    Uses the MatFDColoring object stored in dm->fd (created by DMComputeJacobian() the
    first time no user Jacobian routine is available) to fill B by finite differences.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* fill B by finite differencing the function with the coloring cached in dm->fd */
  ierr  = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  if (A != B) {
    /* A was not assembled by the FD application above; assemble it so it is usable */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1777 
1778 #undef __FUNCT__
1779 #define __FUNCT__ "DMCoarsen"
1780 /*@
1781     DMCoarsen - Coarsens a DM object
1782 
1783     Collective on DM
1784 
1785     Input Parameter:
1786 +   dm - the DM object
1787 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1788 
1789     Output Parameter:
1790 .   dmc - the coarsened DM
1791 
1792     Level: developer
1793 
1794 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1795 
1796 @*/
1797 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1798 {
1799   PetscErrorCode ierr;
1800   DMCoarsenHookLink link;
1801 
1802   PetscFunctionBegin;
1803   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1804   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1805   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1806   (*dmc)->ops->initialguess = dm->ops->initialguess;
1807   (*dmc)->ops->function     = dm->ops->function;
1808   (*dmc)->ops->functionj    = dm->ops->functionj;
1809   if (dm->ops->jacobian != DMComputeJacobianDefault) {
1810     (*dmc)->ops->jacobian     = dm->ops->jacobian;
1811   }
1812   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1813   (*dmc)->ctx       = dm->ctx;
1814   (*dmc)->levelup   = dm->levelup;
1815   (*dmc)->leveldown = dm->leveldown + 1;
1816   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1817   for (link=dm->coarsenhook; link; link=link->next) {
1818     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1819   }
1820   PetscFunctionReturn(0);
1821 }
1822 
1823 #undef __FUNCT__
1824 #define __FUNCT__ "DMCoarsenHookAdd"
1825 /*@
1826    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1827 
1828    Logically Collective
1829 
1830    Input Arguments:
1831 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1832 .  coarsenhook - function to run when setting up a coarser level
1833 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1834 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1835 
1836    Calling sequence of coarsenhook:
1837 $    coarsenhook(DM fine,DM coarse,void *ctx);
1838 
1839 +  fine - fine level DM
1840 .  coarse - coarse level DM to restrict problem to
1841 -  ctx - optional user-defined function context
1842 
1843    Calling sequence for restricthook:
1844 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1845 
1846 +  fine - fine level DM
1847 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1848 .  rscale - scaling vector for restriction
1849 .  inject - matrix restricting by injection
1850 .  coarse - coarse level DM to update
1851 -  ctx - optional user-defined function context
1852 
1853    Level: advanced
1854 
1855    Notes:
1856    This function is only needed if auxiliary data needs to be set up on coarse grids.
1857 
1858    If this function is called multiple times, the hooks will be run in the order they are added.
1859 
1860    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1861    extract the finest level information from its context (instead of from the SNES).
1862 
1863 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1864 @*/
1865 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1866 {
1867   PetscErrorCode ierr;
1868   DMCoarsenHookLink link,*p;
1869 
1870   PetscFunctionBegin;
1871   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1872   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1873   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1874   link->coarsenhook = coarsenhook;
1875   link->restricthook = restricthook;
1876   link->ctx = ctx;
1877   link->next = PETSC_NULL;
1878   *p = link;
1879   PetscFunctionReturn(0);
1880 }
1881 
1882 #undef __FUNCT__
1883 #define __FUNCT__ "DMRestrict"
1884 /*@
1885    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1886 
1887    Collective if any hooks are
1888 
1889    Input Arguments:
1890 +  fine - finer DM to use as a base
1891 .  restrct - restriction matrix, apply using MatRestrict()
1892 .  inject - injection matrix, also use MatRestrict()
1893 -  coarse - coarer DM to update
1894 
1895    Level: developer
1896 
1897 .seealso: DMCoarsenHookAdd(), MatRestrict()
1898 @*/
1899 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1900 {
1901   PetscErrorCode ierr;
1902   DMCoarsenHookLink link;
1903 
1904   PetscFunctionBegin;
1905   for (link=fine->coarsenhook; link; link=link->next) {
1906     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1907   }
1908   PetscFunctionReturn(0);
1909 }
1910 
#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrictHookAdd"
/*@
   DMBlockRestrictHookAdd - adds a callback to be run when restricting a problem to a block DM

   Logically Collective

   Input Arguments:
+  global - global DM
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on block DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMBlockRestrictHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1959 
1960 #undef __FUNCT__
1961 #define __FUNCT__ "DMBlockRestrict"
1962 /*@
1963    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1964 
1965    Collective if any hooks are
1966 
1967    Input Arguments:
1968 +  fine - finer DM to use as a base
1969 .  restrct - restriction matrix, apply using MatRestrict()
1970 .  inject - injection matrix, also use MatRestrict()
1971 -  coarse - coarer DM to update
1972 
1973    Level: developer
1974 
1975 .seealso: DMCoarsenHookAdd(), MatRestrict()
1976 @*/
1977 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1978 {
1979   PetscErrorCode ierr;
1980   DMBlockRestrictHookLink link;
1981 
1982   PetscFunctionBegin;
1983   for (link=global->blockrestricthook; link; link=link->next) {
1984     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1985   }
1986   PetscFunctionReturn(0);
1987 }
1988 
1989 #undef __FUNCT__
1990 #define __FUNCT__ "DMGetCoarsenLevel"
1991 /*@
1992     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
1993 
1994     Not Collective
1995 
1996     Input Parameter:
1997 .   dm - the DM object
1998 
1999     Output Parameter:
2000 .   level - number of coarsenings
2001 
2002     Level: developer
2003 
2004 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2005 
2006 @*/
2007 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2008 {
2009   PetscFunctionBegin;
2010   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2011   *level = dm->leveldown;
2012   PetscFunctionReturn(0);
2013 }
2014 
2015 
2016 
2017 #undef __FUNCT__
2018 #define __FUNCT__ "DMRefineHierarchy"
2019 /*@C
2020     DMRefineHierarchy - Refines a DM object, all levels at once
2021 
2022     Collective on DM
2023 
2024     Input Parameter:
2025 +   dm - the DM object
2026 -   nlevels - the number of levels of refinement
2027 
2028     Output Parameter:
2029 .   dmf - the refined DM hierarchy
2030 
2031     Level: developer
2032 
2033 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2034 
2035 @*/
2036 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2037 {
2038   PetscErrorCode ierr;
2039 
2040   PetscFunctionBegin;
2041   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2042   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2043   if (nlevels == 0) PetscFunctionReturn(0);
2044   if (dm->ops->refinehierarchy) {
2045     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2046   } else if (dm->ops->refine) {
2047     PetscInt i;
2048 
2049     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2050     for (i=1; i<nlevels; i++) {
2051       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2052     }
2053   } else {
2054     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2055   }
2056   PetscFunctionReturn(0);
2057 }
2058 
#undef __FUNCT__
#define __FUNCT__ "DMCoarsenHierarchy"
/*@C
    DMCoarsenHierarchy - Coarsens a DM object, all levels at once

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   nlevels - the number of levels of coarsening

    Output Parameter:
.   dmc - the coarsened DM hierarchy

    Level: developer

.seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
  if (nlevels == 0) PetscFunctionReturn(0);
  PetscValidPointer(dmc,3);
  if (dm->ops->coarsenhierarchy) {
    /* implementation provides all levels in one shot */
    ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
  } else if (dm->ops->coarsen) {
    /* fall back to repeated single-level coarsening */
    PetscInt i;

    ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
    for (i=1; i<nlevels; i++) {
      ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
    }
  } else {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
  }
  PetscFunctionReturn(0);
}
2101 
2102 #undef __FUNCT__
2103 #define __FUNCT__ "DMCreateAggregates"
2104 /*@
2105    DMCreateAggregates - Gets the aggregates that map between
2106    grids associated with two DMs.
2107 
2108    Collective on DM
2109 
2110    Input Parameters:
2111 +  dmc - the coarse grid DM
2112 -  dmf - the fine grid DM
2113 
2114    Output Parameters:
2115 .  rest - the restriction matrix (transpose of the projection matrix)
2116 
2117    Level: intermediate
2118 
2119 .keywords: interpolation, restriction, multigrid
2120 
2121 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2122 @*/
2123 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2124 {
2125   PetscErrorCode ierr;
2126 
2127   PetscFunctionBegin;
2128   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2129   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2130   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2131   PetscFunctionReturn(0);
2132 }
2133 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* stored only; invoked on the user context when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2157 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

    Notes: the DM does not take ownership of ctx unless a destroy routine is
    provided with DMSetApplicationContextDestroy().

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2181 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; pass the address of a pointer variable (it is
          written through as a void**)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is declared void* for caller convenience but must point at a pointer */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2207 
2208 #undef __FUNCT__
2209 #define __FUNCT__ "DMSetFunction"
2210 /*@C
2211     DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES
2212 
2213     Logically Collective on DM
2214 
2215     Input Parameter:
2216 +   dm - the DM object
2217 -   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)
2218 
2219     Level: intermediate
2220 
2221     Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite differences if no Jacobian
2222            computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.
2223 
2224 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2225          DMSetJacobian()
2226 
2227 @*/
2228 PetscErrorCode  DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2229 {
2230   PetscFunctionBegin;
2231   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2232   dm->ops->function = f;
2233   if (f) {
2234     dm->ops->functionj = f;
2235   }
2236   PetscFunctionReturn(0);
2237 }
2238 
#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameter:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetFunction()

@*/
PetscErrorCode  DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}
2263 
2264 #undef __FUNCT__
2265 #define __FUNCT__ "DMSetVariableBounds"
2266 /*@C
2267     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2268 
2269     Logically Collective on DM
2270 
2271     Input Parameter:
2272 +   dm - the DM object
2273 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2274 
2275     Level: intermediate
2276 
2277 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2278          DMSetJacobian()
2279 
2280 @*/
2281 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2282 {
2283   PetscFunctionBegin;
2284   dm->ops->computevariablebounds = f;
2285   PetscFunctionReturn(0);
2286 }
2287 
2288 #undef __FUNCT__
2289 #define __FUNCT__ "DMHasVariableBounds"
2290 /*@
2291     DMHasVariableBounds - does the DM object have a variable bounds function?
2292 
2293     Not Collective
2294 
2295     Input Parameter:
2296 .   dm - the DM object to destroy
2297 
2298     Output Parameter:
2299 .   flg - PETSC_TRUE if the variable bounds function exists
2300 
2301     Level: developer
2302 
2303 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2304 
2305 @*/
2306 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2307 {
2308   PetscFunctionBegin;
2309   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2310   PetscFunctionReturn(0);
2311 }
2312 
2313 #undef __FUNCT__
2314 #define __FUNCT__ "DMComputeVariableBounds"
2315 /*@C
2316     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2317 
2318     Logically Collective on DM
2319 
2320     Input Parameters:
2321 +   dm - the DM object to destroy
2322 -   x  - current solution at which the bounds are computed
2323 
2324     Output parameters:
2325 +   xl - lower bound
2326 -   xu - upper bound
2327 
2328     Level: intermediate
2329 
2330 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2331          DMSetFunction(), DMSetVariableBounds()
2332 
2333 @*/
2334 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2335 {
2336   PetscErrorCode ierr;
2337   PetscFunctionBegin;
2338   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2339   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2340   if (dm->ops->computevariablebounds) {
2341     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2342   }
2343   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2344   PetscFunctionReturn(0);
2345 }
2346 
#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
    DMHasFunction - does the DM object have a function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasFunction(DM dm,PetscBool  *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg =  (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}
2372 
#undef __FUNCT__
#define __FUNCT__ "DMHasJacobian"
/*@
    DMHasJacobian - does the DM object have a matrix function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasJacobian(DM dm,PetscBool  *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg =  (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}
2398 
2399 #undef __FUNCT__
2400 #define __FUNCT__ "DMHasColoring"
2401 /*@
2402     DMHasColoring - does the DM object have a method of providing a coloring?
2403 
2404     Not Collective
2405 
2406     Input Parameter:
2407 .   dm - the DM object
2408 
2409     Output Parameter:
2410 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2411 
2412     Level: developer
2413 
2414 .seealso DMHasFunction(), DMCreateColoring()
2415 
2416 @*/
2417 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2418 {
2419   PetscFunctionBegin;
2420   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2421   PetscFunctionReturn(0);
2422 }
2423 
2424 #undef  __FUNCT__
2425 #define __FUNCT__ "DMSetVec"
2426 /*@C
2427     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2428 
2429     Collective on DM
2430 
2431     Input Parameter:
2432 +   dm - the DM object
2433 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2434 
2435     Level: developer
2436 
2437 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2438          DMSetFunction(), DMSetJacobian(), DMSetVariableBounds()
2439 
2440 @*/
2441 PetscErrorCode  DMSetVec(DM dm,Vec x)
2442 {
2443   PetscErrorCode ierr;
2444   PetscFunctionBegin;
2445   if (x) {
2446     if (!dm->x) {
2447       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2448     }
2449     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2450   }
2451   else if (dm->x) {
2452     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2453   }
2454   PetscFunctionReturn(0);
2455 }
2456 
2457 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction"
/*@
    DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - the location where the function is evaluationed, may be ignored for linear problems
-   b - the vector to hold the right hand side entries

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetJacobian()

@*/
PetscErrorCode  DMComputeFunction(DM dm,Vec x,Vec b)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()");
  /* push a stack frame so errors inside the user callback are attributed to it */
  PetscStackPush("DM user function");
  ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}
2487 
2488 
2489 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian"
/*@
    DMComputeJacobian - compute the matrix entries for the solver

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - matrix structure flag set by the Jacobian routine

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian routine: lazily build a finite-difference coloring of B
       and install DMComputeJacobianDefault as the Jacobian routine from now on */
    ISColoring     coloring;
    MatFDColoring  fd;
    MatType        mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    /* differencing uses functionj, set by DMSetFunction() */
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed -- NOTE(review): balances a reference taken
       somewhere above; verify against MatFDColoringSetFunction() before changing */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    /* A may not have been assembled by the Jacobian routine; finalize it */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2550 
2551 
/* Registry of known DM implementations, populated by DMRegisterAll() and searched by DMSetType() */
PetscFList DMList                       = PETSC_NULL;
/* Guards DMRegisterAll() so the registry is filled only once */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2554 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* no-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* look up the type's constructor in the registry */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* tear down any previous implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2600 
#undef __FUNCT__
#define __FUNCT__ "DMGetType"
/*@C
  DMGetType - Gets the DM type name (as a string) from the DM.

  Not Collective

  Input Parameter:
. dm  - The DM

  Output Parameter:
. type - The DM type name

  Level: intermediate

.keywords: DM, get, type, name
.seealso: DMSetType(), DMCreate()
@*/
PetscErrorCode  DMGetType(DM dm, DMType *type)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  PetscValidCharPointer(type,2);
  if (!DMRegisterAllCalled) {
    ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
  }
  /* returns a reference to the internal name string; caller must not free it */
  *type = ((PetscObject)dm)->type_name;
  PetscFunctionReturn(0);
}
2632 
2633 #undef __FUNCT__
2634 #define __FUNCT__ "DMConvert"
2635 /*@C
2636   DMConvert - Converts a DM to another DM, either of the same or different type.
2637 
2638   Collective on DM
2639 
2640   Input Parameters:
2641 + dm - the DM
2642 - newtype - new DM type (use "same" for the same type)
2643 
2644   Output Parameter:
2645 . M - pointer to new DM
2646 
2647   Notes:
2648   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2649   the MPI communicator of the generated DM is always the same as the communicator
2650   of the input DM.
2651 
2652   Level: intermediate
2653 
2654 .seealso: DMCreate()
2655 @*/
2656 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2657 {
2658   DM             B;
2659   char           convname[256];
2660   PetscBool      sametype, issame;
2661   PetscErrorCode ierr;
2662 
2663   PetscFunctionBegin;
2664   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2665   PetscValidType(dm,1);
2666   PetscValidPointer(M,3);
2667   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2668   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2669   {
2670     PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;
2671 
2672     /*
2673        Order of precedence:
2674        1) See if a specialized converter is known to the current DM.
2675        2) See if a specialized converter is known to the desired DM class.
2676        3) See if a good general converter is registered for the desired class
2677        4) See if a good general converter is known for the current matrix.
2678        5) Use a really basic converter.
2679     */
2680 
2681     /* 1) See if a specialized converter is known to the current DM and the desired class */
2682     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2683     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2684     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2685     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2686     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2687     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2688     if (conv) goto foundconv;
2689 
2690     /* 2)  See if a specialized converter is known to the desired DM class. */
2691     ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
2692     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2693     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2694     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2695     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2696     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2697     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2698     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2699     if (conv) {
2700       ierr = DMDestroy(&B);CHKERRQ(ierr);
2701       goto foundconv;
2702     }
2703 
2704 #if 0
2705     /* 3) See if a good general converter is registered for the desired class */
2706     conv = B->ops->convertfrom;
2707     ierr = DMDestroy(&B);CHKERRQ(ierr);
2708     if (conv) goto foundconv;
2709 
2710     /* 4) See if a good general converter is known for the current matrix */
2711     if (dm->ops->convert) {
2712       conv = dm->ops->convert;
2713     }
2714     if (conv) goto foundconv;
2715 #endif
2716 
2717     /* 5) Use a really basic converter. */
2718     SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2719 
2720     foundconv:
2721     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2722     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2723     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2724   }
2725   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2726   PetscFunctionReturn(0);
2727 }
2728 
2729 /*--------------------------------------------------------------------------------------------------------------------*/
2730 
2731 #undef __FUNCT__
2732 #define __FUNCT__ "DMRegister"
2733 /*@C
2734   DMRegister - See DMRegisterDynamic()
2735 
2736   Level: advanced
2737 @*/
2738 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2739 {
2740   char fullname[PETSC_MAX_PATH_LEN];
2741   PetscErrorCode ierr;
2742 
2743   PetscFunctionBegin;
2744   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2745   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2746   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2747   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2748   PetscFunctionReturn(0);
2749 }
2750 
2751 
2752 /*--------------------------------------------------------------------------------------------------------------------*/
2753 #undef __FUNCT__
2754 #define __FUNCT__ "DMRegisterDestroy"
2755 /*@C
2756    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2757 
2758    Not Collective
2759 
2760    Level: advanced
2761 
2762 .keywords: DM, register, destroy
2763 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2764 @*/
2765 PetscErrorCode  DMRegisterDestroy(void)
2766 {
2767   PetscErrorCode ierr;
2768 
2769   PetscFunctionBegin;
2770   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2771   DMRegisterAllCalled = PETSC_FALSE;
2772   PetscFunctionReturn(0);
2773 }
2774 
2775 #if defined(PETSC_HAVE_MATLAB_ENGINE)
2776 #include <mex.h>
2777 
2778 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;
2779 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the MATLAB function whose name has been set with
                         DMSetFunctionMatlab().

   For linear problems x is null

   The DM and Vec handles are passed to MATLAB as pointer values packed into
   doubles; the MATLAB-side routine PetscDMComputeFunctionInternal unpacks
   them back into PETSc objects.

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 1,nrhs = 4;      /* one MATLAB output (error code), four inputs */
  mxArray           *plhs[1],*prhs[4];
  long long int     lx = 0,ly = 0,ls = 0;   /* object handles, bit-copied from the pointers */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* bit-copy each pointer into a zero-initialized 64-bit integer so it survives
     the round trip through a double scalar */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)ly);
  prhs[3] =  mxCreateString(sctx->funcname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* the MATLAB routine returns a PETSc error code as its scalar result */
  ierr    =  mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}
2821 
2822 
2823 #undef __FUNCT__
2824 #define __FUNCT__ "DMSetFunctionMatlab"
2825 /*
2826    DMSetFunctionMatlab - Sets the function evaluation routine
2827 
2828 */
2829 PetscErrorCode  DMSetFunctionMatlab(DM dm,const char *func)
2830 {
2831   PetscErrorCode    ierr;
2832   DMMatlabContext   *sctx;
2833 
2834   PetscFunctionBegin;
2835   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2836   /* currently sctx is memory bleed */
2837   ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
2838   if (!sctx) {
2839     ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
2840   }
2841   ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
2842   ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
2843   ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
2844   PetscFunctionReturn(0);
2845 }
2846 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the MATLAB function whose name has been set with
                         DMSetJacobianMatlab().

   For linear problems x is null

   The DM, Vec, and Mat handles are passed to MATLAB as pointer values packed
   into doubles; the MATLAB-side routine PetscDMComputeJacobianInternal unpacks
   them back into PETSc objects.

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 2,nrhs = 5;            /* two MATLAB outputs (flag, error code), five inputs */
  mxArray           *plhs[2],*prhs[5];
  long long int     lx = 0,lA = 0,lB = 0,ls = 0;  /* object handles, bit-copied from the pointers */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* bit-copy each pointer into a zero-initialized 64-bit integer so it survives
     the round trip through a double scalar */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)lA);
  prhs[3] =  mxCreateDoubleScalar((double)lB);
  prhs[4] =  mxCreateString(sctx->jacname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* first output is the MatStructure flag, second is a PETSc error code */
  *str    =  (MatStructure) mxGetScalar(plhs[0]);
  ierr    =  (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}
2892 
2893 
2894 #undef __FUNCT__
2895 #define __FUNCT__ "DMSetJacobianMatlab"
2896 /*
2897    DMSetJacobianMatlab - Sets the Jacobian function evaluation routine
2898 
2899 */
2900 PetscErrorCode  DMSetJacobianMatlab(DM dm,const char *func)
2901 {
2902   PetscErrorCode    ierr;
2903   DMMatlabContext   *sctx;
2904 
2905   PetscFunctionBegin;
2906   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2907   /* currently sctx is memory bleed */
2908   ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
2909   if (!sctx) {
2910     ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
2911   }
2912   ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
2913   ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
2914   ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
2915   PetscFunctionReturn(0);
2916 }
2917 #endif
2918 
2919 #undef __FUNCT__
2920 #define __FUNCT__ "DMLoad"
2921 /*@C
2922   DMLoad - Loads a DM that has been stored in binary  with DMView().
2923 
2924   Collective on PetscViewer
2925 
2926   Input Parameters:
2927 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2928            some related function before a call to DMLoad().
2929 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2930            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2931 
2932    Level: intermediate
2933 
2934   Notes:
2935    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2936 
2937   Notes for advanced users:
2938   Most users should not need to know the details of the binary storage
2939   format, since DMLoad() and DMView() completely hide these details.
2940   But for anyone who's interested, the standard binary matrix storage
2941   format is
2942 .vb
2943      has not yet been determined
2944 .ve
2945 
2946 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2947 @*/
2948 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2949 {
2950   PetscErrorCode ierr;
2951   PetscBool      isbinary;
2952   PetscInt       classid;
2953   char           type[256];
2954 
2955   PetscFunctionBegin;
2956   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2957   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2958   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2959   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2960 
2961   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2962   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2963   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2964   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2965   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2966   PetscFunctionReturn(0);
2967 }
2968 
2969 /******************************** FEM Support **********************************/
2970 
2971 #undef __FUNCT__
2972 #define __FUNCT__ "DMPrintCellVector"
2973 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2974   PetscInt       f;
2975   PetscErrorCode ierr;
2976 
2977   PetscFunctionBegin;
2978   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2979   for (f = 0; f < len; ++f) {
2980     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2981   }
2982   PetscFunctionReturn(0);
2983 }
2984 
2985 #undef __FUNCT__
2986 #define __FUNCT__ "DMPrintCellMatrix"
2987 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2988   PetscInt       f, g;
2989   PetscErrorCode ierr;
2990 
2991   PetscFunctionBegin;
2992   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2993   for (f = 0; f < rows; ++f) {
2994     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2995     for (g = 0; g < cols; ++g) {
2996       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2997     }
2998     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2999   }
3000   PetscFunctionReturn(0);
3001 }
3002 
3003 #undef __FUNCT__
3004 #define __FUNCT__ "DMGetDefaultSection"
3005 /*@
3006   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
3007 
3008   Input Parameter:
3009 . dm - The DM
3010 
3011   Output Parameter:
3012 . section - The PetscSection
3013 
3014   Level: intermediate
3015 
3016   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
3017 
3018 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
3019 @*/
3020 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
3021   PetscFunctionBegin;
3022   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3023   PetscValidPointer(section, 2);
3024   *section = dm->defaultSection;
3025   PetscFunctionReturn(0);
3026 }
3027 
3028 #undef __FUNCT__
3029 #define __FUNCT__ "DMSetDefaultSection"
3030 /*@
3031   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
3032 
3033   Input Parameters:
3034 + dm - The DM
3035 - section - The PetscSection
3036 
3037   Level: intermediate
3038 
3039   Note: Any existing Section will be destroyed
3040 
3041 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
3042 @*/
3043 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
3044   PetscInt       numFields;
3045   PetscInt       f;
3046   PetscErrorCode ierr;
3047 
3048   PetscFunctionBegin;
3049   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3050   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
3051   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
3052   dm->defaultSection = section;
3053   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
3054   if (numFields) {
3055     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
3056     for (f = 0; f < numFields; ++f) {
3057       const char *name;
3058 
3059       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
3060       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
3061     }
3062   }
3063   PetscFunctionReturn(0);
3064 }
3065 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
  The global section is created lazily on first call from the default (local)
  section and the point SF, and cached on the DM; this creation is why the call
  is collective.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* both ingredients must already be present; this routine does not build them */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* also refresh the DM's PetscLayout to match the new global section */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
3099 
3100 #undef __FUNCT__
3101 #define __FUNCT__ "DMSetDefaultGlobalSection"
3102 /*@
3103   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
3104 
3105   Input Parameters:
3106 + dm - The DM
3107 - section - The PetscSection
3108 
3109   Level: intermediate
3110 
3111   Note: Any existing Section will be destroyed
3112 
3113 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
3114 @*/
3115 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
3116   PetscErrorCode ierr;
3117 
3118   PetscFunctionBegin;
3119   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3120   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
3121   dm->defaultGlobalSection = section;
3122   PetscFunctionReturn(0);
3123 }
3124 
3125 #undef __FUNCT__
3126 #define __FUNCT__ "DMGetDefaultSF"
3127 /*@
3128   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
3129   it is created from the default PetscSection layouts in the DM.
3130 
3131   Input Parameter:
3132 . dm - The DM
3133 
3134   Output Parameter:
3135 . sf - The PetscSF
3136 
3137   Level: intermediate
3138 
3139   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3140 
3141 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
3142 @*/
3143 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
3144   PetscInt       nroots;
3145   PetscErrorCode ierr;
3146 
3147   PetscFunctionBegin;
3148   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3149   PetscValidPointer(sf, 2);
3150   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
3151   if (nroots < 0) {
3152     PetscSection section, gSection;
3153 
3154     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
3155     if (section) {
3156       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
3157       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
3158     } else {
3159       *sf = PETSC_NULL;
3160       PetscFunctionReturn(0);
3161     }
3162   }
3163   *sf = dm->defaultSF;
3164   PetscFunctionReturn(0);
3165 }
3166 
3167 #undef __FUNCT__
3168 #define __FUNCT__ "DMSetDefaultSF"
3169 /*@
3170   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3171 
3172   Input Parameters:
3173 + dm - The DM
3174 - sf - The PetscSF
3175 
3176   Level: intermediate
3177 
3178   Note: Any previous SF is destroyed
3179 
3180 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3181 @*/
3182 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
3183   PetscErrorCode ierr;
3184 
3185   PetscFunctionBegin;
3186   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3187   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3188   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3189   dm->defaultSF = sf;
3190   PetscFunctionReturn(0);
3191 }
3192 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The resulting graph is stored into dm->defaultSF. Leaves are the
  unconstrained local dofs; each leaf maps to its owning (rank, offset) pair
  in the global layout. A negative global dof count (encoded as -(gdof+1))
  marks dofs whose global offset lives on another process.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* a PetscLayout over the constrained global size gives the ownership ranges
     used to translate global offsets into (rank, local index) pairs */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* first pass: count unconstrained dofs; -(gdof+1) undoes the negative encoding */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* second pass: fill leaf (local offset) and root (rank, index) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* record local offsets of the unconstrained dofs, skipping constrained indices */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* dofs whose global offsets are encoded as -(goff+1): locate the owning
         rank by binary search in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insertion point)-1 on miss */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* locally owned dofs: root is this rank at the layout-relative offset */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* the SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3281 
3282 #undef __FUNCT__
3283 #define __FUNCT__ "DMGetPointSF"
3284 /*@
3285   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3286 
3287   Input Parameter:
3288 . dm - The DM
3289 
3290   Output Parameter:
3291 . sf - The PetscSF
3292 
3293   Level: intermediate
3294 
3295   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3296 
3297 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3298 @*/
3299 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3300   PetscFunctionBegin;
3301   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3302   PetscValidPointer(sf, 2);
3303   *sf = dm->sf;
3304   PetscFunctionReturn(0);
3305 }
3306 
3307 #undef __FUNCT__
3308 #define __FUNCT__ "DMSetPointSF"
3309 /*@
3310   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3311 
3312   Input Parameters:
3313 + dm - The DM
3314 - sf - The PetscSF
3315 
3316   Level: intermediate
3317 
3318 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3319 @*/
3320 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3321   PetscErrorCode ierr;
3322 
3323   PetscFunctionBegin;
3324   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3325   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3326   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3327   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3328   dm->sf = sf;
3329   PetscFunctionReturn(0);
3330 }
3331 
3332 #undef __FUNCT__
3333 #define __FUNCT__ "DMGetNumFields"
3334 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3335 {
3336   PetscFunctionBegin;
3337   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3338   PetscValidPointer(numFields, 2);
3339   *numFields = dm->numFields;
3340   PetscFunctionReturn(0);
3341 }
3342 
3343 #undef __FUNCT__
3344 #define __FUNCT__ "DMSetNumFields"
3345 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3346 {
3347   PetscInt       f;
3348   PetscErrorCode ierr;
3349 
3350   PetscFunctionBegin;
3351   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3352   for (f = 0; f < dm->numFields; ++f) {
3353     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3354   }
3355   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3356   dm->numFields = numFields;
3357   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3358   for (f = 0; f < dm->numFields; ++f) {
3359     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3360   }
3361   PetscFunctionReturn(0);
3362 }
3363 
3364 #undef __FUNCT__
3365 #define __FUNCT__ "DMGetField"
3366 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3367 {
3368   PetscFunctionBegin;
3369   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3370   PetscValidPointer(field, 2);
3371   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3372   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3373   *field = dm->fields[f];
3374   PetscFunctionReturn(0);
3375 }
3376 
3377 #undef __FUNCT__
3378 #define __FUNCT__ "DMSetCoordinates"
3379 /*@
3380   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3381 
3382   Collective on DM
3383 
3384   Input Parameters:
3385 + dm - the DM
3386 - c - coordinate vector
3387 
3388   Note:
3389   The coordinates do include those for ghost points, which are in the local vector
3390 
3391   Level: intermediate
3392 
3393 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3394 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3395 @*/
3396 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3397 {
3398   PetscErrorCode ierr;
3399 
3400   PetscFunctionBegin;
3401   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3402   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3403   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3404   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3405   dm->coordinates = c;
3406   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3407   PetscFunctionReturn(0);
3408 }
3409 
3410 #undef __FUNCT__
3411 #define __FUNCT__ "DMSetCoordinatesLocal"
3412 /*@
3413   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3414 
3415   Collective on DM
3416 
3417    Input Parameters:
3418 +  dm - the DM
3419 -  c - coordinate vector
3420 
3421   Note:
3422   The coordinates of ghost points can be set using DMSetCoordinates()
3423   followed by DMGetCoordinatesLocal(). This is intended to enable the
3424   setting of ghost coordinates outside of the domain.
3425 
3426   Level: intermediate
3427 
3428 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3429 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3430 @*/
3431 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3432 {
3433   PetscErrorCode ierr;
3434 
3435   PetscFunctionBegin;
3436   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3437   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3438   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3439   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3440   dm->coordinatesLocal = c;
3441   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3442   PetscFunctionReturn(0);
3443 }
3444 
3445 #undef __FUNCT__
3446 #define __FUNCT__ "DMGetCoordinates"
3447 /*@
3448   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3449 
3450   Not Collective
3451 
3452   Input Parameter:
3453 . dm - the DM
3454 
3455   Output Parameter:
3456 . c - global coordinate vector
3457 
3458   Note:
3459   This is a borrowed reference, so the user should NOT destroy this vector
3460 
3461   Each process has only the local coordinates (does NOT have the ghost coordinates).
3462 
3463   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3464   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3465 
3466   Level: intermediate
3467 
3468 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3469 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3470 @*/
3471 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3472 {
3473   PetscErrorCode ierr;
3474 
3475   PetscFunctionBegin;
3476   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3477   PetscValidPointer(c,2);
3478   if (!dm->coordinates && dm->coordinatesLocal) {
3479     DM cdm;
3480 
3481     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3482     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3483     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3484     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3485     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3486   }
3487   *c = dm->coordinates;
3488   PetscFunctionReturn(0);
3489 }
3490 
3491 #undef __FUNCT__
3492 #define __FUNCT__ "DMGetCoordinatesLocal"
3493 /*@
3494   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3495 
3496   Collective on DM
3497 
3498   Input Parameter:
3499 . dm - the DM
3500 
3501   Output Parameter:
3502 . c - coordinate vector
3503 
3504   Note:
3505   This is a borrowed reference, so the user should NOT destroy this vector
3506 
3507   Each process has the local and ghost coordinates
3508 
3509   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3510   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3511 
3512   Level: intermediate
3513 
3514 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3515 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3516 @*/
3517 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3518 {
3519   PetscErrorCode ierr;
3520 
3521   PetscFunctionBegin;
3522   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3523   PetscValidPointer(c,2);
3524   if (!dm->coordinatesLocal && dm->coordinates) {
3525     DM cdm;
3526 
3527     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3528     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3529     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3530     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3531     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3532   }
3533   *c = dm->coordinatesLocal;
3534   PetscFunctionReturn(0);
3535 }
3536 
3537 #undef __FUNCT__
3538 #define __FUNCT__ "DMGetCoordinateDM"
3539 /*@
3540   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3541 
3542   Collective on DM
3543 
3544   Input Parameter:
3545 . dm - the DM
3546 
3547   Output Parameter:
3548 . cdm - coordinate DM
3549 
3550   Level: intermediate
3551 
3552 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3553 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3554 @*/
3555 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3556 {
3557   PetscErrorCode ierr;
3558 
3559   PetscFunctionBegin;
3560   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3561   PetscValidPointer(cdm,2);
3562   if (!dm->coordinateDM) {
3563     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3564     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3565   }
3566   *cdm = dm->coordinateDM;
3567   PetscFunctionReturn(0);
3568 }
3569