xref: /petsc/src/dm/interface/dm.c (revision baf369e73c834e6d9bd6baf7b5d05002dc48c405)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
/* Package-wide class id and logging events for DM objects.
   NOTE(review): presumably registered in DMInitializePackage() -- confirm */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
  /* With static libraries the packages DM depends on must be initialized
     explicitly before their objects can be created below */
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty function table; DMSetType() fills it in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 is assumed to equal the declared length of
       nullspaceConstructors[] in dmimpl.h -- confirm if that array changes */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
64 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Free any previously set type string so the type can be changed repeatedly */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
93 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM has been composed with the vector

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached by VecSetDM() under this fixed composition key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
121 
122 #undef __FUNCT__
123 #define __FUNCT__ "VecSetDM"
124 /*@
125   VecSetDM - Sets the DM defining the data layout of the vector
126 
127   Not collective
128 
129   Input Parameters:
130 + v - The Vec
131 - dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecSetDM(Vec v, DM dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
144   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Free any previously set type string so the type can be changed repeatedly */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
175 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or PETSC_NULL if no DM has been composed with the matrix

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached by MatSetDM() under this fixed composition key */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
203 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any previously set DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be PETSC_NULL, which clears the composition */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
229 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
261 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    /* The solution vector (*dm)->x back-references the DM only if it was
       created by this DM; count that self-reference too */
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If external (non-circular) references remain, just drop one reference */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* Work vectors checked out with DMGetLocalVector() must be restored first */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Release the solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* NOTE(review): ops->destroy is called unconditionally -- assumes every DM
     type installs a destroy function; confirm, or a NULL deref is possible */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
402 
403 #undef __FUNCT__
404 #define __FUNCT__ "DMSetUp"
405 /*@
406     DMSetUp - sets up the data structures inside a DM object
407 
408     Collective on DM
409 
410     Input Parameter:
411 .   dm - the DM object to setup
412 
413     Level: developer
414 
415 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
416 
417 @*/
418 PetscErrorCode  DMSetUp(DM dm)
419 {
420   PetscErrorCode ierr;
421 
422   PetscFunctionBegin;
423   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
424   if (dm->setupcalled) PetscFunctionReturn(0);
425   if (dm->ops->setup) {
426     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
427   }
428   dm->setupcalled = PETSC_TRUE;
429   PetscFunctionReturn(0);
430 }
431 
432 #undef __FUNCT__
433 #define __FUNCT__ "DMSetFromOptions"
434 /*@
435     DMSetFromOptions - sets parameters in a DM from the options database
436 
437     Collective on DM
438 
439     Input Parameter:
440 .   dm - the DM object to set options for
441 
442     Options Database:
443 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
444 .   -dm_vec_type <type>  type of vector to create inside DM
445 .   -dm_mat_type <type>  type of matrix to create inside DM
446 -   -dm_coloring_type <global or ghosted>
447 
448     Level: developer
449 
450 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
451 
452 @*/
453 PetscErrorCode  DMSetFromOptions(DM dm)
454 {
455   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
456   PetscErrorCode ierr;
457   char           typeName[256] = MATAIJ;
458 
459   PetscFunctionBegin;
460   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
461   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
462     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
463     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
464     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
465     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
466     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
467     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
468     if (flg) {
469       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
470     }
471     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
472     if (flg) {
473       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
474     }
475     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
476     if (dm->ops->setfromoptions) {
477       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
478     }
479     /* process any options handlers added with PetscObjectAddOptionsHandler() */
480     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
481   ierr = PetscOptionsEnd();CHKERRQ(ierr);
482   if (flg1) {
483     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
484   }
485   if (flg2) {
486     PetscViewer viewer;
487 
488     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
489     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
490     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
491     ierr = DMView(dm, viewer);CHKERRQ(ierr);
492     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
493   }
494   if (flg3) {
495     PetscViewer viewer;
496 
497     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
498     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
499     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
500     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
501     ierr = DMView(dm, viewer);CHKERRQ(ierr);
502     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
503   }
504   if (flg4) {
505     PetscViewer viewer;
506 
507     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
508     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
509     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
510     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
511     ierr = DMView(dm, viewer);CHKERRQ(ierr);
512     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
513   }
514   PetscFunctionReturn(0);
515 }
516 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; if PETSC_NULL the standard-output ASCII viewer on the DM's communicator is used

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: rank 0 writes a header of [classid, type name (256 chars)]
       before the type-specific data written by ops->view below */
    PetscInt         classid = DM_FILE_CLASSID;
    MPI_Comm         comm;
    PetscMPIInt      rank;
    char             type[256];

    ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
      ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
      ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
    }
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
563 
/* Viewers installed on section-based vectors below; defined elsewhere in the library */
PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
566 
567 #undef __FUNCT__
568 #define __FUNCT__ "DMCreateGlobalVector"
569 /*@
570     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
571 
572     Collective on DM
573 
574     Input Parameter:
575 .   dm - the DM object
576 
577     Output Parameter:
578 .   vec - the global vector
579 
580     Level: beginner
581 
582 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
583 
584 @*/
585 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
586 {
587   PetscErrorCode ierr;
588 
589   PetscFunctionBegin;
590   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
591   if (dm->defaultSection) {
592     PetscSection gSection;
593     PetscInt     localSize, blockSize = -1, pStart, pEnd, p;
594 
595     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
596     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
597     for(p = pStart; p < pEnd; ++p) {
598       PetscInt dof, cdof;
599 
600       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
601       ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
602       if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
603       if ((dof > 0) && (dof-cdof != blockSize)) {
604         blockSize = 1;
605         break;
606       }
607     }
608     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
609     if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
610     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
611     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
612     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
613     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
614     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
615     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
616     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
617     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
618     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
619     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
620   } else {
621     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
622   }
623   PetscFunctionReturn(0);
624 }
625 
626 #undef __FUNCT__
627 #define __FUNCT__ "DMCreateLocalVector"
628 /*@
629     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
630 
631     Not Collective
632 
633     Input Parameter:
634 .   dm - the DM object
635 
636     Output Parameter:
637 .   vec - the local vector
638 
639     Level: beginner
640 
641 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
642 
643 @*/
644 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
645 {
646   PetscErrorCode ierr;
647 
648   PetscFunctionBegin;
649   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
650   if (dm->defaultSection) {
651     PetscInt localSize, blockSize = -1, pStart, pEnd, p;
652 
653     ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
654     for(p = pStart; p < pEnd; ++p) {
655       PetscInt dof;
656 
657       ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
658       if ((blockSize < 0) && (dof > 0)) blockSize = dof;
659       if ((dof > 0) && (dof != blockSize)) {
660         blockSize = 1;
661         break;
662       }
663     }
664     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
665     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
666     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
667     ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
668     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
669     ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
670     ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
671   } else {
672     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
673   }
674   PetscFunctionReturn(0);
675 }
676 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first access and cached in the DM; the DM
   retains ownership, so the caller must not destroy it.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Section-based DM: build the map point by point from the local
         section's dofs and the global section's offsets */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the ISLocalToGlobalMapping takes over the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* Otherwise defer to the type-specific implementation */
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
740 /*@
741    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
742 
743    Collective on DM
744 
745    Input Parameter:
746 .  da - the distributed array that provides the mapping
747 
748    Output Parameter:
749 .  ltog - the block mapping
750 
751    Level: intermediate
752 
753    Notes:
754    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
755    MatSetLocalToGlobalMappingBlock().
756 
757 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
758 @*/
759 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
760 {
761   PetscErrorCode ierr;
762 
763   PetscFunctionBegin;
764   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
765   PetscValidPointer(ltog,2);
766   if (!dm->ltogmapb) {
767     PetscInt bs;
768     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
769     if (bs > 1) {
770       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
771       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
772     } else {
773       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
774       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
775     }
776   }
777   *ltog = dm->ltogmapb;
778   PetscFunctionReturn(0);
779 }
780 
781 #undef __FUNCT__
782 #define __FUNCT__ "DMGetBlockSize"
783 /*@
784    DMGetBlockSize - Gets the inherent block size associated with a DM
785 
786    Not Collective
787 
788    Input Parameter:
789 .  dm - the DM with block structure
790 
791    Output Parameter:
792 .  bs - the block size, 1 implies no exploitable block structure
793 
794    Level: intermediate
795 
796 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
797 @*/
798 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
799 {
800   PetscFunctionBegin;
801   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
802   PetscValidPointer(bs,2);
803   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
804   *bs = dm->bs;
805   PetscFunctionReturn(0);
806 }
807 
808 #undef __FUNCT__
809 #define __FUNCT__ "DMCreateInterpolation"
810 /*@
811     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
812 
813     Collective on DM
814 
815     Input Parameter:
816 +   dm1 - the DM object
817 -   dm2 - the second, finer DM object
818 
819     Output Parameter:
820 +  mat - the interpolation
821 -  vec - the scaling (optional)
822 
823     Level: developer
824 
825     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
826         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
827 
828         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
829         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
830 
831 
832 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
833 
834 @*/
835 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
836 {
837   PetscErrorCode ierr;
838 
839   PetscFunctionBegin;
840   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
841   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
842   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
846 #undef __FUNCT__
847 #define __FUNCT__ "DMCreateInjection"
848 /*@
849     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
850 
851     Collective on DM
852 
853     Input Parameter:
854 +   dm1 - the DM object
855 -   dm2 - the second, finer DM object
856 
857     Output Parameter:
858 .   ctx - the injection
859 
860     Level: developer
861 
862    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
863         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
864 
865 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
866 
867 @*/
868 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
869 {
870   PetscErrorCode ierr;
871 
872   PetscFunctionBegin;
873   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
874   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
875   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
876   PetscFunctionReturn(0);
877 }
878 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
909 
910 #undef __FUNCT__
911 #define __FUNCT__ "DMCreateMatrix"
912 /*@C
913     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
914 
915     Collective on DM
916 
917     Input Parameter:
918 +   dm - the DM object
919 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
920             any type which inherits from one of these (such as MATAIJ)
921 
922     Output Parameter:
923 .   mat - the empty Jacobian
924 
925     Level: beginner
926 
927     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
928        do not need to do it yourself.
929 
930        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
931        the nonzero pattern call DMDASetMatPreallocateOnly()
932 
933        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
934        internally by PETSc.
935 
936        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
937        the indices for the global numbering for DMDAs which is complicated.
938 
939 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
940 
941 @*/
942 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
943 {
944   PetscErrorCode ierr;
945 
946   PetscFunctionBegin;
947   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
948 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
949   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
950 #endif
951   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
952   PetscValidPointer(mat,3);
953   if (dm->mattype) {
954     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
955   } else {
956     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
957   }
958   PetscFunctionReturn(0);
959 }
960 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Recorded on the DM; presumably consulted by the type-specific creatematrix
     implementations — the flag is not read elsewhere in this file */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
983 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array

  Level: developer

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse a previously checked-in work link instead of allocating a new one */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    /* NOTE(review): relies on PetscNewLog() zeroing the link (mem == NULL, bytes == 0) — confirm */
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    /* Buffer too small: grow it; PetscFree() of a NULL buffer (fresh link) is a no-op */
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list so DMRestoreWorkArray() can find it by buffer address */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1029 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size passed to DMGetWorkArray() (not read here)
. dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT) (not read here)
- mem - address of the work array pointer; set to PETSC_NULL on success

  Level: developer

.seealso DMDestroy(), DMCreate(), DMGetWorkArray()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link owning this buffer on the checked-out list and move it back
     to the checked-in list so a later DMGetWorkArray() can reuse the buffer */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0); /* not reached: SETERRQ() above returns */
}
1068 
1069 #undef __FUNCT__
1070 #define __FUNCT__ "DMSetNullSpaceConstructor"
1071 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1072 {
1073   PetscFunctionBegin;
1074   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1075   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1076   dm->nullspaceConstructors[field] = nullsp;
1077   PetscFunctionReturn(0);
1078 }
1079 
1080 #undef __FUNCT__
1081 #define __FUNCT__ "DMCreateFieldIS"
1082 /*@C
1083   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1084 
1085   Not collective
1086 
1087   Input Parameter:
1088 . dm - the DM object
1089 
1090   Output Parameters:
1091 + numFields  - The number of fields (or PETSC_NULL if not requested)
1092 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1093 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1094 
1095   Level: intermediate
1096 
1097   Notes:
1098   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1099   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1100   PetscFree().
1101 
1102 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1103 @*/
1104 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1105 {
1106   PetscSection   section, sectionGlobal;
1107   PetscErrorCode ierr;
1108 
1109   PetscFunctionBegin;
1110   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1111   if (numFields) {
1112     PetscValidPointer(numFields,2);
1113     *numFields = 0;
1114   }
1115   if (fieldNames) {
1116     PetscValidPointer(fieldNames,3);
1117     *fieldNames = PETSC_NULL;
1118   }
1119   if (fields) {
1120     PetscValidPointer(fields,4);
1121     *fields = PETSC_NULL;
1122   }
1123   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1124   if (section) {
1125     PetscInt *fieldSizes, **fieldIndices;
1126     PetscInt  nF, f, pStart, pEnd, p;
1127 
1128     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1129     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1130     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1131     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1132     for (f = 0; f < nF; ++f) {
1133       fieldSizes[f] = 0;
1134     }
1135     for (p = pStart; p < pEnd; ++p) {
1136       PetscInt gdof;
1137 
1138       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1139       if (gdof > 0) {
1140         for (f = 0; f < nF; ++f) {
1141           PetscInt fdof, fcdof;
1142 
1143           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1144           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1145           fieldSizes[f] += fdof-fcdof;
1146         }
1147       }
1148     }
1149     for (f = 0; f < nF; ++f) {
1150       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1151       fieldSizes[f] = 0;
1152     }
1153     for (p = pStart; p < pEnd; ++p) {
1154       PetscInt gdof, goff;
1155 
1156       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1157       if (gdof > 0) {
1158         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1159         for (f = 0; f < nF; ++f) {
1160           PetscInt fdof, fcdof, fc;
1161 
1162           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1163           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1164           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1165             fieldIndices[f][fieldSizes[f]] = goff++;
1166           }
1167         }
1168       }
1169     }
1170     if (numFields) {*numFields = nF;}
1171     if (fieldNames) {
1172       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1173       for (f = 0; f < nF; ++f) {
1174         const char *fieldName;
1175 
1176         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1177         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1178       }
1179     }
1180     if (fields) {
1181       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1182       for (f = 0; f < nF; ++f) {
1183         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1184       }
1185     }
1186     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1187   } else {
1188     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1189   }
1190   PetscFunctionReturn(0);
1191 }
1192 
1193 #undef __FUNCT__
1194 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1195 /*@C
1196   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1197 
1198   Not Collective
1199 
1200   Input Parameters:
1201 + dm   - the DM object
1202 - name - the name of the field decomposition
1203 
1204   Output Parameter:
1205 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1206 
1207   Level: advanced
1208 
1209 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1210 @*/
1211 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1212 {
1213   PetscErrorCode ierr;
1214 
1215   PetscFunctionBegin;
1216   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1217   PetscValidCharPointer(name,2);
1218   PetscValidPointer(ddm,3);
1219   *ddm = PETSC_NULL;
1220   if (dm->ops->createfielddecompositiondm) {
1221     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1222   }
1223   PetscFunctionReturn(0);
1224 }
1225 
1226 #undef __FUNCT__
1227 #define __FUNCT__ "DMCreateFieldDecomposition"
1228 /*@C
1229   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1230                           corresponding to different fields: each IS contains the global indices of the dofs of the
1231                           corresponding field. The optional list of DMs define the DM for each subproblem.
1232                           Generalizes DMCreateFieldIS().
1233 
1234   Not collective
1235 
1236   Input Parameter:
1237 . dm - the DM object
1238 
1239   Output Parameters:
1240 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1241 . namelist  - The name for each field (or PETSC_NULL if not requested)
1242 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1243 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1244 
1245   Level: intermediate
1246 
1247   Notes:
1248   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1249   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1250   and all of the arrays should be freed with PetscFree().
1251 
1252 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1253 @*/
1254 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1255 {
1256   PetscErrorCode ierr;
1257 
1258   PetscFunctionBegin;
1259   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1260   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1261   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1262   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1263   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1264   if (!dm->ops->createfielddecomposition) {
1265     PetscSection section;
1266     PetscInt     numFields, f;
1267 
1268     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1269     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1270     if (section && numFields && dm->ops->createsubdm) {
1271       *len = numFields;
1272       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1273       for (f = 0; f < numFields; ++f) {
1274         const char *fieldName;
1275 
1276         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1277         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1278         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1279       }
1280     } else {
1281       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1282       /* By default there are no DMs associated with subproblems. */
1283       if (dmlist) *dmlist = PETSC_NULL;
1284     }
1285   }
1286   else {
1287     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1288   }
1289   PetscFunctionReturn(0);
1290 }
1291 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers of the selected fields, an array of length numFields

  Output Parameters:
+ is - The global indices for the subproblem (optional, pass PETSC_NULL if not needed)
- subdm - The DM for the subproblem (optional, pass PETSC_NULL if not needed)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* Both outputs are optional; validate only what was requested */
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1330 /*@C
1331   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1332 
1333   Not Collective
1334 
1335   Input Parameters:
1336 + dm   - the DM object
1337 - name - the name of the subdomain decomposition
1338 
1339   Output Parameter:
1340 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1341 
1342   Level: advanced
1343 
1344 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1345 @*/
1346 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1347 {
1348   PetscErrorCode ierr;
1349 
1350   PetscFunctionBegin;
1351   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1352   PetscValidCharPointer(name,2);
1353   PetscValidPointer(ddm,3);
1354   *ddm = PETSC_NULL;
1355   if (dm->ops->createdomaindecompositiondm) {
1356     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1357   }
1358   PetscFunctionReturn(0);
1359 }
1360 
1361 #undef __FUNCT__
1362 #define __FUNCT__ "DMCreateDomainDecomposition"
1363 /*@C
1364   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1365                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1366                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1367                           define a nonoverlapping covering, while outer subdomains can overlap.
1368                           The optional list of DMs define the DM for each subproblem.
1369 
1370   Not collective
1371 
1372   Input Parameter:
1373 . dm - the DM object
1374 
1375   Output Parameters:
1376 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1377 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1378 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1379 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1380 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1381 
1382   Level: intermediate
1383 
1384   Notes:
1385   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1386   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1387   and all of the arrays should be freed with PetscFree().
1388 
1389 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1390 @*/
1391 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1392 {
1393   PetscErrorCode ierr;
1394   DMSubDomainHookLink link;
1395   PetscInt            i,l;
1396 
1397   PetscFunctionBegin;
1398   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1399   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1400   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1401   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1402   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1403   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1404   if (dm->ops->createdomaindecomposition) {
1405     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1406   }
1407   if (len) *len = l;
1408   for (i = 0; i < l; i++) {
1409     for (link=dm->subdomainhook; link; link=link->next) {
1410       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1411     }
1412   }
1413   PetscFunctionReturn(0);
1414 }
1415 
1416 
1417 #undef __FUNCT__
1418 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1419 /*@C
1420   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1421 
1422   Not collective
1423 
1424   Input Parameters:
1425 + dm - the DM object
1426 . n  - the number of subdomain scatters
1427 - subdms - the local subdomains
1428 
1429   Output Parameters:
1430 + n     - the number of scatters returned
1431 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1432 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1433 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1434 
1435   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1436   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1437   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1438   solution and residual data.
1439 
1440   Level: developer
1441 
1442 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1443 @*/
1444 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1445 {
1446   PetscErrorCode ierr;
1447 
1448   PetscFunctionBegin;
1449   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1450   PetscValidPointer(subdms,3);
1451   PetscValidPointer(iscat,4);
1452   PetscValidPointer(oscat,5);
1453   PetscValidPointer(gscat,6);
1454   if (dm->ops->createddscatters) {
1455     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat); CHKERRQ(ierr);
1456   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1457   PetscFunctionReturn(0);
1458 }
1459 
#undef __FUNCT__
#define __FUNCT__ "DMRefine"
/*@
  DMRefine - Refines a DM object

  Collective on DM

  Input Parameters:
+ dm   - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
. dmf - the refined DM, or PETSC_NULL

  Note: If no refinement was done, the return value is PETSC_NULL

  Level: developer

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
@*/
PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): assumes dm->ops->refine is non-NULL; a type without refine support would crash here — confirm */
  ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
  if (*dmf) {
    /* The fine DM inherits the matrix-creation routine, user context, levels, and matrix type from the coarse DM */
    (*dmf)->ops->creatematrix = dm->ops->creatematrix;
    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
    (*dmf)->ctx       = dm->ctx;
    (*dmf)->leveldown = dm->leveldown;
    (*dmf)->levelup   = dm->levelup + 1;
    ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
    /* Run the refine hooks registered with DMRefineHookAdd(), in registration order */
    for (link=dm->refinehook; link; link=link->next) {
      if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
    }
  }
  PetscFunctionReturn(0);
}
1501 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - the coarse DM on which the hooks are run when it is refined or when data is interpolated to a finer grid
.  refinehook - function to run when setting up a finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order (see DMRefine()/DMInterpolate()) */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1555 
1556 #undef __FUNCT__
1557 #define __FUNCT__ "DMInterpolate"
1558 /*@
1559    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1560 
1561    Collective if any hooks are
1562 
1563    Input Arguments:
1564 +  coarse - coarser DM to use as a base
1565 .  restrct - interpolation matrix, apply using MatInterpolate()
1566 -  fine - finer DM to update
1567 
1568    Level: developer
1569 
1570 .seealso: DMRefineHookAdd(), MatInterpolate()
1571 @*/
1572 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1573 {
1574   PetscErrorCode ierr;
1575   DMRefineHookLink link;
1576 
1577   PetscFunctionBegin;
1578   for (link=fine->refinehook; link; link=link->next) {
1579     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1580   }
1581   PetscFunctionReturn(0);
1582 }
1583 
1584 #undef __FUNCT__
1585 #define __FUNCT__ "DMGetRefineLevel"
1586 /*@
1587     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1588 
1589     Not Collective
1590 
1591     Input Parameter:
1592 .   dm - the DM object
1593 
1594     Output Parameter:
1595 .   level - number of refinements
1596 
1597     Level: developer
1598 
1599 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1600 
1601 @*/
1602 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1603 {
1604   PetscFunctionBegin;
1605   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1606   *level = dm->levelup;
1607   PetscFunctionReturn(0);
1608 }
1609 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if extra operations must run alongside the global-to-local transfer.

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1667 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run the begin-hooks registered with DMGlobalToLocalHookAdd() before the transfer starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Star-forest path: start a broadcast of global values into the local vector;
       only insertion is supported here */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; the *_ALL_VALUES modes are mapped onto the plain ones */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1713 
1714 #undef __FUNCT__
1715 #define __FUNCT__ "DMGlobalToLocalEnd"
1716 /*@
1717     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1718 
1719     Neighbor-wise Collective on DM
1720 
1721     Input Parameters:
1722 +   dm - the DM object
1723 .   g - the global vector
1724 .   mode - INSERT_VALUES or ADD_VALUES
1725 -   l - the local vector
1726 
1727 
1728     Level: beginner
1729 
1730 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1731 
1732 @*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  PetscScalar    *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
  /* The PetscSF broadcast path only supports insertion, not accumulation */
  if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Completes the broadcast started by PetscSFBcastBegin() in DMGlobalToLocalBegin() */
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; map *_ALL_VALUES modes to their plain counterparts */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user-registered end-hooks after the communication has completed */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1759 
1760 #undef __FUNCT__
1761 #define __FUNCT__ "DMLocalToGlobalBegin"
1762 /*@
1763     DMLocalToGlobalBegin - updates global vectors from local vectors
1764 
1765     Neighbor-wise Collective on DM
1766 
1767     Input Parameters:
1768 +   dm - the DM object
1769 .   l - the local vector
1770 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1771            base point.
-   g - the global vector
1773 
1774     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1775            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1776            global array to the final global array with VecAXPY().
1777 
1778     Level: beginner
1779 
1780 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1781 
1782 @*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Translate the PETSc insertion mode into the MPI reduction op used by PetscSFReduceBegin() */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it insertion cannot be expressed as a reduction */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
  default:
    SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Nonblocking start; matched by PetscSFReduceEnd() in DMLocalToGlobalEnd() */
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; map *_ALL_VALUES modes to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1819 
1820 #undef __FUNCT__
1821 #define __FUNCT__ "DMLocalToGlobalEnd"
1822 /*@
1823     DMLocalToGlobalEnd - updates global vectors from local vectors
1824 
1825     Neighbor-wise Collective on DM
1826 
1827     Input Parameters:
1828 +   dm - the DM object
1829 .   l - the local vector
1830 .   mode - INSERT_VALUES or ADD_VALUES
1831 -   g - the global vector
1832 
1833 
1834     Level: beginner
1835 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1837 
1838 @*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Translate the mode the same way as DMLocalToGlobalBegin(); the op must match the begin call */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it insertion cannot be expressed as a reduction */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Completes the reduction started by PetscSFReduceBegin() in DMLocalToGlobalBegin() */
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; map *_ALL_VALUES modes to their plain counterparts */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1875 
1876 #undef __FUNCT__
1877 #define __FUNCT__ "DMCoarsen"
1878 /*@
1879     DMCoarsen - Coarsens a DM object
1880 
1881     Collective on DM
1882 
1883     Input Parameter:
1884 +   dm - the DM object
1885 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1886 
1887     Output Parameter:
1888 .   dmc - the coarsened DM
1889 
1890     Level: developer
1891 
1892 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1893 
1894 @*/
1895 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1896 {
1897   PetscErrorCode ierr;
1898   DMCoarsenHookLink link;
1899 
1900   PetscFunctionBegin;
1901   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1902   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1903   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1904   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1905   (*dmc)->ctx       = dm->ctx;
1906   (*dmc)->levelup   = dm->levelup;
1907   (*dmc)->leveldown = dm->leveldown + 1;
1908   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1909   for (link=dm->coarsenhook; link; link=link->next) {
1910     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1911   }
1912   PetscFunctionReturn(0);
1913 }
1914 
1915 #undef __FUNCT__
1916 #define __FUNCT__ "DMCoarsenHookAdd"
1917 /*@
1918    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1919 
1920    Logically Collective
1921 
1922    Input Arguments:
1923 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1924 .  coarsenhook - function to run when setting up a coarser level
1925 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1926 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1927 
1928    Calling sequence of coarsenhook:
1929 $    coarsenhook(DM fine,DM coarse,void *ctx);
1930 
1931 +  fine - fine level DM
1932 .  coarse - coarse level DM to restrict problem to
1933 -  ctx - optional user-defined function context
1934 
1935    Calling sequence for restricthook:
1936 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1937 
1938 +  fine - fine level DM
1939 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1940 .  rscale - scaling vector for restriction
1941 .  inject - matrix restricting by injection
1942 .  coarse - coarse level DM to update
1943 -  ctx - optional user-defined function context
1944 
1945    Level: advanced
1946 
1947    Notes:
1948    This function is only needed if auxiliary data needs to be set up on coarse grids.
1949 
1950    If this function is called multiple times, the hooks will be run in the order they are added.
1951 
1952    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1953    extract the finest level information from its context (instead of from the SNES).
1954 
1955 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1956 @*/
1957 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1958 {
1959   PetscErrorCode ierr;
1960   DMCoarsenHookLink link,*p;
1961 
1962   PetscFunctionBegin;
1963   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1964   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1965   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1966   link->coarsenhook = coarsenhook;
1967   link->restricthook = restricthook;
1968   link->ctx = ctx;
1969   link->next = PETSC_NULL;
1970   *p = link;
1971   PetscFunctionReturn(0);
1972 }
1973 
1974 #undef __FUNCT__
1975 #define __FUNCT__ "DMRestrict"
1976 /*@
1977    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1978 
1979    Collective if any hooks are
1980 
1981    Input Arguments:
1982 +  fine - finer DM to use as a base
1983 .  restrct - restriction matrix, apply using MatRestrict()
1984 .  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1986 
1987    Level: developer
1988 
1989 .seealso: DMCoarsenHookAdd(), MatRestrict()
1990 @*/
1991 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1992 {
1993   PetscErrorCode ierr;
1994   DMCoarsenHookLink link;
1995 
1996   PetscFunctionBegin;
1997   for (link=fine->coarsenhook; link; link=link->next) {
1998     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1999   }
2000   PetscFunctionReturn(0);
2001 }
2002 
2003 #undef __FUNCT__
2004 #define __FUNCT__ "DMSubDomainHookAdd"
2005 /*@
2006    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2007 
2008    Logically Collective
2009 
2010    Input Arguments:
2011 +  global - global DM
.  ddhook - function to run when setting up the subdomain block DMs (may be PETSC_NULL)
2013 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2014 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
2015 
2016    Calling sequence for restricthook:
2017 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
2018 
2019 +  global - global DM
2020 .  out    - scatter to the outer (with ghost and overlap points) block vector
2021 .  in     - scatter to block vector values only owned locally
2022 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
2023 -  ctx - optional user-defined function context
2024 
2025    Level: advanced
2026 
2027    Notes:
2028    This function is only needed if auxiliary data needs to be set up on coarse grids.
2029 
2030    If this function is called multiple times, the hooks will be run in the order they are added.
2031 
2032    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2033    extract the finest level information from its context (instead of from the SNES).
2034 
2035 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2036 @*/
2037 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2038 {
2039   PetscErrorCode ierr;
2040   DMSubDomainHookLink link,*p;
2041 
2042   PetscFunctionBegin;
2043   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2044   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2045   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2046   link->restricthook = restricthook;
2047   link->ddhook = ddhook;
2048   link->ctx = ctx;
2049   link->next = PETSC_NULL;
2050   *p = link;
2051   PetscFunctionReturn(0);
2052 }
2053 
2054 #undef __FUNCT__
2055 #define __FUNCT__ "DMSubDomainRestrict"
2056 /*@
2057    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2058 
2059    Collective if any hooks are
2060 
2061    Input Arguments:
+  global - global DM to use as a base
2063 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2064 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update
2066 
2067    Level: developer
2068 
2069 .seealso: DMCoarsenHookAdd(), MatRestrict()
2070 @*/
2071 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2072 {
2073   PetscErrorCode ierr;
2074   DMSubDomainHookLink link;
2075 
2076   PetscFunctionBegin;
2077   for (link=global->subdomainhook; link; link=link->next) {
2078     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2079   }
2080   PetscFunctionReturn(0);
2081 }
2082 
2083 #undef __FUNCT__
2084 #define __FUNCT__ "DMGetCoarsenLevel"
2085 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2087 
2088     Not Collective
2089 
2090     Input Parameter:
2091 .   dm - the DM object
2092 
2093     Output Parameter:
2094 .   level - number of coarsenings
2095 
2096     Level: developer
2097 
2098 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2099 
2100 @*/
2101 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2102 {
2103   PetscFunctionBegin;
2104   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2105   *level = dm->leveldown;
2106   PetscFunctionReturn(0);
2107 }
2108 
2109 
2110 
2111 #undef __FUNCT__
2112 #define __FUNCT__ "DMRefineHierarchy"
2113 /*@C
2114     DMRefineHierarchy - Refines a DM object, all levels at once
2115 
2116     Collective on DM
2117 
2118     Input Parameter:
2119 +   dm - the DM object
2120 -   nlevels - the number of levels of refinement
2121 
2122     Output Parameter:
2123 .   dmf - the refined DM hierarchy
2124 
2125     Level: developer
2126 
2127 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2128 
2129 @*/
2130 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2131 {
2132   PetscErrorCode ierr;
2133 
2134   PetscFunctionBegin;
2135   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2136   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2137   if (nlevels == 0) PetscFunctionReturn(0);
2138   if (dm->ops->refinehierarchy) {
2139     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2140   } else if (dm->ops->refine) {
2141     PetscInt i;
2142 
2143     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2144     for (i=1; i<nlevels; i++) {
2145       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2146     }
2147   } else {
2148     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2149   }
2150   PetscFunctionReturn(0);
2151 }
2152 
2153 #undef __FUNCT__
2154 #define __FUNCT__ "DMCoarsenHierarchy"
2155 /*@C
2156     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2157 
2158     Collective on DM
2159 
2160     Input Parameter:
2161 +   dm - the DM object
2162 -   nlevels - the number of levels of coarsening
2163 
2164     Output Parameter:
2165 .   dmc - the coarsened DM hierarchy
2166 
2167     Level: developer
2168 
2169 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2170 
2171 @*/
2172 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2173 {
2174   PetscErrorCode ierr;
2175 
2176   PetscFunctionBegin;
2177   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2178   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2179   if (nlevels == 0) PetscFunctionReturn(0);
2180   PetscValidPointer(dmc,3);
2181   if (dm->ops->coarsenhierarchy) {
2182     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2183   } else if (dm->ops->coarsen) {
2184     PetscInt i;
2185 
2186     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2187     for (i=1; i<nlevels; i++) {
2188       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2189     }
2190   } else {
2191     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2192   }
2193   PetscFunctionReturn(0);
2194 }
2195 
2196 #undef __FUNCT__
2197 #define __FUNCT__ "DMCreateAggregates"
2198 /*@
2199    DMCreateAggregates - Gets the aggregates that map between
2200    grids associated with two DMs.
2201 
2202    Collective on DM
2203 
2204    Input Parameters:
2205 +  dmc - the coarse grid DM
2206 -  dmf - the fine grid DM
2207 
2208    Output Parameters:
2209 .  rest - the restriction matrix (transpose of the projection matrix)
2210 
2211    Level: intermediate
2212 
2213 .keywords: interpolation, restriction, multigrid
2214 
2215 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2216 @*/
2217 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2218 {
2219   PetscErrorCode ierr;
2220 
2221   PetscFunctionBegin;
2222   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2223   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2224   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2225   PetscFunctionReturn(0);
2226 }
2227 
2228 #undef __FUNCT__
2229 #define __FUNCT__ "DMSetApplicationContextDestroy"
2230 /*@C
2231     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2232 
2233     Not Collective
2234 
2235     Input Parameters:
2236 +   dm - the DM object
2237 -   destroy - the destroy function
2238 
2239     Level: intermediate
2240 
2241 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2242 
2243 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored callback is invoked on dm->ctx when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2251 
2252 #undef __FUNCT__
2253 #define __FUNCT__ "DMSetApplicationContext"
2254 /*@
2255     DMSetApplicationContext - Set a user context into a DM object
2256 
2257     Not Collective
2258 
2259     Input Parameters:
2260 +   dm - the DM object
2261 -   ctx - the user context
2262 
2263     Level: intermediate
2264 
2265 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2266 
2267 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not copy or take ownership of ctx unless a destroy routine is set
     with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2275 
2276 #undef __FUNCT__
2277 #define __FUNCT__ "DMGetApplicationContext"
2278 /*@
2279     DMGetApplicationContext - Gets a user context from a DM object
2280 
2281     Not Collective
2282 
2283     Input Parameter:
2284 .   dm - the DM object
2285 
2286     Output Parameter:
2287 .   ctx - the user context
2288 
2289     Level: intermediate
2290 
2291 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2292 
2293 @*/
2294 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2295 {
2296   PetscFunctionBegin;
2297   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2298   *(void**)ctx = dm->ctx;
2299   PetscFunctionReturn(0);
2300 }
2301 
2302 #undef __FUNCT__
2303 #define __FUNCT__ "DMSetVariableBounds"
2304 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2306 
2307     Logically Collective on DM
2308 
2309     Input Parameter:
2310 +   dm - the DM object
2311 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2312 
2313     Level: intermediate
2314 
2315 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2316          DMSetJacobian()
2317 
2318 @*/
2319 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2320 {
2321   PetscFunctionBegin;
2322   dm->ops->computevariablebounds = f;
2323   PetscFunctionReturn(0);
2324 }
2325 
2326 #undef __FUNCT__
2327 #define __FUNCT__ "DMHasVariableBounds"
2328 /*@
2329     DMHasVariableBounds - does the DM object have a variable bounds function?
2330 
2331     Not Collective
2332 
2333     Input Parameter:
.   dm - the DM object
2335 
2336     Output Parameter:
2337 .   flg - PETSC_TRUE if the variable bounds function exists
2338 
2339     Level: developer
2340 
2341 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2342 
2343 @*/
2344 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2345 {
2346   PetscFunctionBegin;
2347   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2348   PetscFunctionReturn(0);
2349 }
2350 
2351 #undef __FUNCT__
2352 #define __FUNCT__ "DMComputeVariableBounds"
2353 /*@C
2354     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2355 
2356     Logically Collective on DM
2357 
2358     Input Parameters:
+   dm - the DM object
2360 -   x  - current solution at which the bounds are computed
2361 
2362     Output parameters:
2363 +   xl - lower bound
2364 -   xu - upper bound
2365 
2366     Level: intermediate
2367 
2368 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2369 
2370 @*/
2371 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2372 {
2373   PetscErrorCode ierr;
2374   PetscFunctionBegin;
2375   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2376   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2377   if (dm->ops->computevariablebounds) {
2378     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2379   }
2380   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2381   PetscFunctionReturn(0);
2382 }
2383 
2384 #undef __FUNCT__
2385 #define __FUNCT__ "DMHasColoring"
2386 /*@
2387     DMHasColoring - does the DM object have a method of providing a coloring?
2388 
2389     Not Collective
2390 
2391     Input Parameter:
2392 .   dm - the DM object
2393 
2394     Output Parameter:
2395 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2396 
2397     Level: developer
2398 
2399 .seealso DMHasFunction(), DMCreateColoring()
2400 
2401 @*/
2402 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2403 {
2404   PetscFunctionBegin;
2405   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2406   PetscFunctionReturn(0);
2407 }
2408 
2409 #undef  __FUNCT__
2410 #define __FUNCT__ "DMSetVec"
2411 /*@C
2412     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2413 
2414     Collective on DM
2415 
2416     Input Parameter:
2417 +   dm - the DM object
2418 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2419 
2420     Level: developer
2421 
2422 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2423 
2424 @*/
2425 PetscErrorCode  DMSetVec(DM dm,Vec x)
2426 {
2427   PetscErrorCode ierr;
2428   PetscFunctionBegin;
2429   if (x) {
2430     if (!dm->x) {
2431       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2432     }
2433     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2434   }
2435   else if (dm->x) {
2436     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2437   }
2438   PetscFunctionReturn(0);
2439 }
2440 
PetscFList DMList                       = PETSC_NULL;  /* registry of DM type constructors, populated by DMRegister() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE; /* set once DMRegisterAll() has filled DMList */
2443 
2444 #undef __FUNCT__
2445 #define __FUNCT__ "DMSetType"
2446 /*@C
2447   DMSetType - Builds a DM, for a particular DM implementation.
2448 
2449   Collective on DM
2450 
2451   Input Parameters:
2452 + dm     - The DM object
2453 - method - The name of the DM type
2454 
2455   Options Database Key:
2456 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2457 
2458   Notes:
2459   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2460 
2461   Level: intermediate
2462 
2463 .keywords: DM, set, type
2464 .seealso: DMGetType(), DMCreate()
2465 @*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the type's constructor in the registry, filling it on first use */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation before installing the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2489 
2490 #undef __FUNCT__
2491 #define __FUNCT__ "DMGetType"
2492 /*@C
2493   DMGetType - Gets the DM type name (as a string) from the DM.
2494 
2495   Not Collective
2496 
2497   Input Parameter:
2498 . dm  - The DM
2499 
2500   Output Parameter:
2501 . type - The DM type name
2502 
2503   Level: intermediate
2504 
2505 .keywords: DM, get, type, name
2506 .seealso: DMSetType(), DMCreate()
2507 @*/
2508 PetscErrorCode  DMGetType(DM dm, DMType *type)
2509 {
2510   PetscErrorCode ierr;
2511 
2512   PetscFunctionBegin;
2513   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2514   PetscValidCharPointer(type,2);
2515   if (!DMRegisterAllCalled) {
2516     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2517   }
2518   *type = ((PetscObject)dm)->type_name;
2519   PetscFunctionReturn(0);
2520 }
2521 
2522 #undef __FUNCT__
2523 #define __FUNCT__ "DMConvert"
2524 /*@C
2525   DMConvert - Converts a DM to another DM, either of the same or different type.
2526 
2527   Collective on DM
2528 
2529   Input Parameters:
2530 + dm - the DM
2531 - newtype - new DM type (use "same" for the same type)
2532 
2533   Output Parameter:
2534 . M - pointer to new DM
2535 
2536   Notes:
2537   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2538   the MPI communicator of the generated DM is always the same as the communicator
2539   of the input DM.
2540 
2541   Level: intermediate
2542 
2543 .seealso: DMCreate()
2544 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* The composed-function name has the form "DMConvert_<fromtype>_<totype>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* Build a throwaway DM of the target type just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No generic fallback exists yet, so fail with an informative message */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2617 
2618 /*--------------------------------------------------------------------------------------------------------------------*/
2619 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Input Parameters:
+ sname    - name under which the DM type is registered
. path     - library path where the creation routine lives (may be empty for statically linked code)
. name     - symbol name of the creation routine
- function - the creation routine itself

  Level: advanced
@*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the routine as "path:name" so dynamic loading can locate the symbol later */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2639 
2640 
2641 /*--------------------------------------------------------------------------------------------------------------------*/
2642 #undef __FUNCT__
2643 #define __FUNCT__ "DMRegisterDestroy"
2644 /*@C
2645    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2646 
2647    Not Collective
2648 
2649    Level: advanced
2650 
2651 .keywords: DM, register, destroy
2652 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2653 @*/
2654 PetscErrorCode  DMRegisterDestroy(void)
2655 {
2656   PetscErrorCode ierr;
2657 
2658   PetscFunctionBegin;
2659   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2660   DMRegisterAllCalled = PETSC_FALSE;
2661   PetscFunctionReturn(0);
2662 }
2663 
2664 #undef __FUNCT__
2665 #define __FUNCT__ "DMLoad"
2666 /*@C
2667   DMLoad - Loads a DM that has been stored in binary  with DMView().
2668 
2669   Collective on PetscViewer
2670 
2671   Input Parameters:
2672 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2673            some related function before a call to DMLoad().
2674 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2675            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2676 
2677    Level: intermediate
2678 
2679   Notes:
2680    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2681 
2682   Notes for advanced users:
2683   Most users should not need to know the details of the binary storage
2684   format, since DMLoad() and DMView() completely hide these details.
2685   But for anyone who's interested, the standard binary matrix storage
2686   format is
2687 .vb
2688      has not yet been determined
2689 .ve
2690 
2691 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2692 @*/
2693 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2694 {
2695   PetscErrorCode ierr;
2696   PetscBool      isbinary;
2697   PetscInt       classid;
2698   char           type[256];
2699 
2700   PetscFunctionBegin;
2701   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2702   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2703   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2704   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2705 
2706   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2707   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2708   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2709   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2710   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2711   PetscFunctionReturn(0);
2712 }
2713 
2714 /******************************** FEM Support **********************************/
2715 
2716 #undef __FUNCT__
2717 #define __FUNCT__ "DMPrintCellVector"
2718 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2719   PetscInt       f;
2720   PetscErrorCode ierr;
2721 
2722   PetscFunctionBegin;
2723   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2724   for (f = 0; f < len; ++f) {
2725     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2726   }
2727   PetscFunctionReturn(0);
2728 }
2729 
2730 #undef __FUNCT__
2731 #define __FUNCT__ "DMPrintCellMatrix"
2732 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2733   PetscInt       f, g;
2734   PetscErrorCode ierr;
2735 
2736   PetscFunctionBegin;
2737   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2738   for (f = 0; f < rows; ++f) {
2739     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2740     for (g = 0; g < cols; ++g) {
2741       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2742     }
2743     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2744   }
2745   PetscFunctionReturn(0);
2746 }
2747 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection, or PETSC_NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2772 
2773 #undef __FUNCT__
2774 #define __FUNCT__ "DMSetDefaultSection"
2775 /*@
2776   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2777 
2778   Input Parameters:
2779 + dm - The DM
2780 - section - The PetscSection
2781 
2782   Level: intermediate
2783 
2784   Note: Any existing Section will be destroyed
2785 
2786 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2787 @*/
2788 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2789   PetscInt       numFields;
2790   PetscInt       f;
2791   PetscErrorCode ierr;
2792 
2793   PetscFunctionBegin;
2794   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2795   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2796   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2797   dm->defaultSection = section;
2798   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2799   if (numFields) {
2800     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2801     for (f = 0; f < numFields; ++f) {
2802       const char *name;
2803 
2804       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2805       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2806     }
2807   }
2808   PetscFunctionReturn(0);
2809 }
2810 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
  The section is built lazily on first request from the default (local) section and
  the point PetscSF, which both must already be present.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also derive the PetscLayout of owned values (stored in dm->map) as a side effect */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2844 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed. The DM takes ownership of the
  given section without incrementing its reference count.

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2869 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or PETSC_NULL if the DM has no default PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 means the SF graph was never set; build it lazily from the sections */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No layout available: report that by returning a NULL SF */
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
2911 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

  Note: The DM takes ownership of sf without incrementing its reference count,
  unlike DMSetPointSF() which references the incoming SF -- NOTE(review):
  confirm callers are expected to relinquish their reference here.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2937 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: dofs owned by another process are encoded in the global section with
  negative values, gdof = -(gdof+1) and goff = -(goff+1); this routine relies
  on that convention when sizing and filling the SF graph.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of owned (unconstrained) dofs; its ranges give each rank's global offset interval */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves, i.e. all unconstrained dofs visible on this process (owned or ghosted) */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local indices (into the local section storage) and remote (rank, index) pairs */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets for unconstrained dofs, skipping constrained indices cind[] */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Ghosted point: decode the owner's offset and search ranges[] for the owning rank */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Owned point: the root lives on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local[] and remote[] (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3026 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3051 
3052 #undef __FUNCT__
3053 #define __FUNCT__ "DMSetPointSF"
3054 /*@
3055   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3056 
3057   Input Parameters:
3058 + dm - The DM
3059 - sf - The PetscSF
3060 
3061   Level: intermediate
3062 
3063 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3064 @*/
3065 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3066   PetscErrorCode ierr;
3067 
3068   PetscFunctionBegin;
3069   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3070   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3071   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3072   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3073   dm->sf = sf;
3074   PetscFunctionReturn(0);
3075 }
3076 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* Return the number of fields currently stored in the DM (set by DMSetNumFields()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3087 
3088 #undef __FUNCT__
3089 #define __FUNCT__ "DMSetNumFields"
3090 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3091 {
3092   PetscInt       f;
3093   PetscErrorCode ierr;
3094 
3095   PetscFunctionBegin;
3096   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3097   for (f = 0; f < dm->numFields; ++f) {
3098     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3099   }
3100   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3101   dm->numFields = numFields;
3102   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3103   for (f = 0; f < dm->numFields; ++f) {
3104     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3105   }
3106   PetscFunctionReturn(0);
3107 }
3108 
3109 #undef __FUNCT__
3110 #define __FUNCT__ "DMGetField"
3111 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3112 {
3113   PetscFunctionBegin;
3114   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3115   PetscValidPointer(field, 2);
3116   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3117   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3118   *field = dm->fields[f];
3119   PetscFunctionReturn(0);
3120 }
3121 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the already-stored vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* The cached local form is now stale; it will be rebuilt by DMGetCoordinatesLocal() */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3154 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the already-stored vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* The cached global form is now stale; it will be rebuilt by DMGetCoordinates() */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3189 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local one via the coordinate DM */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3235 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local vector from the global one via the coordinate DM */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3281 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Level: intermediate

  Note: This is a borrowed reference; the coordinate DM is cached on the DM and
  created lazily by the type-specific createcoordinatedm implementation.

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3314