xref: /petsc/src/dm/interface/dm.c (revision 7083f85c484aa853dbab836c854c8615e7d8e855)
1 #include <petscsnes.h>
2 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
6 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the object (for example, to create a vector).

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the dependent packages must be initialized
     explicitly before any of their objects can be created */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* The function table starts empty; DMSetType() fills it in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;                    /* 1 means no exploitable block structure */
  v->coloringtype = IS_COLORING_GLOBAL;
  v->lf           = PETSC_NULL;
  v->lj           = PETSC_NULL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* 10 matches the fixed size of nullspaceConstructors[]; see DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
67 
68 
69 #undef __FUNCT__
70 #define __FUNCT__ "DMSetVecType"
71 /*@C
72        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
73 
74    Logically Collective on DMDA
75 
76    Input Parameter:
77 +  da - initial distributed array
78 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
79 
80    Options Database:
81 .   -dm_vec_type ctype
82 
83    Level: intermediate
84 
85 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
86 @*/
87 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
88 {
89   PetscErrorCode ierr;
90 
91   PetscFunctionBegin;
92   PetscValidHeaderSpecific(da,DM_CLASSID,1);
93   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
94   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
95   PetscFunctionReturn(0);
96 }
97 
98 #undef __FUNCT__
99 #define __FUNCT__ "DMSetMatType"
100 /*@C
101        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
102 
103    Logically Collective on DM
104 
105    Input Parameter:
106 +  dm - the DM context
107 .  ctype - the matrix type
108 
109    Options Database:
110 .   -dm_mat_type ctype
111 
112    Level: intermediate
113 
114 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
115 @*/
116 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
117 {
118   PetscErrorCode ierr;
119   PetscFunctionBegin;
120   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
121   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
122   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
123   PetscFunctionReturn(0);
124 }
125 
126 #undef __FUNCT__
127 #define __FUNCT__ "DMSetOptionsPrefix"
128 /*@C
129    DMSetOptionsPrefix - Sets the prefix used for searching for all
130    DMDA options in the database.
131 
132    Logically Collective on DMDA
133 
134    Input Parameter:
135 +  da - the DMDA context
136 -  prefix - the prefix to prepend to all option names
137 
138    Notes:
139    A hyphen (-) must NOT be given at the beginning of the prefix name.
140    The first character of all runtime options is AUTOMATICALLY the hyphen.
141 
142    Level: advanced
143 
144 .keywords: DMDA, set, options, prefix, database
145 
146 .seealso: DMSetFromOptions()
147 @*/
148 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
149 {
150   PetscErrorCode ierr;
151 
152   PetscFunctionBegin;
153   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
154   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
155   PetscFunctionReturn(0);
156 }
157 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a DM, for example a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to PETSC_NULL on return

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  /* the solution vector x holds a back-reference to this DM composed under key "DM" */
  if ((*dm)->x) {
    PetscObject obj;
    ierr = PetscObjectQuery((PetscObject)(*dm)->x,"DM",&obj);CHKERRQ(ierr);
    if (obj == (PetscObject)*dm) cnt++;
  }

  /* only references beyond the circular ones counted above keep the DM alive */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* work vectors still checked out by users indicate a usage error */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* a user-provided context is only destroyed when a destroy callback was supplied */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* implementation-specific teardown (DMDA, DMComposite, ...) */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
277 
278 #undef __FUNCT__
279 #define __FUNCT__ "DMSetUp"
280 /*@
281     DMSetUp - sets up the data structures inside a DM object
282 
283     Collective on DM
284 
285     Input Parameter:
286 .   dm - the DM object to setup
287 
288     Level: developer
289 
290 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
291 
292 @*/
293 PetscErrorCode  DMSetUp(DM dm)
294 {
295   PetscErrorCode ierr;
296 
297   PetscFunctionBegin;
298   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
299   if (dm->setupcalled) PetscFunctionReturn(0);
300   if (dm->ops->setup) {
301     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
302   }
303   dm->setupcalled = PETSC_TRUE;
304   PetscFunctionReturn(0);
305 }
306 
307 #undef __FUNCT__
308 #define __FUNCT__ "DMSetFromOptions"
309 /*@
310     DMSetFromOptions - sets parameters in a DM from the options database
311 
312     Collective on DM
313 
314     Input Parameter:
315 .   dm - the DM object to set options for
316 
317     Options Database:
318 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
319 .   -dm_vec_type <type>  type of vector to create inside DM
320 .   -dm_mat_type <type>  type of matrix to create inside DM
321 -   -dm_coloring_type <global or ghosted>
322 
323     Level: developer
324 
325 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
326 
327 @*/
328 PetscErrorCode  DMSetFromOptions(DM dm)
329 {
330   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
331   PetscErrorCode ierr;
332   char           typeName[256] = MATAIJ;
333 
334   PetscFunctionBegin;
335   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
336   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
337     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
338     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
339     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
340     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
341     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
342     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
343     if (flg) {
344       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
345     }
346     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
347     if (flg) {
348       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
349     }
350     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
351     if (dm->ops->setfromoptions) {
352       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
353     }
354     /* process any options handlers added with PetscObjectAddOptionsHandler() */
355     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
356   ierr = PetscOptionsEnd();CHKERRQ(ierr);
357   if (flg1) {
358     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
359   }
360   if (flg2) {
361     PetscViewer viewer;
362 
363     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
364     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
365     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
366     ierr = DMView(dm, viewer);CHKERRQ(ierr);
367     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
368   }
369   if (flg3) {
370     PetscViewer viewer;
371 
372     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
373     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
374     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
375     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
376     ierr = DMView(dm, viewer);CHKERRQ(ierr);
377     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
378   }
379   if (flg4) {
380     PetscViewer viewer;
381 
382     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
383     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
384     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
385     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
386     ierr = DMView(dm, viewer);CHKERRQ(ierr);
387     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
388   }
389   PetscFunctionReturn(0);
390 }
391 
392 #undef __FUNCT__
393 #define __FUNCT__ "DMView"
394 /*@C
395     DMView - Views a vector packer or DMDA.
396 
397     Collective on DM
398 
399     Input Parameter:
400 +   dm - the DM object to view
401 -   v - the viewer
402 
403     Level: developer
404 
405 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
406 
407 @*/
408 PetscErrorCode  DMView(DM dm,PetscViewer v)
409 {
410   PetscErrorCode ierr;
411 
412   PetscFunctionBegin;
413   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
414  if (!v) {
415     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
416   }
417   if (dm->ops->view) {
418     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
419   }
420   PetscFunctionReturn(0);
421 }
422 
423 #undef __FUNCT__
424 #define __FUNCT__ "DMCreateGlobalVector"
425 /*@
426     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
427 
428     Collective on DM
429 
430     Input Parameter:
431 .   dm - the DM object
432 
433     Output Parameter:
434 .   vec - the global vector
435 
436     Level: beginner
437 
438 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
439 
440 @*/
441 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
442 {
443   PetscErrorCode ierr;
444 
445   PetscFunctionBegin;
446   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
447   if (dm->defaultSection) {
448     PetscSection gSection;
449     PetscInt     localSize;
450 
451     ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
452     ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
453     ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
454     ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
455     /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
456     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
457     ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr);
458     /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
459     /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
460     /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
461   } else {
462     ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
463   }
464   PetscFunctionReturn(0);
465 }
466 
467 #undef __FUNCT__
468 #define __FUNCT__ "DMCreateLocalVector"
469 /*@
470     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
471 
472     Not Collective
473 
474     Input Parameter:
475 .   dm - the DM object
476 
477     Output Parameter:
478 .   vec - the local vector
479 
480     Level: beginner
481 
482 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
483 
484 @*/
485 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
486 {
487   PetscErrorCode ierr;
488 
489   PetscFunctionBegin;
490   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
491   if (dm->defaultSection) {
492     PetscInt localSize;
493 
494     ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
495     ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
496     ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
497     ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
498     ierr = PetscObjectCompose((PetscObject) *vec, "DM", (PetscObject) dm);CHKERRQ(ierr);
499   } else {
500     ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
501   }
502   PetscFunctionReturn(0);
503 }
504 
505 #undef __FUNCT__
506 #define __FUNCT__ "DMGetLocalToGlobalMapping"
507 /*@
508    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
509 
510    Collective on DM
511 
512    Input Parameter:
513 .  dm - the DM that provides the mapping
514 
515    Output Parameter:
516 .  ltog - the mapping
517 
518    Level: intermediate
519 
520    Notes:
521    This mapping can then be used by VecSetLocalToGlobalMapping() or
522    MatSetLocalToGlobalMapping().
523 
524 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
525 @*/
526 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
527 {
528   PetscErrorCode ierr;
529 
530   PetscFunctionBegin;
531   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
532   PetscValidPointer(ltog,2);
533   if (!dm->ltogmap) {
534     PetscSection section, sectionGlobal;
535 
536     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
537     if (section) {
538       PetscInt      *ltog;
539       PetscInt       pStart, pEnd, size, p, l;
540 
541       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
542       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
543       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
544       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
545       for (p = pStart, l = 0; p < pEnd; ++p) {
546         PetscInt dof, off, c;
547 
548         /* Should probably use constrained dofs */
549         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
550         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
551         for (c = 0; c < dof; ++c, ++l) {
552           ltog[l] = off+c;
553         }
554       }
555       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
556       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
557     } else {
558       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
559       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
560     }
561   }
562   *ltog = dm->ltogmap;
563   PetscFunctionReturn(0);
564 }
565 
566 #undef __FUNCT__
567 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
568 /*@
569    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
570 
571    Collective on DM
572 
573    Input Parameter:
574 .  da - the distributed array that provides the mapping
575 
576    Output Parameter:
577 .  ltog - the block mapping
578 
579    Level: intermediate
580 
581    Notes:
582    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
583    MatSetLocalToGlobalMappingBlock().
584 
585 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
586 @*/
587 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
588 {
589   PetscErrorCode ierr;
590 
591   PetscFunctionBegin;
592   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
593   PetscValidPointer(ltog,2);
594   if (!dm->ltogmapb) {
595     PetscInt bs;
596     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
597     if (bs > 1) {
598       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
599       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
600     } else {
601       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
602       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
603     }
604   }
605   *ltog = dm->ltogmapb;
606   PetscFunctionReturn(0);
607 }
608 
609 #undef __FUNCT__
610 #define __FUNCT__ "DMGetBlockSize"
611 /*@
612    DMGetBlockSize - Gets the inherent block size associated with a DM
613 
614    Not Collective
615 
616    Input Parameter:
617 .  dm - the DM with block structure
618 
619    Output Parameter:
620 .  bs - the block size, 1 implies no exploitable block structure
621 
622    Level: intermediate
623 
624 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
625 @*/
626 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
627 {
628   PetscFunctionBegin;
629   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
630   PetscValidPointer(bs,2);
631   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
632   *bs = dm->bs;
633   PetscFunctionReturn(0);
634 }
635 
636 #undef __FUNCT__
637 #define __FUNCT__ "DMCreateInterpolation"
638 /*@
639     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
640 
641     Collective on DM
642 
643     Input Parameter:
644 +   dm1 - the DM object
645 -   dm2 - the second, finer DM object
646 
647     Output Parameter:
648 +  mat - the interpolation
649 -  vec - the scaling (optional)
650 
651     Level: developer
652 
653     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
654         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
655 
656         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMDAGetCoordinateDA()) to interpolate the mesh coordinate vectors
657         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
658 
659 
660 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
661 
662 @*/
663 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
664 {
665   PetscErrorCode ierr;
666 
667   PetscFunctionBegin;
668   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
669   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
670   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
671   PetscFunctionReturn(0);
672 }
673 
674 #undef __FUNCT__
675 #define __FUNCT__ "DMCreateInjection"
676 /*@
677     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
678 
679     Collective on DM
680 
681     Input Parameter:
682 +   dm1 - the DM object
683 -   dm2 - the second, finer DM object
684 
685     Output Parameter:
686 .   ctx - the injection
687 
688     Level: developer
689 
690    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
691         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
692 
693 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
694 
695 @*/
696 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
697 {
698   PetscErrorCode ierr;
699 
700   PetscFunctionBegin;
701   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
702   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
703   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
704   PetscFunctionReturn(0);
705 }
706 
707 #undef __FUNCT__
708 #define __FUNCT__ "DMCreateColoring"
709 /*@C
710     DMCreateColoring - Gets coloring for a DMDA or DMComposite
711 
712     Collective on DM
713 
714     Input Parameter:
715 +   dm - the DM object
716 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
717 -   matype - either MATAIJ or MATBAIJ
718 
719     Output Parameter:
720 .   coloring - the coloring
721 
722     Level: developer
723 
724 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
725 
726 @*/
727 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
728 {
729   PetscErrorCode ierr;
730 
731   PetscFunctionBegin;
732   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
733   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
734   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
735   PetscFunctionReturn(0);
736 }
737 
738 #undef __FUNCT__
739 #define __FUNCT__ "DMCreateMatrix"
740 /*@C
741     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
742 
743     Collective on DM
744 
745     Input Parameter:
746 +   dm - the DM object
747 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
748             any type which inherits from one of these (such as MATAIJ)
749 
750     Output Parameter:
751 .   mat - the empty Jacobian
752 
753     Level: beginner
754 
755     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
756        do not need to do it yourself.
757 
758        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
759        the nonzero pattern call DMDASetMatPreallocateOnly()
760 
761        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
762        internally by PETSc.
763 
764        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
765        the indices for the global numbering for DMDAs which is complicated.
766 
767 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
768 
769 @*/
770 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
771 {
772   PetscErrorCode ierr;
773 
774   PetscFunctionBegin;
775   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
776 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
777   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
778 #endif
779   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
780   PetscValidPointer(mat,3);
781   if (dm->mattype) {
782     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
783   } else {
784     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
785   }
786   PetscFunctionReturn(0);
787 }
788 
789 #undef __FUNCT__
790 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
791 /*@
792   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
793     preallocated but the nonzero structure and zero values will not be set.
794 
795   Logically Collective on DMDA
796 
797   Input Parameter:
798 + dm - the DM
799 - only - PETSC_TRUE if only want preallocation
800 
801   Level: developer
802 .seealso DMCreateMatrix()
803 @*/
804 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
805 {
806   PetscFunctionBegin;
807   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
808   dm->prealloc_only = only;
809   PetscFunctionReturn(0);
810 }
811 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum number of entries
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array

  Level: developer

.seealso DMRestoreWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* reuse a checked-in link when one is available, otherwise allocate a new one */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  /* grow the link's buffer if the request exceeds its current capacity */
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* move the link onto the checked-out list; DMRestoreWorkArray() moves it back */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
857 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Returns a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum number of entries (as passed to DMGetWorkArray())
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the pointer previously filled by DMGetWorkArray(); set to PETSC_NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* find the checked-out link holding this array and move it back to the checked-in list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* the array was not on the checked-out list: caller error */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
896 
897 #undef __FUNCT__
898 #define __FUNCT__ "DMSetNullSpaceConstructor"
899 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
900 {
901   PetscFunctionBegin;
902   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
903   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
904   dm->nullspaceConstructors[field] = nullsp;
905   PetscFunctionReturn(0);
906 }
907 
908 #undef __FUNCT__
909 #define __FUNCT__ "DMCreateFieldIS"
910 /*@C
911   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
912 
913   Not collective
914 
915   Input Parameter:
916 . dm - the DM object
917 
918   Output Parameters:
919 + numFields  - The number of fields (or PETSC_NULL if not requested)
920 . fieldNames - The name for each field (or PETSC_NULL if not requested)
921 - fields     - The global indices for each field (or PETSC_NULL if not requested)
922 
923   Level: intermediate
924 
925   Notes:
926   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
927   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
928   PetscFree().
929 
930 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
931 @*/
932 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
933 {
934   PetscSection   section, sectionGlobal;
935   PetscErrorCode ierr;
936 
937   PetscFunctionBegin;
938   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
939   if (numFields) {
940     PetscValidPointer(numFields,2);
941     *numFields = 0;
942   }
943   if (fieldNames) {
944     PetscValidPointer(fieldNames,3);
945     *fieldNames = PETSC_NULL;
946   }
947   if (fields) {
948     PetscValidPointer(fields,4);
949     *fields = PETSC_NULL;
950   }
951   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
952   if (section) {
953     PetscInt *fieldSizes, **fieldIndices;
954     PetscInt  nF, f, pStart, pEnd, p;
955 
956     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
957     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
958     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
959     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
960     for (f = 0; f < nF; ++f) {
961       fieldSizes[f] = 0;
962     }
963     for (p = pStart; p < pEnd; ++p) {
964       PetscInt gdof;
965 
966       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
967       if (gdof > 0) {
968         for (f = 0; f < nF; ++f) {
969           PetscInt fdof, fcdof;
970 
971           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
972           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
973           fieldSizes[f] += fdof-fcdof;
974         }
975       }
976     }
977     for (f = 0; f < nF; ++f) {
978       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
979       fieldSizes[f] = 0;
980     }
981     for (p = pStart; p < pEnd; ++p) {
982       PetscInt gdof, goff;
983 
984       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
985       if (gdof > 0) {
986         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
987         for (f = 0; f < nF; ++f) {
988           PetscInt fdof, fcdof, fc;
989 
990           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
991           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
992           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
993             fieldIndices[f][fieldSizes[f]] = goff++;
994           }
995         }
996       }
997     }
998     if (numFields) {*numFields = nF;}
999     if (fieldNames) {
1000       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1001       for (f = 0; f < nF; ++f) {
1002         const char *fieldName;
1003 
1004         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1005         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1006       }
1007     }
1008     if (fields) {
1009       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1010       for (f = 0; f < nF; ++f) {
1011         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1012       }
1013     }
1014     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1015   } else {
1016     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1017   }
1018   PetscFunctionReturn(0);
1019 }
1020 
1021 #undef __FUNCT__
1022 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1023 /*@C
1024   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1025 
1026   Not Collective
1027 
1028   Input Parameters:
1029 + dm   - the DM object
1030 - name - the name of the field decomposition
1031 
1032   Output Parameter:
1033 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1034 
1035   Level: advanced
1036 
1037 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1038 @*/
1039 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1040 {
1041   PetscErrorCode ierr;
1042 
1043   PetscFunctionBegin;
1044   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1045   PetscValidCharPointer(name,2);
1046   PetscValidPointer(ddm,3);
1047   *ddm = PETSC_NULL;
1048   if (dm->ops->createfielddecompositiondm) {
1049     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1050   }
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 #undef __FUNCT__
1055 #define __FUNCT__ "DMCreateFieldDecomposition"
1056 /*@C
1057   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1058                           corresponding to different fields: each IS contains the global indices of the dofs of the
1059                           corresponding field. The optional list of DMs define the DM for each subproblem.
1060                           Generalizes DMCreateFieldIS().
1061 
1062   Not collective
1063 
1064   Input Parameter:
1065 . dm - the DM object
1066 
1067   Output Parameters:
1068 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1069 . namelist  - The name for each field (or PETSC_NULL if not requested)
1070 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1071 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1072 
1073   Level: intermediate
1074 
1075   Notes:
1076   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1077   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1078   and all of the arrays should be freed with PetscFree().
1079 
1080 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1081 @*/
1082 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1083 {
1084   PetscErrorCode ierr;
1085 
1086   PetscFunctionBegin;
1087   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1088   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1089   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1090   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1091   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1092   if (!dm->ops->createfielddecomposition) {
1093     PetscSection section;
1094     PetscInt     numFields, f;
1095 
1096     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1097     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1098     if (section && numFields && dm->ops->createsubdm) {
1099       *len = numFields;
1100       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1101       for (f = 0; f < numFields; ++f) {
1102         const char *fieldName;
1103 
1104         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1105         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1106         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1107       }
1108     } else {
1109       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1110       /* By default there are no DMs associated with subproblems. */
1111       if (dmlist) *dmlist = PETSC_NULL;
1112     }
1113   }
1114   else {
1115     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1116   }
1117   PetscFunctionReturn(0);
1118 }
1119 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers of the selected fields (array of length numFields)

  Output Parameters:
+ is - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  /* Unlike the decomposition routines, this one errors rather than no-ops without an implementation */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1155 
1156 #undef __FUNCT__
1157 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1158 /*@C
1159   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1160 
1161   Not Collective
1162 
1163   Input Parameters:
1164 + dm   - the DM object
1165 - name - the name of the subdomain decomposition
1166 
1167   Output Parameter:
1168 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1169 
1170   Level: advanced
1171 
1172 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1173 @*/
1174 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1175 {
1176   PetscErrorCode ierr;
1177 
1178   PetscFunctionBegin;
1179   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1180   PetscValidCharPointer(name,2);
1181   PetscValidPointer(ddm,3);
1182   *ddm = PETSC_NULL;
1183   if (dm->ops->createdomaindecompositiondm) {
1184     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1185   }
1186   PetscFunctionReturn(0);
1187 }
1188 
1189 #undef __FUNCT__
1190 #define __FUNCT__ "DMCreateDomainDecomposition"
1191 /*@C
1192   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1193                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1194                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1195                           define a nonoverlapping covering, while outer subdomains can overlap.
1196                           The optional list of DMs define the DM for each subproblem.
1197 
1198   Not collective
1199 
1200   Input Parameter:
1201 . dm - the DM object
1202 
1203   Output Parameters:
1204 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1205 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1206 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1207 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1208 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1209 
1210   Level: intermediate
1211 
1212   Notes:
1213   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1214   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1215   and all of the arrays should be freed with PetscFree().
1216 
1217 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1218 @*/
1219 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1220 {
1221   PetscErrorCode ierr;
1222 
1223   PetscFunctionBegin;
1224   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1225   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1226   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1227   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1228   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1229   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1230   if (dm->ops->createdomaindecomposition) {
1231     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1232   }
1233   PetscFunctionReturn(0);
1234 }
1235 
1236 #undef __FUNCT__
1237 #define __FUNCT__ "DMRefine"
1238 /*@
1239   DMRefine - Refines a DM object
1240 
1241   Collective on DM
1242 
1243   Input Parameter:
1244 + dm   - the DM object
1245 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1246 
1247   Output Parameter:
1248 . dmf - the refined DM, or PETSC_NULL
1249 
1250   Note: If no refinement was done, the return value is PETSC_NULL
1251 
1252   Level: developer
1253 
1254 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1255 @*/
1256 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1257 {
1258   PetscErrorCode ierr;
1259   DMRefineHookLink link;
1260 
1261   PetscFunctionBegin;
1262   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1263   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1264   if (*dmf) {
1265     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1266     (*dmf)->ops->initialguess = dm->ops->initialguess;
1267     (*dmf)->ops->function     = dm->ops->function;
1268     (*dmf)->ops->functionj    = dm->ops->functionj;
1269     if (dm->ops->jacobian != DMComputeJacobianDefault) {
1270       (*dmf)->ops->jacobian     = dm->ops->jacobian;
1271     }
1272     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1273     (*dmf)->ctx       = dm->ctx;
1274     (*dmf)->leveldown = dm->leveldown;
1275     (*dmf)->levelup   = dm->levelup + 1;
1276     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1277     for (link=dm->refinehook; link; link=link->next) {
1278       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1279     }
1280   }
1281   PetscFunctionReturn(0);
1282 }
1283 
1284 #undef __FUNCT__
1285 #define __FUNCT__ "DMRefineHookAdd"
1286 /*@
1287    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1288 
1289    Logically Collective
1290 
1291    Input Arguments:
1292 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1293 .  refinehook - function to run when setting up a coarser level
1294 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1295 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1296 
1297    Calling sequence of refinehook:
1298 $    refinehook(DM coarse,DM fine,void *ctx);
1299 
1300 +  coarse - coarse level DM
1301 .  fine - fine level DM to interpolate problem to
1302 -  ctx - optional user-defined function context
1303 
1304    Calling sequence for interphook:
1305 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1306 
1307 +  coarse - coarse level DM
1308 .  interp - matrix interpolating a coarse-level solution to the finer grid
1309 .  fine - fine level DM to update
1310 -  ctx - optional user-defined function context
1311 
1312    Level: advanced
1313 
1314    Notes:
1315    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1316 
1317    If this function is called multiple times, the hooks will be run in the order they are added.
1318 
1319 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1320 @*/
1321 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1322 {
1323   PetscErrorCode ierr;
1324   DMRefineHookLink link,*p;
1325 
1326   PetscFunctionBegin;
1327   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1328   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1329   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1330   link->refinehook = refinehook;
1331   link->interphook = interphook;
1332   link->ctx = ctx;
1333   link->next = PETSC_NULL;
1334   *p = link;
1335   PetscFunctionReturn(0);
1336 }
1337 
1338 #undef __FUNCT__
1339 #define __FUNCT__ "DMInterpolate"
1340 /*@
1341    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1342 
1343    Collective if any hooks are
1344 
1345    Input Arguments:
1346 +  coarse - coarser DM to use as a base
1347 .  restrct - interpolation matrix, apply using MatInterpolate()
1348 -  fine - finer DM to update
1349 
1350    Level: developer
1351 
1352 .seealso: DMRefineHookAdd(), MatInterpolate()
1353 @*/
1354 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1355 {
1356   PetscErrorCode ierr;
1357   DMRefineHookLink link;
1358 
1359   PetscFunctionBegin;
1360   for (link=fine->refinehook; link; link=link->next) {
1361     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1362   }
1363   PetscFunctionReturn(0);
1364 }
1365 
1366 #undef __FUNCT__
1367 #define __FUNCT__ "DMGetRefineLevel"
1368 /*@
1369     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1370 
1371     Not Collective
1372 
1373     Input Parameter:
1374 .   dm - the DM object
1375 
1376     Output Parameter:
1377 .   level - number of refinements
1378 
1379     Level: developer
1380 
1381 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1382 
1383 @*/
1384 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1385 {
1386   PetscFunctionBegin;
1387   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1388   *level = dm->levelup;
1389   PetscFunctionReturn(0);
1390 }
1391 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Notes: Must be followed by a matching call to DMGlobalToLocalEnd().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* When the DM has a default star forest, communicate through it; otherwise defer to the type implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF broadcast only copies global values into the local vector; accumulation is not supported here */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): the arrays are restored while the broadcast is still outstanding; DMGlobalToLocalEnd()
       re-fetches them for PetscSFBcastEnd(), which presumes VecGetArray() returns the same buffers -- confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The *_ALL_VALUES modes are mapped to their plain equivalents for the type-specific implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1433 
1434 #undef __FUNCT__
1435 #define __FUNCT__ "DMGlobalToLocalEnd"
1436 /*@
1437     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1438 
1439     Neighbor-wise Collective on DM
1440 
1441     Input Parameters:
1442 +   dm - the DM object
1443 .   g - the global vector
1444 .   mode - INSERT_VALUES or ADD_VALUES
1445 -   l - the local vector
1446 
1447 
1448     Level: beginner
1449 
1450 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1451 
1452 @*/
1453 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1454 {
1455   PetscSF        sf;
1456   PetscErrorCode ierr;
1457   PetscScalar    *lArray, *gArray;
1458 
1459   PetscFunctionBegin;
1460   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1461   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1462   if (sf) {
1463   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1464 
1465     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1466     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1467     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1468     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1469     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1470   } else {
1471     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1472   }
1473   PetscFunctionReturn(0);
1474 }
1475 
1476 #undef __FUNCT__
1477 #define __FUNCT__ "DMLocalToGlobalBegin"
1478 /*@
1479     DMLocalToGlobalBegin - updates global vectors from local vectors
1480 
1481     Neighbor-wise Collective on DM
1482 
1483     Input Parameters:
1484 +   dm - the DM object
1485 .   l - the local vector
1486 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1487            base point.
1488 - - the global vector
1489 
1490     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1491            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1492            global array to the final global array with VecAXPY().
1493 
1494     Level: beginner
1495 
1496 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1497 
1498 @*/
1499 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1500 {
1501   PetscSF        sf;
1502   PetscErrorCode ierr;
1503 
1504   PetscFunctionBegin;
1505   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1506   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1507   if (sf) {
1508     MPI_Op       op;
1509     PetscScalar *lArray, *gArray;
1510 
1511     switch(mode) {
1512     case INSERT_VALUES:
1513     case INSERT_ALL_VALUES:
1514 #if defined(PETSC_HAVE_MPI_REPLACE)
1515       op = MPI_REPLACE; break;
1516 #else
1517       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1518 #endif
1519     case ADD_VALUES:
1520     case ADD_ALL_VALUES:
1521       op = MPI_SUM; break;
1522   default:
1523     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1524     }
1525     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1526     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1527     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1528     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1529     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1530   } else {
1531     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1532   }
1533   PetscFunctionReturn(0);
1534 }
1535 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector

    Notes: Must be preceded by a matching call to DMLocalToGlobalBegin().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode to the MPI reduction used by the star forest; must match DMLocalToGlobalBegin() */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The *_ALL_VALUES modes are mapped to their plain equivalents for the type-specific implementation */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1591 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian by finite-difference coloring (using the MatFDColoring
    stored in the DM) when no Jacobian computation routine is provided

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - matrix structure flag; always set to SAME_NONZERO_PATTERN

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* Fill B by finite differencing with the coloring context held in dm->fd */
  ierr  = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  if (A != B) {
    /* Only B was computed; flush A's assembly state so it is usable by the solver */
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1625 
1626 #undef __FUNCT__
1627 #define __FUNCT__ "DMCoarsen"
1628 /*@
1629     DMCoarsen - Coarsens a DM object
1630 
1631     Collective on DM
1632 
1633     Input Parameter:
1634 +   dm - the DM object
1635 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1636 
1637     Output Parameter:
1638 .   dmc - the coarsened DM
1639 
1640     Level: developer
1641 
1642 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1643 
1644 @*/
1645 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1646 {
1647   PetscErrorCode ierr;
1648   DMCoarsenHookLink link;
1649 
1650   PetscFunctionBegin;
1651   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1652   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1653   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1654   (*dmc)->ops->initialguess = dm->ops->initialguess;
1655   (*dmc)->ops->function     = dm->ops->function;
1656   (*dmc)->ops->functionj    = dm->ops->functionj;
1657   if (dm->ops->jacobian != DMComputeJacobianDefault) {
1658     (*dmc)->ops->jacobian     = dm->ops->jacobian;
1659   }
1660   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1661   (*dmc)->ctx       = dm->ctx;
1662   (*dmc)->levelup   = dm->levelup;
1663   (*dmc)->leveldown = dm->leveldown + 1;
1664   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1665   for (link=dm->coarsenhook; link; link=link->next) {
1666     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1667   }
1668   PetscFunctionReturn(0);
1669 }
1670 
1671 #undef __FUNCT__
1672 #define __FUNCT__ "DMCoarsenHookAdd"
1673 /*@
1674    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1675 
1676    Logically Collective
1677 
1678    Input Arguments:
1679 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1680 .  coarsenhook - function to run when setting up a coarser level
1681 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1682 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1683 
1684    Calling sequence of coarsenhook:
1685 $    coarsenhook(DM fine,DM coarse,void *ctx);
1686 
1687 +  fine - fine level DM
1688 .  coarse - coarse level DM to restrict problem to
1689 -  ctx - optional user-defined function context
1690 
1691    Calling sequence for restricthook:
1692 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1693 
1694 +  fine - fine level DM
1695 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1696 .  rscale - scaling vector for restriction
1697 .  inject - matrix restricting by injection
1698 .  coarse - coarse level DM to update
1699 -  ctx - optional user-defined function context
1700 
1701    Level: advanced
1702 
1703    Notes:
1704    This function is only needed if auxiliary data needs to be set up on coarse grids.
1705 
1706    If this function is called multiple times, the hooks will be run in the order they are added.
1707 
1708    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1709    extract the finest level information from its context (instead of from the SNES).
1710 
1711 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1712 @*/
1713 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1714 {
1715   PetscErrorCode ierr;
1716   DMCoarsenHookLink link,*p;
1717 
1718   PetscFunctionBegin;
1719   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1720   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1721   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1722   link->coarsenhook = coarsenhook;
1723   link->restricthook = restricthook;
1724   link->ctx = ctx;
1725   link->next = PETSC_NULL;
1726   *p = link;
1727   PetscFunctionReturn(0);
1728 }
1729 
1730 #undef __FUNCT__
1731 #define __FUNCT__ "DMRestrict"
1732 /*@
1733    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1734 
1735    Collective if any hooks are
1736 
1737    Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1742 
1743    Level: developer
1744 
1745 .seealso: DMCoarsenHookAdd(), MatRestrict()
1746 @*/
1747 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1748 {
1749   PetscErrorCode ierr;
1750   DMCoarsenHookLink link;
1751 
1752   PetscFunctionBegin;
1753   for (link=fine->coarsenhook; link; link=link->next) {
1754     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1755   }
1756   PetscFunctionReturn(0);
1757 }
1758 
1759 #undef __FUNCT__
1760 #define __FUNCT__ "DMBlockRestrictHookAdd"
1761 /*@
1762    DMBlockRestrictHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1763 
1764    Logically Collective
1765 
1766    Input Arguments:
1767 +  global - global DM
1768 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)
1770 
1771    Calling sequence for restricthook:
1772 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1773 
1774 +  global - global DM
1775 .  out    - scatter to the outer (with ghost and overlap points) block vector
1776 .  in     - scatter to block vector values only owned locally
1777 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1778 -  ctx - optional user-defined function context
1779 
1780    Level: advanced
1781 
1782    Notes:
1783    This function is only needed if auxiliary data needs to be set up on coarse grids.
1784 
1785    If this function is called multiple times, the hooks will be run in the order they are added.
1786 
1787    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1788    extract the finest level information from its context (instead of from the SNES).
1789 
1790 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1791 @*/
1792 PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1793 {
1794   PetscErrorCode ierr;
1795   DMBlockRestrictHookLink link,*p;
1796 
1797   PetscFunctionBegin;
1798   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1799   for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1800   ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
1801   link->restricthook = restricthook;
1802   link->ctx = ctx;
1803   link->next = PETSC_NULL;
1804   *p = link;
1805   PetscFunctionReturn(0);
1806 }
1807 
1808 #undef __FUNCT__
1809 #define __FUNCT__ "DMBlockRestrict"
1810 /*@
1811    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1812 
1813    Collective if any hooks are
1814 
   Input Arguments:
+  global - global DM to use as a base
.  in     - VecScatter to block vector values owned locally
.  out    - VecScatter to the outer (with ghost and overlap points) block vector
-  block  - block DM to update
1820 
1821    Level: developer
1822 
1823 .seealso: DMCoarsenHookAdd(), MatRestrict()
1824 @*/
1825 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1826 {
1827   PetscErrorCode ierr;
1828   DMBlockRestrictHookLink link;
1829 
1830   PetscFunctionBegin;
1831   for (link=global->blockrestricthook; link; link=link->next) {
1832     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1833   }
1834   PetscFunctionReturn(0);
1835 }
1836 
1837 #undef __FUNCT__
1838 #define __FUNCT__ "DMGetCoarsenLevel"
1839 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
1841 
1842     Not Collective
1843 
1844     Input Parameter:
1845 .   dm - the DM object
1846 
1847     Output Parameter:
1848 .   level - number of coarsenings
1849 
1850     Level: developer
1851 
1852 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1853 
1854 @*/
1855 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
1856 {
1857   PetscFunctionBegin;
1858   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1859   *level = dm->leveldown;
1860   PetscFunctionReturn(0);
1861 }
1862 
1863 
1864 
1865 #undef __FUNCT__
1866 #define __FUNCT__ "DMRefineHierarchy"
1867 /*@C
1868     DMRefineHierarchy - Refines a DM object, all levels at once
1869 
1870     Collective on DM
1871 
1872     Input Parameter:
1873 +   dm - the DM object
1874 -   nlevels - the number of levels of refinement
1875 
1876     Output Parameter:
1877 .   dmf - the refined DM hierarchy
1878 
1879     Level: developer
1880 
1881 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1882 
1883 @*/
1884 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
1885 {
1886   PetscErrorCode ierr;
1887 
1888   PetscFunctionBegin;
1889   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1890   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1891   if (nlevels == 0) PetscFunctionReturn(0);
1892   if (dm->ops->refinehierarchy) {
1893     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
1894   } else if (dm->ops->refine) {
1895     PetscInt i;
1896 
1897     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
1898     for (i=1; i<nlevels; i++) {
1899       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
1900     }
1901   } else {
1902     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
1903   }
1904   PetscFunctionReturn(0);
1905 }
1906 
1907 #undef __FUNCT__
1908 #define __FUNCT__ "DMCoarsenHierarchy"
1909 /*@C
1910     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
1911 
1912     Collective on DM
1913 
1914     Input Parameter:
1915 +   dm - the DM object
1916 -   nlevels - the number of levels of coarsening
1917 
1918     Output Parameter:
1919 .   dmc - the coarsened DM hierarchy
1920 
1921     Level: developer
1922 
1923 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1924 
1925 @*/
1926 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
1927 {
1928   PetscErrorCode ierr;
1929 
1930   PetscFunctionBegin;
1931   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1932   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
1933   if (nlevels == 0) PetscFunctionReturn(0);
1934   PetscValidPointer(dmc,3);
1935   if (dm->ops->coarsenhierarchy) {
1936     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
1937   } else if (dm->ops->coarsen) {
1938     PetscInt i;
1939 
1940     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
1941     for (i=1; i<nlevels; i++) {
1942       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
1943     }
1944   } else {
1945     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
1946   }
1947   PetscFunctionReturn(0);
1948 }
1949 
1950 #undef __FUNCT__
1951 #define __FUNCT__ "DMCreateAggregates"
1952 /*@
1953    DMCreateAggregates - Gets the aggregates that map between
1954    grids associated with two DMs.
1955 
1956    Collective on DM
1957 
1958    Input Parameters:
1959 +  dmc - the coarse grid DM
1960 -  dmf - the fine grid DM
1961 
1962    Output Parameters:
1963 .  rest - the restriction matrix (transpose of the projection matrix)
1964 
1965    Level: intermediate
1966 
1967 .keywords: interpolation, restriction, multigrid
1968 
1969 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
1970 @*/
1971 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
1972 {
1973   PetscErrorCode ierr;
1974 
1975   PetscFunctionBegin;
1976   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
1977   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
1978   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
1979   PetscFunctionReturn(0);
1980 }
1981 
1982 #undef __FUNCT__
1983 #define __FUNCT__ "DMSetApplicationContextDestroy"
1984 /*@C
1985     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
1986 
1987     Not Collective
1988 
1989     Input Parameters:
1990 +   dm - the DM object
1991 -   destroy - the destroy function
1992 
1993     Level: intermediate
1994 
1995 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
1996 
1997 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stash the callback; per the man page above it is applied to the user context
     when the DM is destroyed. Passing PETSC_NULL presumably clears it — confirm. */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2005 
2006 #undef __FUNCT__
2007 #define __FUNCT__ "DMSetApplicationContext"
2008 /*@
2009     DMSetApplicationContext - Set a user context into a DM object
2010 
2011     Not Collective
2012 
2013     Input Parameters:
2014 +   dm - the DM object
2015 -   ctx - the user context
2016 
2017     Level: intermediate
2018 
2019 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2020 
2021 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not take ownership of ctx; use DMSetApplicationContextDestroy()
     to have the DM free it at destruction time */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2029 
2030 #undef __FUNCT__
2031 #define __FUNCT__ "DMGetApplicationContext"
2032 /*@
2033     DMGetApplicationContext - Gets a user context from a DM object
2034 
2035     Not Collective
2036 
2037     Input Parameter:
2038 .   dm - the DM object
2039 
2040     Output Parameter:
2041 .   ctx - the user context
2042 
2043     Level: intermediate
2044 
2045 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2046 
2047 @*/
2048 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2049 {
2050   PetscFunctionBegin;
2051   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2052   *(void**)ctx = dm->ctx;
2053   PetscFunctionReturn(0);
2054 }
2055 
2056 #undef __FUNCT__
2057 #define __FUNCT__ "DMSetInitialGuess"
2058 /*@C
2059     DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers
2060 
2061     Logically Collective on DM
2062 
2063     Input Parameter:
+   dm - the DM object
2065 -   f - the function to compute the initial guess
2066 
2067     Level: intermediate
2068 
2069 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2070 
2071 @*/
PetscErrorCode  DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the callback; it is invoked later by DMComputeInitialGuess() */
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}
2079 
2080 #undef __FUNCT__
2081 #define __FUNCT__ "DMSetFunction"
2082 /*@C
2083     DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES
2084 
2085     Logically Collective on DM
2086 
2087     Input Parameter:
2088 +   dm - the DM object
2089 -   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)
2090 
2091     Level: intermediate
2092 
2093     Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite differences if no Jacobian
2094            computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.
2095 
2096 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2097          DMSetJacobian()
2098 
2099 @*/
PetscErrorCode  DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->function = f;
  /* functionj is the function used for finite-difference Jacobians (see
     DMComputeJacobian()); it is only updated when f is non-NULL, so canceling
     with PETSC_NULL keeps the previous FD function — matches the Notes above */
  if (f) {
    dm->ops->functionj = f;
  }
  PetscFunctionReturn(0);
}
2110 
2111 #undef __FUNCT__
2112 #define __FUNCT__ "DMSetJacobian"
2113 /*@C
2114     DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES
2115 
2116     Logically Collective on DM
2117 
2118     Input Parameter:
+   dm - the DM object
2120 -   f - the function to compute the matrix entries
2121 
2122     Level: intermediate
2123 
2124 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2125          DMSetFunction()
2126 
2127 @*/
PetscErrorCode  DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the callback; it is invoked later by DMComputeJacobian() */
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}
2135 
2136 #undef __FUNCT__
2137 #define __FUNCT__ "DMSetVariableBounds"
2138 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2140 
2141     Logically Collective on DM
2142 
2143     Input Parameter:
2144 +   dm - the DM object
2145 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2146 
2147     Level: intermediate
2148 
2149 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2150          DMSetJacobian()
2151 
2152 @*/
2153 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2154 {
2155   PetscFunctionBegin;
2156   dm->ops->computevariablebounds = f;
2157   PetscFunctionReturn(0);
2158 }
2159 
2160 #undef __FUNCT__
2161 #define __FUNCT__ "DMHasVariableBounds"
2162 /*@
2163     DMHasVariableBounds - does the DM object have a variable bounds function?
2164 
2165     Not Collective
2166 
2167     Input Parameter:
.   dm - the DM object
2169 
2170     Output Parameter:
2171 .   flg - PETSC_TRUE if the variable bounds function exists
2172 
2173     Level: developer
2174 
2175 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2176 
2177 @*/
2178 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2179 {
2180   PetscFunctionBegin;
2181   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2182   PetscFunctionReturn(0);
2183 }
2184 
2185 #undef __FUNCT__
2186 #define __FUNCT__ "DMComputeVariableBounds"
2187 /*@C
2188     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2189 
2190     Logically Collective on DM
2191 
2192     Input Parameters:
+   dm - the DM object
2194 -   x  - current solution at which the bounds are computed
2195 
2196     Output parameters:
2197 +   xl - lower bound
2198 -   xu - upper bound
2199 
2200     Level: intermediate
2201 
2202 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2203          DMSetFunction(), DMSetVariableBounds()
2204 
2205 @*/
2206 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2207 {
2208   PetscErrorCode ierr;
2209   PetscFunctionBegin;
2210   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2211   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2212   if (dm->ops->computevariablebounds) {
2213     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2214   }
2215   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2216   PetscFunctionReturn(0);
2217 }
2218 
2219 #undef __FUNCT__
2220 #define __FUNCT__ "DMComputeInitialGuess"
2221 /*@
2222     DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers
2223 
2224     Collective on DM
2225 
2226     Input Parameter:
+   dm - the DM object
2228 -   x - the vector to hold the initial guess values
2229 
2230     Level: developer
2231 
2232 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetRhs(), DMSetMat()
2233 
2234 @*/
PetscErrorCode  DMComputeInitialGuess(DM dm,Vec x)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The initial-guess callback is mandatory here; there is no fallback */
  if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()");
  ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2244 
2245 #undef __FUNCT__
2246 #define __FUNCT__ "DMHasInitialGuess"
2247 /*@
2248     DMHasInitialGuess - does the DM object have an initial guess function
2249 
2250     Not Collective
2251 
2252     Input Parameter:
.   dm - the DM object
2254 
2255     Output Parameter:
2256 .   flg - PETSC_TRUE if function exists
2257 
2258     Level: developer
2259 
2260 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2261 
2262 @*/
2263 PetscErrorCode  DMHasInitialGuess(DM dm,PetscBool  *flg)
2264 {
2265   PetscFunctionBegin;
2266   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2267   *flg =  (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
2268   PetscFunctionReturn(0);
2269 }
2270 
2271 #undef __FUNCT__
2272 #define __FUNCT__ "DMHasFunction"
2273 /*@
2274     DMHasFunction - does the DM object have a function
2275 
2276     Not Collective
2277 
2278     Input Parameter:
.   dm - the DM object
2280 
2281     Output Parameter:
2282 .   flg - PETSC_TRUE if function exists
2283 
2284     Level: developer
2285 
2286 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2287 
2288 @*/
2289 PetscErrorCode  DMHasFunction(DM dm,PetscBool  *flg)
2290 {
2291   PetscFunctionBegin;
2292   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2293   *flg =  (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
2294   PetscFunctionReturn(0);
2295 }
2296 
2297 #undef __FUNCT__
2298 #define __FUNCT__ "DMHasJacobian"
2299 /*@
2300     DMHasJacobian - does the DM object have a matrix function
2301 
2302     Not Collective
2303 
2304     Input Parameter:
.   dm - the DM object
2306 
2307     Output Parameter:
2308 .   flg - PETSC_TRUE if function exists
2309 
2310     Level: developer
2311 
2312 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2313 
2314 @*/
2315 PetscErrorCode  DMHasJacobian(DM dm,PetscBool  *flg)
2316 {
2317   PetscFunctionBegin;
2318   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2319   *flg =  (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE;
2320   PetscFunctionReturn(0);
2321 }
2322 
2323 #undef __FUNCT__
2324 #define __FUNCT__ "DMHasColoring"
2325 /*@
2326     DMHasColoring - does the DM object have a method of providing a coloring?
2327 
2328     Not Collective
2329 
2330     Input Parameter:
2331 .   dm - the DM object
2332 
2333     Output Parameter:
2334 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2335 
2336     Level: developer
2337 
2338 .seealso DMHasFunction(), DMCreateColoring()
2339 
2340 @*/
2341 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2342 {
2343   PetscFunctionBegin;
2344   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2345   PetscFunctionReturn(0);
2346 }
2347 
2348 #undef  __FUNCT__
2349 #define __FUNCT__ "DMSetVec"
2350 /*@C
2351     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2352 
2353     Collective on DM
2354 
2355     Input Parameter:
2356 +   dm - the DM object
2357 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2358 
2359     Level: developer
2360 
2361 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2362          DMSetFunction(), DMSetJacobian(), DMSetVariableBounds()
2363 
2364 @*/
2365 PetscErrorCode  DMSetVec(DM dm,Vec x)
2366 {
2367   PetscErrorCode ierr;
2368   PetscFunctionBegin;
2369   if (x) {
2370     if (!dm->x) {
2371       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2372     }
2373     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2374   }
2375   else if (dm->x) {
2376     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2377   }
2378   PetscFunctionReturn(0);
2379 }
2380 
2381 
2382 #undef __FUNCT__
2383 #define __FUNCT__ "DMComputeFunction"
2384 /*@
2385     DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES
2386 
2387     Collective on DM
2388 
2389     Input Parameter:
+   dm - the DM object
2391 .   x - the location where the function is evaluationed, may be ignored for linear problems
2392 -   b - the vector to hold the right hand side entries
2393 
2394     Level: developer
2395 
2396 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2397          DMSetJacobian()
2398 
2399 @*/
PetscErrorCode  DMComputeFunction(DM dm,Vec x,Vec b)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The function callback is mandatory; there is no fallback */
  if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()");
  /* Push a stack frame so errors inside user code are attributed to it; Push/Pop must bracket the call */
  PetscStackPush("DM user function");
  ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}
2411 
2412 
2413 
2414 #undef __FUNCT__
2415 #define __FUNCT__ "DMComputeJacobian"
2416 /*@
2417     DMComputeJacobian - compute the matrix entries for the solver
2418 
2419     Collective on DM
2420 
2421     Input Parameter:
2422 +   dm - the DM object
2423 .   x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
2424 .   A - matrix that defines the operator for the linear solve
2425 -   B - the matrix used to construct the preconditioner
2426 
2427     Level: developer
2428 
2429 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2430          DMSetFunction()
2431 
2432 @*/
PetscErrorCode  DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* No user Jacobian: lazily build a finite-difference coloring Jacobian from the
       function set with DMSetFunction() (stored in ops->functionj), and install
       DMComputeJacobianDefault as the callback so this setup runs only once */
    ISColoring     coloring;
    MatFDColoring  fd;
    MatType        mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed */
    /* NOTE(review): this dereference presumably breaks a reference cycle created by
       MatFDColoringSetFunction holding dm — confirm before changing */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  /* For nonlinear problems with no x supplied, fall back to the vector cached by DMSetVec()/a previous call */
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  /* When A and B are distinct, the callback may only have assembled B; finish assembling A */
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2474 
2475 
PetscFList DMList                       = PETSC_NULL;  /* registry of DM type constructors, filled by DMRegister() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE; /* set once DMRegisterAll() has populated DMList */
2478 
2479 #undef __FUNCT__
2480 #define __FUNCT__ "DMSetType"
2481 /*@C
2482   DMSetType - Builds a DM, for a particular DM implementation.
2483 
2484   Collective on DM
2485 
2486   Input Parameters:
2487 + dm     - The DM object
2488 - method - The name of the DM type
2489 
2490   Options Database Key:
2491 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2492 
2493   Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA or DMCOMPOSITE).
2495 
2496   Level: intermediate
2497 
2498 .keywords: DM, set, type
2499 .seealso: DMGetType(), DMCreate()
2500 @*/
2501 PetscErrorCode  DMSetType(DM dm, DMType method)
2502 {
2503   PetscErrorCode (*r)(DM);
2504   PetscBool      match;
2505   PetscErrorCode ierr;
2506 
2507   PetscFunctionBegin;
2508   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2509   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2510   if (match) PetscFunctionReturn(0);
2511 
2512   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2513   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2514   if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2515 
2516   if (dm->ops->destroy) {
2517     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2518     dm->ops->destroy = PETSC_NULL;
2519   }
2520   ierr = (*r)(dm);CHKERRQ(ierr);
2521   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2522   PetscFunctionReturn(0);
2523 }
2524 
2525 #undef __FUNCT__
2526 #define __FUNCT__ "DMGetType"
2527 /*@C
2528   DMGetType - Gets the DM type name (as a string) from the DM.
2529 
2530   Not Collective
2531 
2532   Input Parameter:
2533 . dm  - The DM
2534 
2535   Output Parameter:
2536 . type - The DM type name
2537 
2538   Level: intermediate
2539 
2540 .keywords: DM, get, type, name
2541 .seealso: DMSetType(), DMCreate()
2542 @*/
2543 PetscErrorCode  DMGetType(DM dm, DMType *type)
2544 {
2545   PetscErrorCode ierr;
2546 
2547   PetscFunctionBegin;
2548   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2549   PetscValidCharPointer(type,2);
2550   if (!DMRegisterAllCalled) {
2551     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2552   }
2553   *type = ((PetscObject)dm)->type_name;
2554   PetscFunctionReturn(0);
2555 }
2556 
2557 #undef __FUNCT__
2558 #define __FUNCT__ "DMConvert"
2559 /*@C
2560   DMConvert - Converts a DM to another DM, either of the same or different type.
2561 
2562   Collective on DM
2563 
2564   Input Parameters:
2565 + dm - the DM
2566 - newtype - new DM type (use "same" for the same type)
2567 
2568   Output Parameter:
2569 . M - pointer to new DM
2570 
2571   Notes:
2572   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2573   the MPI communicator of the generated DM is always the same as the communicator
2574   of the input DM.
2575 
2576   Level: intermediate
2577 
2578 .seealso: DMCreate()
2579 @*/
2580 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2581 {
2582   DM             B;
2583   char           convname[256];
2584   PetscBool      sametype, issame;
2585   PetscErrorCode ierr;
2586 
2587   PetscFunctionBegin;
2588   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2589   PetscValidType(dm,1);
2590   PetscValidPointer(M,3);
2591   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2592   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2593   {
2594     PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;
2595 
2596     /*
2597        Order of precedence:
2598        1) See if a specialized converter is known to the current DM.
2599        2) See if a specialized converter is known to the desired DM class.
2600        3) See if a good general converter is registered for the desired class
2601        4) See if a good general converter is known for the current matrix.
2602        5) Use a really basic converter.
2603     */
2604 
2605     /* 1) See if a specialized converter is known to the current DM and the desired class */
2606     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2607     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2608     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2609     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2610     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2611     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2612     if (conv) goto foundconv;
2613 
2614     /* 2)  See if a specialized converter is known to the desired DM class. */
2615     ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
2616     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2617     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2618     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2619     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2620     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2621     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2622     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2623     if (conv) {
2624       ierr = DMDestroy(&B);CHKERRQ(ierr);
2625       goto foundconv;
2626     }
2627 
2628 #if 0
2629     /* 3) See if a good general converter is registered for the desired class */
2630     conv = B->ops->convertfrom;
2631     ierr = DMDestroy(&B);CHKERRQ(ierr);
2632     if (conv) goto foundconv;
2633 
2634     /* 4) See if a good general converter is known for the current matrix */
2635     if (dm->ops->convert) {
2636       conv = dm->ops->convert;
2637     }
2638     if (conv) goto foundconv;
2639 #endif
2640 
2641     /* 5) Use a really basic converter. */
2642     SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2643 
2644     foundconv:
2645     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2646     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2647     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2648   }
2649   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2650   PetscFunctionReturn(0);
2651 }
2652 
2653 /*--------------------------------------------------------------------------------------------------------------------*/
2654 
2655 #undef __FUNCT__
2656 #define __FUNCT__ "DMRegister"
2657 /*@C
2658   DMRegister - See DMRegisterDynamic()
2659 
2660   Level: advanced
2661 @*/
2662 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2663 {
2664   char fullname[PETSC_MAX_PATH_LEN];
2665   PetscErrorCode ierr;
2666 
2667   PetscFunctionBegin;
2668   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2669   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2670   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2671   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2672   PetscFunctionReturn(0);
2673 }
2674 
2675 
2676 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Reset the flag so a later DMRegisterAll() can repopulate the list */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2698 
2699 #if defined(PETSC_HAVE_MATLAB_ENGINE)
2700 #include <mex.h>
2701 
2702 typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;
2703 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
                         DMSetFunctionMatlab().

   For linear problems x is null

   Implementation note: the PETSc object handles (DM, Vecs) are passed to
   MATLAB by copying each pointer bit-wise into a long long and wrapping it
   in a double scalar; the MATLAB-side glue converts them back.

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 1,nrhs = 4;    /* one MATLAB output (error code), four inputs */
  mxArray	    *plhs[1],*prhs[4];
  long long int     lx = 0,ly = 0,ls = 0; /* zeroed so unused high bytes are defined when sizeof(ptr) < 8 */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)ly);
  prhs[3] =  mxCreateString(sctx->funcname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* the single MATLAB output is the PETSc error code returned by the user function */
  ierr    =  mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}
2745 
2746 
2747 #undef __FUNCT__
2748 #define __FUNCT__ "DMSetFunctionMatlab"
2749 /*
2750    DMSetFunctionMatlab - Sets the function evaluation routine
2751 
2752 */
2753 PetscErrorCode  DMSetFunctionMatlab(DM dm,const char *func)
2754 {
2755   PetscErrorCode    ierr;
2756   DMMatlabContext   *sctx;
2757 
2758   PetscFunctionBegin;
2759   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2760   /* currently sctx is memory bleed */
2761   ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
2762   if (!sctx) {
2763     ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
2764   }
2765   ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
2766   ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
2767   ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
2768   PetscFunctionReturn(0);
2769 }
2770 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
                         DMSetJacobianMatlab().

   For linear problems x is null

   Implementation note: the PETSc object handles (DM, Vec, Mats) are passed
   to MATLAB by copying each pointer bit-wise into a long long and wrapping
   it in a double scalar; the MATLAB-side glue converts them back.

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 2,nrhs = 5;         /* two MATLAB outputs (MatStructure, error code), five inputs */
  mxArray	    *plhs[2],*prhs[5];
  long long int     lx = 0,lA = 0,lB = 0,ls = 0; /* zeroed so unused high bytes are defined when sizeof(ptr) < 8 */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)lA);
  prhs[3] =  mxCreateDoubleScalar((double)lB);
  prhs[4] =  mxCreateString(sctx->jacname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* first MATLAB output is the MatStructure flag, second is the PETSc error code */
  *str    =  (MatStructure) mxGetScalar(plhs[0]);
  ierr    =  (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}
2816 
2817 
2818 #undef __FUNCT__
2819 #define __FUNCT__ "DMSetJacobianMatlab"
2820 /*
2821    DMSetJacobianMatlab - Sets the Jacobian function evaluation routine
2822 
2823 */
2824 PetscErrorCode  DMSetJacobianMatlab(DM dm,const char *func)
2825 {
2826   PetscErrorCode    ierr;
2827   DMMatlabContext   *sctx;
2828 
2829   PetscFunctionBegin;
2830   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2831   /* currently sctx is memory bleed */
2832   ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
2833   if (!sctx) {
2834     ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
2835   }
2836   ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
2837   ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
2838   ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
2839   PetscFunctionReturn(0);
2840 }
2841 #endif
2842 
2843 #undef __FUNCT__
2844 #define __FUNCT__ "DMLoad"
2845 /*@C
2846   DMLoad - Loads a DM that has been stored in binary or HDF5 format
2847   with DMView().
2848 
2849   Collective on PetscViewer
2850 
2851   Input Parameters:
2852 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2853            some related function before a call to DMLoad().
2854 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2855            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2856 
2857    Level: intermediate
2858 
2859   Notes:
2860   Defaults to the DM DA.
2861 
2862   Notes for advanced users:
2863   Most users should not need to know the details of the binary storage
2864   format, since DMLoad() and DMView() completely hide these details.
2865   But for anyone who's interested, the standard binary matrix storage
2866   format is
2867 .vb
2868      has not yet been determined
2869 .ve
2870 
2871    In addition, PETSc automatically does the byte swapping for
2872 machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
2873 linux, Windows and the paragon; thus if you write your own binary
2874 read/write routines you have to swap the bytes; see PetscBinaryRead()
2875 and PetscBinaryWrite() to see how this may be done.
2876 
2877   Concepts: vector^loading from file
2878 
2879 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2880 @*/
2881 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2882 {
2883   PetscErrorCode ierr;
2884 
2885   PetscFunctionBegin;
2886   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2887   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2888 
2889   if (!((PetscObject)newdm)->type_name) {
2890     ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr);
2891   }
2892   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2893   PetscFunctionReturn(0);
2894 }
2895 
2896 /******************************** FEM Support **********************************/
2897 
2898 #undef __FUNCT__
2899 #define __FUNCT__ "DMPrintCellVector"
2900 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2901   PetscInt       f;
2902   PetscErrorCode ierr;
2903 
2904   PetscFunctionBegin;
2905   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2906   for (f = 0; f < len; ++f) {
2907     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2908   }
2909   PetscFunctionReturn(0);
2910 }
2911 
2912 #undef __FUNCT__
2913 #define __FUNCT__ "DMPrintCellMatrix"
2914 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2915   PetscInt       f, g;
2916   PetscErrorCode ierr;
2917 
2918   PetscFunctionBegin;
2919   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2920   for (f = 0; f < rows; ++f) {
2921     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2922     for (g = 0; g < cols; ++g) {
2923       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2924     }
2925     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2926   }
2927   PetscFunctionReturn(0);
2928 }
2929 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalFunction"
/*@C
  DMGetLocalFunction - Get the local residual function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lf - The local residual function (may be PETSC_NULL if none was set)

   Calling sequence of lf:
$    lf (DM dm, Vec x, Vec f, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  f - local vector to put residual in
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (lf) *lf = dm->lf;  /* output pointer is optional */
  PetscFunctionReturn(0);
}
2962 
#undef __FUNCT__
#define __FUNCT__ "DMSetLocalFunction"
/*@C
  DMSetLocalFunction - Set the local residual function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lf - The local residual function

   Calling sequence of lf:
$    lf (DM dm, Vec x, Vec f, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  f - local vector to put residual in
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
@*/
PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  dm->lf = lf;
  PetscFunctionReturn(0);
}
2993 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalJacobian"
/*@C
  DMGetLocalJacobian - Get the local Jacobian function from this DM

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. lj - The local Jacobian function (may be PETSC_NULL if none was set)

   Calling sequence of lj:
$    lj (DM dm, Vec x, Mat J, Mat M, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  J - matrix to put Jacobian in
.  M - matrix to use for defining Jacobian preconditioner
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (lj) *lj = dm->lj;  /* output pointer is optional */
  PetscFunctionReturn(0);
}
3027 
#undef __FUNCT__
#define __FUNCT__ "DMSetLocalJacobian"
/*@C
  DMSetLocalJacobian - Set the local Jacobian function from this DM

  Not collective

  Input Parameters:
+ dm - The DM
- lj - The local Jacobian function

   Calling sequence of lj:
$    lj (DM dm, Vec x, Mat J, Mat M, void *ctx);

+  dm - the DM context
.  x - local vector with the state at which to evaluate residual
.  J - matrix to put Jacobian in
.  M - matrix to use for defining Jacobian preconditioner
-  ctx - optional user-defined function context

  Level: intermediate

.seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
@*/
PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat,  Mat, void *))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  dm->lj = lj;
  PetscFunctionReturn(0);
}
3059 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection (PETSC_NULL if none has been set)

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
3084 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection; the DM takes ownership (no reference is taken here)

  Level: intermediate

  Note: Any existing Section will be destroyed, and the derived global section is
  invalidated so it is rebuilt lazily by DMGetDefaultGlobalSection().

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  /* The global section is derived from the local one, so it must be rebuilt */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's fields onto the DM, copying their names.
     NOTE(review): assumes section is non-null -- confirm callers never pass PETSC_NULL */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
3122 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
  If no global section exists yet, it is created on the fly from the default (local)
  section and the point SF.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* NOTE(review): assumes dm->defaultSection has been set -- confirm; PETSC_FALSE means
       constrained dofs are not included in the global numbering */
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
3152 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection; the DM takes ownership (no reference is taken here)

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
3177 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF (PETSC_NULL if no default section exists to build it from)

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative root count means the SF graph has never been set */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Lazily build the SF from the local and global section layouts */
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No layout information available: return PETSC_NULL rather than an empty SF */
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
3219 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF; the DM takes ownership (no reference is taken here)

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3245 
3246 #undef __FUNCT__
3247 #define __FUNCT__ "DMCreateDefaultSF"
3248 /*@C
3249   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3250   describing the data layout.
3251 
3252   Input Parameters:
3253 + dm - The DM
3254 . localSection - PetscSection describing the local data layout
3255 - globalSection - PetscSection describing the global data layout
3256 
3257   Level: intermediate
3258 
3259 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3260 @*/
3261 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
3262 {
3263   MPI_Comm        comm = ((PetscObject) dm)->comm;
3264   PetscLayout     layout;
3265   const PetscInt *ranges;
3266   PetscInt       *local;
3267   PetscSFNode    *remote;
3268   PetscInt        pStart, pEnd, p, nroots, nleaves, l;
3269   PetscMPIInt     size, rank;
3270   PetscErrorCode  ierr;
3271 
3272   PetscFunctionBegin;
3273   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3274   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
3275   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
3276   ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
3277   ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
3278   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
3279   ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
3280   ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
3281   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
3282   ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
3283   for (p = pStart, nleaves = 0; p < pEnd; ++p) {
3284     PetscInt gdof, gcdof;
3285 
3286     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
3287     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
3288     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3289   }
3290   ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
3291   ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
3292   for (p = pStart, l = 0; p < pEnd; ++p) {
3293     PetscInt *cind;
3294     PetscInt  dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
3295 
3296     ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
3297     ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
3298     ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
3299     ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
3300     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
3301     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
3302     ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
3303     if (!gdof) continue; /* Censored point */
3304     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
3305     if (gsize != dof-cdof) {
3306       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", size, p, dof-cdof, dof);
3307       cdof = 0; /* Ignore constraints */
3308     }
3309     for (d = 0, c = 0; d < dof; ++d) {
3310       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
3311       local[l+d-c] = off+d;
3312     }
3313     if (gdof < 0) {
3314       for(d = 0; d < gsize; ++d, ++l) {
3315         PetscInt offset = -(goff+1) + d, r;
3316 
3317         for (r = 0; r < size; ++r) {
3318           if ((offset >= ranges[r]) && (offset < ranges[r+1])) break;
3319         }
3320         remote[l].rank  = r;
3321         remote[l].index = offset - ranges[r];
3322       }
3323     } else {
3324       for(d = 0; d < gsize; ++d, ++l) {
3325         remote[l].rank  = rank;
3326         remote[l].index = goff+d - ranges[rank];
3327       }
3328     }
3329   }
3330   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
3331   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
3332   ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
3333   PetscFunctionReturn(0);
3334 }
3335 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3360 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* Return the number of fields attached to the DM (0 until DMSetNumFields() is called). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3371 
3372 #undef __FUNCT__
3373 #define __FUNCT__ "DMSetNumFields"
3374 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3375 {
3376   PetscInt       f;
3377   PetscErrorCode ierr;
3378 
3379   PetscFunctionBegin;
3380   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3381   for (f = 0; f < dm->numFields; ++f) {
3382     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3383   }
3384   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3385   dm->numFields = numFields;
3386   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3387   for (f = 0; f < dm->numFields; ++f) {
3388     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3389   }
3390   PetscFunctionReturn(0);
3391 }
3392 
#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* Return field f of the DM as a borrowed reference; errors if fields were never
   set up or f is out of [0, numFields). */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3405 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that c == dm->coordinates is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; DMGetCoordinatesLocal() will rebuild it */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3438 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the cached global form; DMGetCoordinates() will rebuild it */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3473 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinates) {
    DM cdm;

    /* Lazily build the global form by scattering from the local coordinates */
    if (!dm->coordinatesLocal) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ORDER, "You must call DMSetCoordinates() or DMSetCoordinatesLocal() before this call");
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3520 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinatesLocal) {
    DM cdm;

    /* Lazily build the local form (with ghosts) by scattering from the global coordinates */
    if (!dm->coordinates) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ORDER, "You must call DMSetCoordinates() or DMSetCoordinatesLocal() before this call");
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3567 
3568 #undef __FUNCT__
3569 #define __FUNCT__ "DMGetCoordinateDM"
3570 /*@
3571   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3572 
3573   Collective on DM
3574 
3575   Input Parameter:
3576 . dm - the DM
3577 
3578   Output Parameter:
3579 . cdm - coordinate DM
3580 
3581   Level: intermediate
3582 
3583 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3584 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3585 @*/
3586 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3587 {
3588   PetscErrorCode ierr;
3589 
3590   PetscFunctionBegin;
3591   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3592   PetscValidPointer(cdm,2);
3593   if (!dm->coordinateDM) {
3594     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3595     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3596   }
3597   *cdm = dm->coordinateDM;
3598   PetscFunctionReturn(0);
3599 }
3600