xref: /petsc/src/dm/interface/dm.c (revision 31d3f06ebde3773ebf5ce2d45e44be3e261635e3)
1 #include <petscsnes.h>
2 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
6 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  /* Clear the output first so a caller never sees a stale pointer if an error occurs below */
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the Vec, Mat and DM packages must be registered explicitly before use */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* Allocate the header (with the function table) and zero the ops so unset methods are NULL */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  /* Default field values; the concrete DM implementation (set via DMSetType()) fills in the rest */
  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  v->lf           = PETSC_NULL;
  v->lj           = PETSC_NULL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): hard-coded 10 assumes nullspaceConstructors[] has at least 10 slots -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
67 
68 #undef __FUNCT__
69 #define __FUNCT__ "DMSetVecType"
70 /*@C
71        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
72 
73    Logically Collective on DMDA
74 
75    Input Parameter:
76 +  da - initial distributed array
77 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
78 
79    Options Database:
80 .   -dm_vec_type ctype
81 
82    Level: intermediate
83 
84 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
85 @*/
86 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
87 {
88   PetscErrorCode ierr;
89 
90   PetscFunctionBegin;
91   PetscValidHeaderSpecific(da,DM_CLASSID,1);
92   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
93   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
94   PetscFunctionReturn(0);
95 }
96 
97 #undef __FUNCT__
98 #define __FUNCT__ "VecGetDM"
99 /*@
100   VecGetDM - Gets the DM defining the data layout of the vector
101 
102   Not collective
103 
104   Input Parameter:
105 . v - The Vec
106 
107   Output Parameter:
108 . dm - The DM
109 
110   Level: intermediate
111 
112 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
113 @*/
114 PetscErrorCode VecGetDM(Vec v, DM *dm)
115 {
116   PetscErrorCode ierr;
117 
118   PetscFunctionBegin;
119   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
120   PetscValidPointer(dm,2);
121   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
122   PetscFunctionReturn(0);
123 }
124 
125 #undef __FUNCT__
126 #define __FUNCT__ "VecSetDM"
127 /*@
128   VecSetDM - Sets the DM defining the data layout of the vector
129 
130   Not collective
131 
132   Input Parameters:
133 + v - The Vec
134 - dm - The DM
135 
136   Level: intermediate
137 
138 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
139 @*/
140 PetscErrorCode VecSetDM(Vec v, DM dm)
141 {
142   PetscErrorCode ierr;
143 
144   PetscFunctionBegin;
145   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
146   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
147   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
148   PetscFunctionReturn(0);
149 }
150 
151 #undef __FUNCT__
152 #define __FUNCT__ "DMSetMatType"
153 /*@C
154        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
155 
156    Logically Collective on DM
157 
158    Input Parameter:
159 +  dm - the DM context
160 .  ctype - the matrix type
161 
162    Options Database:
163 .   -dm_mat_type ctype
164 
165    Level: intermediate
166 
167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
168 @*/
169 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
170 {
171   PetscErrorCode ierr;
172   PetscFunctionBegin;
173   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
174   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
175   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
176   PetscFunctionReturn(0);
177 }
178 
179 #undef __FUNCT__
180 #define __FUNCT__ "MatGetDM"
181 /*@
182   MatGetDM - Gets the DM defining the data layout of the matrix
183 
184   Not collective
185 
186   Input Parameter:
187 . A - The Mat
188 
189   Output Parameter:
190 . dm - The DM
191 
192   Level: intermediate
193 
194 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
195 @*/
196 PetscErrorCode MatGetDM(Mat A, DM *dm)
197 {
198   PetscErrorCode ierr;
199 
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
202   PetscValidPointer(dm,2);
203   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
204   PetscFunctionReturn(0);
205 }
206 
207 #undef __FUNCT__
208 #define __FUNCT__ "MatSetDM"
209 /*@
210   MatSetDM - Sets the DM defining the data layout of the matrix
211 
212   Not collective
213 
214   Input Parameters:
215 + A - The Mat
216 - dm - The DM
217 
218   Level: intermediate
219 
220 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
221 @*/
222 PetscErrorCode MatSetDM(Mat A, DM dm)
223 {
224   PetscErrorCode ierr;
225 
226   PetscFunctionBegin;
227   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
228   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
229   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "DMSetOptionsPrefix"
235 /*@C
236    DMSetOptionsPrefix - Sets the prefix used for searching for all
237    DMDA options in the database.
238 
239    Logically Collective on DMDA
240 
241    Input Parameter:
242 +  da - the DMDA context
243 -  prefix - the prefix to prepend to all option names
244 
245    Notes:
246    A hyphen (-) must NOT be given at the beginning of the prefix name.
247    The first character of all runtime options is AUTOMATICALLY the hyphen.
248 
249    Level: advanced
250 
251 .keywords: DMDA, set, options, prefix, database
252 
253 .seealso: DMSetFromOptions()
254 @*/
255 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
256 {
257   PetscErrorCode ierr;
258 
259   PetscFunctionBegin;
260   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
261   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
262   PetscFunctionReturn(0);
263 }
264 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    /* The solution vector only holds a circular reference if it points back at this DM */
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If anything besides the circular references still holds the DM, only drop our reference */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    /* A checked-out local vector means a caller still holds state tied to this DM: hard error */
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* User context is destroyed first via the user-supplied callback, if any */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs last so it can still see the generic fields above */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
385 
386 #undef __FUNCT__
387 #define __FUNCT__ "DMSetUp"
388 /*@
389     DMSetUp - sets up the data structures inside a DM object
390 
391     Collective on DM
392 
393     Input Parameter:
394 .   dm - the DM object to setup
395 
396     Level: developer
397 
398 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
399 
400 @*/
401 PetscErrorCode  DMSetUp(DM dm)
402 {
403   PetscErrorCode ierr;
404 
405   PetscFunctionBegin;
406   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
407   if (dm->setupcalled) PetscFunctionReturn(0);
408   if (dm->ops->setup) {
409     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
410   }
411   dm->setupcalled = PETSC_TRUE;
412   PetscFunctionReturn(0);
413 }
414 
415 #undef __FUNCT__
416 #define __FUNCT__ "DMSetFromOptions"
417 /*@
418     DMSetFromOptions - sets parameters in a DM from the options database
419 
420     Collective on DM
421 
422     Input Parameter:
423 .   dm - the DM object to set options for
424 
425     Options Database:
426 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
427 .   -dm_vec_type <type>  type of vector to create inside DM
428 .   -dm_mat_type <type>  type of matrix to create inside DM
429 -   -dm_coloring_type <global or ghosted>
430 
431     Level: developer
432 
433 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
434 
435 @*/
436 PetscErrorCode  DMSetFromOptions(DM dm)
437 {
438   PetscBool      flg1 = PETSC_FALSE,flg2 = PETSC_FALSE,flg3 = PETSC_FALSE,flg4 = PETSC_FALSE,flg;
439   PetscErrorCode ierr;
440   char           typeName[256] = MATAIJ;
441 
442   PetscFunctionBegin;
443   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
444   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
445     ierr = PetscOptionsBool("-dm_view", "Information on DM", "DMView", flg1, &flg1, PETSC_NULL);CHKERRQ(ierr);
446     ierr = PetscOptionsBool("-dm_view_detail", "Exhaustive mesh description", "DMView", flg2, &flg2, PETSC_NULL);CHKERRQ(ierr);
447     ierr = PetscOptionsBool("-dm_view_vtk", "Output mesh in VTK format", "DMView", flg3, &flg3, PETSC_NULL);CHKERRQ(ierr);
448     ierr = PetscOptionsBool("-dm_view_latex", "Output mesh in LaTeX TikZ format", "DMView", flg4, &flg4, PETSC_NULL);CHKERRQ(ierr);
449     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
450     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
451     if (flg) {
452       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
453     }
454     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
455     if (flg) {
456       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
457     }
458     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
459     if (dm->ops->setfromoptions) {
460       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
461     }
462     /* process any options handlers added with PetscObjectAddOptionsHandler() */
463     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
464   ierr = PetscOptionsEnd();CHKERRQ(ierr);
465   if (flg1) {
466     ierr = DMView(dm, PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
467   }
468   if (flg2) {
469     PetscViewer viewer;
470 
471     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
472     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
473     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_INFO_DETAIL);CHKERRQ(ierr);
474     ierr = DMView(dm, viewer);CHKERRQ(ierr);
475     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
476   }
477   if (flg3) {
478     PetscViewer viewer;
479 
480     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
481     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
482     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_VTK);CHKERRQ(ierr);
483     ierr = PetscViewerFileSetName(viewer, "mesh.vtk");CHKERRQ(ierr);
484     ierr = DMView(dm, viewer);CHKERRQ(ierr);
485     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
486   }
487   if (flg4) {
488     PetscViewer viewer;
489 
490     ierr = PetscViewerCreate(((PetscObject) dm)->comm, &viewer);CHKERRQ(ierr);
491     ierr = PetscViewerSetType(viewer, PETSCVIEWERASCII);CHKERRQ(ierr);
492     ierr = PetscViewerSetFormat(viewer, PETSC_VIEWER_ASCII_LATEX);CHKERRQ(ierr);
493     ierr = PetscViewerFileSetName(viewer, "mesh.tex");CHKERRQ(ierr);
494     ierr = DMView(dm, viewer);CHKERRQ(ierr);
495     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
496   }
497   PetscFunctionReturn(0);
498 }
499 
500 #undef __FUNCT__
501 #define __FUNCT__ "DMView"
502 /*@C
503     DMView - Views a vector packer or DMDA.
504 
505     Collective on DM
506 
507     Input Parameter:
508 +   dm - the DM object to view
509 -   v - the viewer
510 
511     Level: developer
512 
513 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
514 
515 @*/
516 PetscErrorCode  DMView(DM dm,PetscViewer v)
517 {
518   PetscErrorCode ierr;
519 
520   PetscFunctionBegin;
521   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
522  if (!v) {
523     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
524   }
525   if (dm->ops->view) {
526     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
527   }
528   PetscFunctionReturn(0);
529 }
530 
531 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
532 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
533 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section-based path: build the vector directly from the global PetscSection layout */
    PetscSection gSection;
    PetscInt     localSize, blockSize = -1, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Infer the block size: if every point with dofs has the same unconstrained dof
       count, use that; at the first mismatch fall back to block size 1 */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof, cdof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
      if ((dof > 0) && (dof-cdof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
    if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
    ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
    /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
    /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
    /* NOTE(review): the custom view operation appears intended for section-based DMs -- confirm VecView_Complex semantics */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
  } else {
    /* Otherwise delegate to the DM implementation's own creation routine */
    ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
592 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* Section-based path: build a sequential vector sized by the local PetscSection */
    PetscInt localSize, blockSize = -1, pStart, pEnd, p;

    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Infer the block size: uniform per-point dof count if one exists, else 1.
       NOTE(review): if the chart is empty blockSize stays -1 and is passed to
       VecSetBlockSize() below -- confirm that is handled downstream */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof;
      if ((dof > 0) && (dof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
    /* Local vectors are sequential: created on PETSC_COMM_SELF with local size on both sides */
    ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
  } else {
    /* Otherwise delegate to the DM implementation's own creation routine */
    ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
643 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The map is built lazily on first request and cached on the DM */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Section-based path: each local dof maps to its point's global offset plus component index */
      PetscInt      *ltog;
      PetscInt       pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* Otherwise the DM implementation must know how to build the map itself */
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
704 
705 #undef __FUNCT__
706 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
707 /*@
708    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
709 
710    Collective on DM
711 
712    Input Parameter:
713 .  da - the distributed array that provides the mapping
714 
715    Output Parameter:
716 .  ltog - the block mapping
717 
718    Level: intermediate
719 
720    Notes:
721    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
722    MatSetLocalToGlobalMappingBlock().
723 
724 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
725 @*/
726 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
727 {
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
732   PetscValidPointer(ltog,2);
733   if (!dm->ltogmapb) {
734     PetscInt bs;
735     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
736     if (bs > 1) {
737       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
738       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
739     } else {
740       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
741       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
742     }
743   }
744   *ltog = dm->ltogmapb;
745   PetscFunctionReturn(0);
746 }
747 
748 #undef __FUNCT__
749 #define __FUNCT__ "DMGetBlockSize"
750 /*@
751    DMGetBlockSize - Gets the inherent block size associated with a DM
752 
753    Not Collective
754 
755    Input Parameter:
756 .  dm - the DM with block structure
757 
758    Output Parameter:
759 .  bs - the block size, 1 implies no exploitable block structure
760 
761    Level: intermediate
762 
763 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
764 @*/
765 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
766 {
767   PetscFunctionBegin;
768   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
769   PetscValidPointer(bs,2);
770   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
771   *bs = dm->bs;
772   PetscFunctionReturn(0);
773 }
774 
775 #undef __FUNCT__
776 #define __FUNCT__ "DMCreateInterpolation"
777 /*@
778     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
779 
780     Collective on DM
781 
782     Input Parameter:
783 +   dm1 - the DM object
784 -   dm2 - the second, finer DM object
785 
786     Output Parameter:
787 +  mat - the interpolation
788 -  vec - the scaling (optional)
789 
790     Level: developer
791 
792     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
793         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
794 
795         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
796         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
797 
798 
799 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
800 
801 @*/
802 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
803 {
804   PetscErrorCode ierr;
805 
806   PetscFunctionBegin;
807   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
808   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
809   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
810   PetscFunctionReturn(0);
811 }
812 
813 #undef __FUNCT__
814 #define __FUNCT__ "DMCreateInjection"
815 /*@
816     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
817 
818     Collective on DM
819 
820     Input Parameter:
821 +   dm1 - the DM object
822 -   dm2 - the second, finer DM object
823 
824     Output Parameter:
825 .   ctx - the injection
826 
827     Level: developer
828 
829    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
830         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
831 
832 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
833 
834 @*/
835 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
836 {
837   PetscErrorCode ierr;
838 
839   PetscFunctionBegin;
840   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
841   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
842   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
843   PetscFunctionReturn(0);
844 }
845 
846 #undef __FUNCT__
847 #define __FUNCT__ "DMCreateColoring"
848 /*@C
849     DMCreateColoring - Gets coloring for a DMDA or DMComposite
850 
851     Collective on DM
852 
853     Input Parameter:
854 +   dm - the DM object
855 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
856 -   matype - either MATAIJ or MATBAIJ
857 
858     Output Parameter:
859 .   coloring - the coloring
860 
861     Level: developer
862 
863 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
864 
865 @*/
866 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
867 {
868   PetscErrorCode ierr;
869 
870   PetscFunctionBegin;
871   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
872   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
873   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
874   PetscFunctionReturn(0);
875 }
876 
877 #undef __FUNCT__
878 #define __FUNCT__ "DMCreateMatrix"
879 /*@C
880     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
881 
882     Collective on DM
883 
884     Input Parameter:
885 +   dm - the DM object
886 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
887             any type which inherits from one of these (such as MATAIJ)
888 
889     Output Parameter:
890 .   mat - the empty Jacobian
891 
892     Level: beginner
893 
894     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
895        do not need to do it yourself.
896 
897        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
898        the nonzero pattern call DMDASetMatPreallocateOnly()
899 
900        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
901        internally by PETSc.
902 
903        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
904        the indices for the global numbering for DMDAs which is complicated.
905 
906 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
907 
908 @*/
909 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
910 {
911   PetscErrorCode ierr;
912 
913   PetscFunctionBegin;
914   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
915 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
916   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
917 #endif
918   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
919   PetscValidPointer(mat,3);
920   if (dm->mattype) {
921     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
922   } else {
923     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
924   }
925   PetscFunctionReturn(0);
926 }
927 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameter:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer
.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag is consulted by the type-specific DMCreateMatrix() implementations */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
950 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;   /* pool entry whose buffer is handed to the caller */
  size_t size;       /* size in bytes of one element of dtype */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Pop an entry from the free list (workin) if one exists; otherwise create one.
     PetscNewLog() zeroes the link, so link->mem == NULL and link->bytes == 0. */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer when the request does not fit; buffers are never shrunk */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the entry onto the checked-out list (workout) and return its buffer */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
996 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Walk the checked-out list (workout) with a pointer-to-pointer so the
     matching entry can be unlinked and moved back onto the free list (workin) */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      /* Null the caller's pointer so a stale reference cannot be reused */
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1035 
1036 #undef __FUNCT__
1037 #define __FUNCT__ "DMSetNullSpaceConstructor"
1038 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1039 {
1040   PetscFunctionBegin;
1041   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1042   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1043   dm->nullspaceConstructors[field] = nullsp;
1044   PetscFunctionReturn(0);
1045 }
1046 
1047 #undef __FUNCT__
1048 #define __FUNCT__ "DMCreateFieldIS"
1049 /*@C
1050   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1051 
1052   Not collective
1053 
1054   Input Parameter:
1055 . dm - the DM object
1056 
1057   Output Parameters:
1058 + numFields  - The number of fields (or PETSC_NULL if not requested)
1059 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1060 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1061 
1062   Level: intermediate
1063 
1064   Notes:
1065   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1066   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1067   PetscFree().
1068 
1069 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1070 @*/
1071 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1072 {
1073   PetscSection   section, sectionGlobal;
1074   PetscErrorCode ierr;
1075 
1076   PetscFunctionBegin;
1077   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1078   if (numFields) {
1079     PetscValidPointer(numFields,2);
1080     *numFields = 0;
1081   }
1082   if (fieldNames) {
1083     PetscValidPointer(fieldNames,3);
1084     *fieldNames = PETSC_NULL;
1085   }
1086   if (fields) {
1087     PetscValidPointer(fields,4);
1088     *fields = PETSC_NULL;
1089   }
1090   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1091   if (section) {
1092     PetscInt *fieldSizes, **fieldIndices;
1093     PetscInt  nF, f, pStart, pEnd, p;
1094 
1095     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1096     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1097     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1098     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1099     for (f = 0; f < nF; ++f) {
1100       fieldSizes[f] = 0;
1101     }
1102     for (p = pStart; p < pEnd; ++p) {
1103       PetscInt gdof;
1104 
1105       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1106       if (gdof > 0) {
1107         for (f = 0; f < nF; ++f) {
1108           PetscInt fdof, fcdof;
1109 
1110           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1111           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1112           fieldSizes[f] += fdof-fcdof;
1113         }
1114       }
1115     }
1116     for (f = 0; f < nF; ++f) {
1117       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1118       fieldSizes[f] = 0;
1119     }
1120     for (p = pStart; p < pEnd; ++p) {
1121       PetscInt gdof, goff;
1122 
1123       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1124       if (gdof > 0) {
1125         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1126         for (f = 0; f < nF; ++f) {
1127           PetscInt fdof, fcdof, fc;
1128 
1129           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1130           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1131           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1132             fieldIndices[f][fieldSizes[f]] = goff++;
1133           }
1134         }
1135       }
1136     }
1137     if (numFields) {*numFields = nF;}
1138     if (fieldNames) {
1139       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1140       for (f = 0; f < nF; ++f) {
1141         const char *fieldName;
1142 
1143         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1144         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1145       }
1146     }
1147     if (fields) {
1148       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1149       for (f = 0; f < nF; ++f) {
1150         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1151       }
1152     }
1153     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1154   } else {
1155     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1156   }
1157   PetscFunctionReturn(0);
1158 }
1159 
1160 #undef __FUNCT__
1161 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1162 /*@C
1163   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1164 
1165   Not Collective
1166 
1167   Input Parameters:
1168 + dm   - the DM object
1169 - name - the name of the field decomposition
1170 
1171   Output Parameter:
1172 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1173 
1174   Level: advanced
1175 
1176 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1177 @*/
1178 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1179 {
1180   PetscErrorCode ierr;
1181 
1182   PetscFunctionBegin;
1183   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1184   PetscValidCharPointer(name,2);
1185   PetscValidPointer(ddm,3);
1186   *ddm = PETSC_NULL;
1187   if (dm->ops->createfielddecompositiondm) {
1188     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1189   }
1190   PetscFunctionReturn(0);
1191 }
1192 
1193 #undef __FUNCT__
1194 #define __FUNCT__ "DMCreateFieldDecomposition"
1195 /*@C
1196   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1197                           corresponding to different fields: each IS contains the global indices of the dofs of the
1198                           corresponding field. The optional list of DMs define the DM for each subproblem.
1199                           Generalizes DMCreateFieldIS().
1200 
1201   Not collective
1202 
1203   Input Parameter:
1204 . dm - the DM object
1205 
1206   Output Parameters:
1207 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1208 . namelist  - The name for each field (or PETSC_NULL if not requested)
1209 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1210 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1211 
1212   Level: intermediate
1213 
1214   Notes:
1215   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1216   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1217   and all of the arrays should be freed with PetscFree().
1218 
1219 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1220 @*/
1221 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1222 {
1223   PetscErrorCode ierr;
1224 
1225   PetscFunctionBegin;
1226   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1227   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1228   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1229   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1230   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1231   if (!dm->ops->createfielddecomposition) {
1232     PetscSection section;
1233     PetscInt     numFields, f;
1234 
1235     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1236     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1237     if (section && numFields && dm->ops->createsubdm) {
1238       *len = numFields;
1239       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1240       for (f = 0; f < numFields; ++f) {
1241         const char *fieldName;
1242 
1243         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1244         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1245         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1246       }
1247     } else {
1248       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1249       /* By default there are no DMs associated with subproblems. */
1250       if (dmlist) *dmlist = PETSC_NULL;
1251     }
1252   }
1253   else {
1254     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist); CHKERRQ(ierr);
1255   }
1256   PetscFunctionReturn(0);
1257 }
1258 
1259 #undef __FUNCT__
1260 #define __FUNCT__ "DMCreateSubDM"
1261 /*@C
1262   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1263                   The fields are defined by DMCreateFieldIS().
1264 
1265   Not collective
1266 
1267   Input Parameters:
1268 + dm - the DM object
1269 . numFields - number of fields in this subproblem
1270 - len       - The number of subproblems in the decomposition (or PETSC_NULL if not requested)
1271 
1272   Output Parameters:
1273 . is - The global indices for the subproblem
1274 - dm - The DM for the subproblem
1275 
1276   Level: intermediate
1277 
1278 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1279 @*/
1280 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1281 {
1282   PetscErrorCode ierr;
1283 
1284   PetscFunctionBegin;
1285   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1286   PetscValidPointer(fields,3);
1287   if (is) {PetscValidPointer(is,4);}
1288   if (subdm) {PetscValidPointer(subdm,5);}
1289   if (dm->ops->createsubdm) {
1290     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm); CHKERRQ(ierr);
1291   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1292   PetscFunctionReturn(0);
1293 }
1294 
1295 #undef __FUNCT__
1296 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1297 /*@C
1298   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1299 
1300   Not Collective
1301 
1302   Input Parameters:
1303 + dm   - the DM object
1304 - name - the name of the subdomain decomposition
1305 
1306   Output Parameter:
1307 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1308 
1309   Level: advanced
1310 
1311 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1312 @*/
1313 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1314 {
1315   PetscErrorCode ierr;
1316 
1317   PetscFunctionBegin;
1318   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1319   PetscValidCharPointer(name,2);
1320   PetscValidPointer(ddm,3);
1321   *ddm = PETSC_NULL;
1322   if (dm->ops->createdomaindecompositiondm) {
1323     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm); CHKERRQ(ierr);
1324   }
1325   PetscFunctionReturn(0);
1326 }
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "DMCreateDomainDecomposition"
1330 /*@C
1331   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1332                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1333                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1334                           define a nonoverlapping covering, while outer subdomains can overlap.
1335                           The optional list of DMs define the DM for each subproblem.
1336 
1337   Not collective
1338 
1339   Input Parameter:
1340 . dm - the DM object
1341 
1342   Output Parameters:
1343 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1344 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1345 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1346 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1347 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1348 
1349   Level: intermediate
1350 
1351   Notes:
1352   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1353   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1354   and all of the arrays should be freed with PetscFree().
1355 
1356 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1357 @*/
1358 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1359 {
1360   PetscErrorCode ierr;
1361 
1362   PetscFunctionBegin;
1363   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1364   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1365   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1366   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1367   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1368   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1369   if (dm->ops->createdomaindecomposition) {
1370     ierr = (*dm->ops->createdomaindecomposition)(dm,len,namelist,innerislist,outerislist,dmlist); CHKERRQ(ierr);
1371   }
1372   PetscFunctionReturn(0);
1373 }
1374 
1375 #undef __FUNCT__
1376 #define __FUNCT__ "DMRefine"
1377 /*@
1378   DMRefine - Refines a DM object
1379 
1380   Collective on DM
1381 
1382   Input Parameter:
1383 + dm   - the DM object
1384 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1385 
1386   Output Parameter:
1387 . dmf - the refined DM, or PETSC_NULL
1388 
1389   Note: If no refinement was done, the return value is PETSC_NULL
1390 
1391   Level: developer
1392 
1393 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1394 @*/
1395 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1396 {
1397   PetscErrorCode ierr;
1398   DMRefineHookLink link;
1399 
1400   PetscFunctionBegin;
1401   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1402   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1403   if (*dmf) {
1404     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1405     (*dmf)->ops->initialguess = dm->ops->initialguess;
1406     (*dmf)->ops->function     = dm->ops->function;
1407     (*dmf)->ops->functionj    = dm->ops->functionj;
1408     if (dm->ops->jacobian != DMComputeJacobianDefault) {
1409       (*dmf)->ops->jacobian     = dm->ops->jacobian;
1410     }
1411     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1412     (*dmf)->ctx       = dm->ctx;
1413     (*dmf)->leveldown = dm->leveldown;
1414     (*dmf)->levelup   = dm->levelup + 1;
1415     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1416     for (link=dm->refinehook; link; link=link->next) {
1417       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1418     }
1419   }
1420   PetscFunctionReturn(0);
1421 }
1422 
1423 #undef __FUNCT__
1424 #define __FUNCT__ "DMRefineHookAdd"
1425 /*@
1426    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1427 
1428    Logically Collective
1429 
1430    Input Arguments:
1431 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1432 .  refinehook - function to run when setting up a coarser level
1433 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1434 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1435 
1436    Calling sequence of refinehook:
1437 $    refinehook(DM coarse,DM fine,void *ctx);
1438 
1439 +  coarse - coarse level DM
1440 .  fine - fine level DM to interpolate problem to
1441 -  ctx - optional user-defined function context
1442 
1443    Calling sequence for interphook:
1444 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1445 
1446 +  coarse - coarse level DM
1447 .  interp - matrix interpolating a coarse-level solution to the finer grid
1448 .  fine - fine level DM to update
1449 -  ctx - optional user-defined function context
1450 
1451    Level: advanced
1452 
1453    Notes:
1454    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1455 
1456    If this function is called multiple times, the hooks will be run in the order they are added.
1457 
1458 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1459 @*/
1460 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1461 {
1462   PetscErrorCode ierr;
1463   DMRefineHookLink link,*p;
1464 
1465   PetscFunctionBegin;
1466   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1467   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1468   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1469   link->refinehook = refinehook;
1470   link->interphook = interphook;
1471   link->ctx = ctx;
1472   link->next = PETSC_NULL;
1473   *p = link;
1474   PetscFunctionReturn(0);
1475 }
1476 
1477 #undef __FUNCT__
1478 #define __FUNCT__ "DMInterpolate"
1479 /*@
1480    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1481 
1482    Collective if any hooks are
1483 
1484    Input Arguments:
1485 +  coarse - coarser DM to use as a base
1486 .  restrct - interpolation matrix, apply using MatInterpolate()
1487 -  fine - finer DM to update
1488 
1489    Level: developer
1490 
1491 .seealso: DMRefineHookAdd(), MatInterpolate()
1492 @*/
1493 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1494 {
1495   PetscErrorCode ierr;
1496   DMRefineHookLink link;
1497 
1498   PetscFunctionBegin;
1499   for (link=fine->refinehook; link; link=link->next) {
1500     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1501   }
1502   PetscFunctionReturn(0);
1503 }
1504 
1505 #undef __FUNCT__
1506 #define __FUNCT__ "DMGetRefineLevel"
1507 /*@
1508     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1509 
1510     Not Collective
1511 
1512     Input Parameter:
1513 .   dm - the DM object
1514 
1515     Output Parameter:
1516 .   level - number of refinements
1517 
1518     Level: developer
1519 
1520 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1521 
1522 @*/
1523 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1524 {
1525   PetscFunctionBegin;
1526   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1527   *level = dm->levelup;
1528   PetscFunctionReturn(0);
1529 }
1530 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Section-based path: broadcast global (root) values into the local vector */
    PetscScalar *lArray, *gArray;

    /* ADD_VALUES is not supported on this path */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementation; the *_ALL_VALUES modes are mapped to the plain modes */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1572 
1573 #undef __FUNCT__
1574 #define __FUNCT__ "DMGlobalToLocalEnd"
1575 /*@
1576     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1577 
1578     Neighbor-wise Collective on DM
1579 
1580     Input Parameters:
1581 +   dm - the DM object
1582 .   g - the global vector
1583 .   mode - INSERT_VALUES or ADD_VALUES
1584 -   l - the local vector
1585 
1586 
1587     Level: beginner
1588 
1589 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1590 
1591 @*/
1592 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1593 {
1594   PetscSF        sf;
1595   PetscErrorCode ierr;
1596   PetscScalar    *lArray, *gArray;
1597 
1598   PetscFunctionBegin;
1599   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1600   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1601   if (sf) {
1602   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1603 
1604     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1605     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1606     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1607     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1608     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1609   } else {
1610     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1611   }
1612   PetscFunctionReturn(0);
1613 }
1614 
1615 #undef __FUNCT__
1616 #define __FUNCT__ "DMLocalToGlobalBegin"
1617 /*@
1618     DMLocalToGlobalBegin - updates global vectors from local vectors
1619 
1620     Neighbor-wise Collective on DM
1621 
1622     Input Parameters:
1623 +   dm - the DM object
1624 .   l - the local vector
1625 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1626            base point.
1627 - - the global vector
1628 
1629     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1630            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1631            global array to the final global array with VecAXPY().
1632 
1633     Level: beginner
1634 
1635 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1636 
1637 @*/
1638 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1639 {
1640   PetscSF        sf;
1641   PetscErrorCode ierr;
1642 
1643   PetscFunctionBegin;
1644   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1645   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1646   if (sf) {
1647     MPI_Op       op;
1648     PetscScalar *lArray, *gArray;
1649 
1650     switch(mode) {
1651     case INSERT_VALUES:
1652     case INSERT_ALL_VALUES:
1653 #if defined(PETSC_HAVE_MPI_REPLACE)
1654       op = MPI_REPLACE; break;
1655 #else
1656       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1657 #endif
1658     case ADD_VALUES:
1659     case ADD_ALL_VALUES:
1660       op = MPI_SUM; break;
1661   default:
1662     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1663     }
1664     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1665     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1666     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1667     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1668     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1669   } else {
1670     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1671   }
1672   PetscFunctionReturn(0);
1673 }
1674 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode to the MPI reduction used by the SF; must match
       the op chosen in DMLocalToGlobalBegin() for the same mode */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementation; the *_ALL_VALUES modes are mapped to the plain modes */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1730 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobianDefault"
/*@
    DMComputeJacobianDefault - computes the Jacobian using the DMComputeFunction() if Jacobian computer is not provided

    Collective on DM

    Input Parameter:
+   dm - the DM object
.   x - location to compute Jacobian at; may be ignored for linear problems
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobianDefault(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *stflag = SAME_NONZERO_PATTERN;
  /* Fill B by finite differencing the DM's function, using the stored coloring dm->fd */
  ierr  = MatFDColoringApply(B,dm->fd,x,stflag,dm);CHKERRQ(ierr);
  /* When the operator and preconditioner matrices differ, A is assumed to be
     assembled lazily (e.g. matrix-free); flush its assembly state here */
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1764 
1765 #undef __FUNCT__
1766 #define __FUNCT__ "DMCoarsen"
1767 /*@
1768     DMCoarsen - Coarsens a DM object
1769 
1770     Collective on DM
1771 
1772     Input Parameter:
1773 +   dm - the DM object
1774 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1775 
1776     Output Parameter:
1777 .   dmc - the coarsened DM
1778 
1779     Level: developer
1780 
1781 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1782 
1783 @*/
1784 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1785 {
1786   PetscErrorCode ierr;
1787   DMCoarsenHookLink link;
1788 
1789   PetscFunctionBegin;
1790   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1791   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1792   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1793   (*dmc)->ops->initialguess = dm->ops->initialguess;
1794   (*dmc)->ops->function     = dm->ops->function;
1795   (*dmc)->ops->functionj    = dm->ops->functionj;
1796   if (dm->ops->jacobian != DMComputeJacobianDefault) {
1797     (*dmc)->ops->jacobian     = dm->ops->jacobian;
1798   }
1799   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1800   (*dmc)->ctx       = dm->ctx;
1801   (*dmc)->levelup   = dm->levelup;
1802   (*dmc)->leveldown = dm->leveldown + 1;
1803   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1804   for (link=dm->coarsenhook; link; link=link->next) {
1805     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1806   }
1807   PetscFunctionReturn(0);
1808 }
1809 
1810 #undef __FUNCT__
1811 #define __FUNCT__ "DMCoarsenHookAdd"
1812 /*@
1813    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1814 
1815    Logically Collective
1816 
1817    Input Arguments:
1818 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1819 .  coarsenhook - function to run when setting up a coarser level
1820 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1821 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1822 
1823    Calling sequence of coarsenhook:
1824 $    coarsenhook(DM fine,DM coarse,void *ctx);
1825 
1826 +  fine - fine level DM
1827 .  coarse - coarse level DM to restrict problem to
1828 -  ctx - optional user-defined function context
1829 
1830    Calling sequence for restricthook:
1831 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1832 
1833 +  fine - fine level DM
1834 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1835 .  rscale - scaling vector for restriction
1836 .  inject - matrix restricting by injection
1837 .  coarse - coarse level DM to update
1838 -  ctx - optional user-defined function context
1839 
1840    Level: advanced
1841 
1842    Notes:
1843    This function is only needed if auxiliary data needs to be set up on coarse grids.
1844 
1845    If this function is called multiple times, the hooks will be run in the order they are added.
1846 
1847    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1848    extract the finest level information from its context (instead of from the SNES).
1849 
1850 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1851 @*/
1852 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1853 {
1854   PetscErrorCode ierr;
1855   DMCoarsenHookLink link,*p;
1856 
1857   PetscFunctionBegin;
1858   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1859   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1860   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1861   link->coarsenhook = coarsenhook;
1862   link->restricthook = restricthook;
1863   link->ctx = ctx;
1864   link->next = PETSC_NULL;
1865   *p = link;
1866   PetscFunctionReturn(0);
1867 }
1868 
1869 #undef __FUNCT__
1870 #define __FUNCT__ "DMRestrict"
1871 /*@
1872    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1873 
1874    Collective if any hooks are
1875 
1876    Input Arguments:
1877 +  fine - finer DM to use as a base
1878 .  restrct - restriction matrix, apply using MatRestrict()
1879 .  inject - injection matrix, also use MatRestrict()
1880 -  coarse - coarer DM to update
1881 
1882    Level: developer
1883 
1884 .seealso: DMCoarsenHookAdd(), MatRestrict()
1885 @*/
1886 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1887 {
1888   PetscErrorCode ierr;
1889   DMCoarsenHookLink link;
1890 
1891   PetscFunctionBegin;
1892   for (link=fine->coarsenhook; link; link=link->next) {
1893     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1894   }
1895   PetscFunctionReturn(0);
1896 }
1897 
#undef __FUNCT__
#define __FUNCT__ "DMBlockRestrictHookAdd"
/*@
   DMBlockRestrictHookAdd - adds a callback to be run when restricting problem data to a block DM (see DMBlockRestrict())

   Logically Collective

   Input Arguments:
+  global - global DM
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   NOTE(review): DMBlockRestrict() invokes the hook as (global,in,out,block,ctx), i.e. with its "in"
   scatter second and "out" third -- confirm which ordering is intended and reconcile with this doc.

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on the block DM.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

.seealso: DMCoarsenHookAdd(), DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMBlockRestrictHookAdd(DM global,PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMBlockRestrictHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->blockrestricthook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMBlockRestrictHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1946 
1947 #undef __FUNCT__
1948 #define __FUNCT__ "DMBlockRestrict"
1949 /*@
1950    DMBlockRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMBlockRestrictHookAdd()
1951 
1952    Collective if any hooks are
1953 
1954    Input Arguments:
1955 +  fine - finer DM to use as a base
1956 .  restrct - restriction matrix, apply using MatRestrict()
1957 .  inject - injection matrix, also use MatRestrict()
1958 -  coarse - coarer DM to update
1959 
1960    Level: developer
1961 
1962 .seealso: DMCoarsenHookAdd(), MatRestrict()
1963 @*/
1964 PetscErrorCode DMBlockRestrict(DM global,VecScatter in,VecScatter out,DM block)
1965 {
1966   PetscErrorCode ierr;
1967   DMBlockRestrictHookLink link;
1968 
1969   PetscFunctionBegin;
1970   for (link=global->blockrestricthook; link; link=link->next) {
1971     if (link->restricthook) {ierr = (*link->restricthook)(global,in,out,block,link->ctx);CHKERRQ(ierr);}
1972   }
1973   PetscFunctionReturn(0);
1974 }
1975 
1976 #undef __FUNCT__
1977 #define __FUNCT__ "DMGetCoarsenLevel"
1978 /*@
1979     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
1980 
1981     Not Collective
1982 
1983     Input Parameter:
1984 .   dm - the DM object
1985 
1986     Output Parameter:
1987 .   level - number of coarsenings
1988 
1989     Level: developer
1990 
1991 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1992 
1993 @*/
1994 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
1995 {
1996   PetscFunctionBegin;
1997   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1998   *level = dm->leveldown;
1999   PetscFunctionReturn(0);
2000 }
2001 
2002 
2003 
2004 #undef __FUNCT__
2005 #define __FUNCT__ "DMRefineHierarchy"
2006 /*@C
2007     DMRefineHierarchy - Refines a DM object, all levels at once
2008 
2009     Collective on DM
2010 
2011     Input Parameter:
2012 +   dm - the DM object
2013 -   nlevels - the number of levels of refinement
2014 
2015     Output Parameter:
2016 .   dmf - the refined DM hierarchy
2017 
2018     Level: developer
2019 
2020 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2021 
2022 @*/
2023 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2024 {
2025   PetscErrorCode ierr;
2026 
2027   PetscFunctionBegin;
2028   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2029   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2030   if (nlevels == 0) PetscFunctionReturn(0);
2031   if (dm->ops->refinehierarchy) {
2032     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2033   } else if (dm->ops->refine) {
2034     PetscInt i;
2035 
2036     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2037     for (i=1; i<nlevels; i++) {
2038       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2039     }
2040   } else {
2041     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2042   }
2043   PetscFunctionReturn(0);
2044 }
2045 
2046 #undef __FUNCT__
2047 #define __FUNCT__ "DMCoarsenHierarchy"
2048 /*@C
2049     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2050 
2051     Collective on DM
2052 
2053     Input Parameter:
2054 +   dm - the DM object
2055 -   nlevels - the number of levels of coarsening
2056 
2057     Output Parameter:
2058 .   dmc - the coarsened DM hierarchy
2059 
2060     Level: developer
2061 
2062 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2063 
2064 @*/
2065 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2066 {
2067   PetscErrorCode ierr;
2068 
2069   PetscFunctionBegin;
2070   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2071   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2072   if (nlevels == 0) PetscFunctionReturn(0);
2073   PetscValidPointer(dmc,3);
2074   if (dm->ops->coarsenhierarchy) {
2075     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2076   } else if (dm->ops->coarsen) {
2077     PetscInt i;
2078 
2079     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2080     for (i=1; i<nlevels; i++) {
2081       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2082     }
2083   } else {
2084     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2085   }
2086   PetscFunctionReturn(0);
2087 }
2088 
#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameter:
.  rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

   Notes: dispatches directly to the coarse DM's getaggregates implementation; a DM type that does
   not provide one will crash on the call below rather than raise a useful error -- TODO consider
   a SETERRQ guard as in DMCoarsenHierarchy().

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* no null check here: assumes the coarse DM type implements getaggregates */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2120 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function; note that it receives the address of the context (void**)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; presumably invoked from the DMDestroy() path, which is outside this chunk */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2144 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context; only the pointer is stored, the data is not copied

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2168 
2169 #undef __FUNCT__
2170 #define __FUNCT__ "DMGetApplicationContext"
2171 /*@
2172     DMGetApplicationContext - Gets a user context from a DM object
2173 
2174     Not Collective
2175 
2176     Input Parameter:
2177 .   dm - the DM object
2178 
2179     Output Parameter:
2180 .   ctx - the user context
2181 
2182     Level: intermediate
2183 
2184 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2185 
2186 @*/
2187 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2188 {
2189   PetscFunctionBegin;
2190   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2191   *(void**)ctx = dm->ctx;
2192   PetscFunctionReturn(0);
2193 }
2194 
#undef __FUNCT__
#define __FUNCT__ "DMSetInitialGuess"
/*@C
    DMSetInitialGuess - sets a function to compute an initial guess vector entries for the solvers

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
-   f - the function to compute the initial guess

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMSetInitialGuess(DM dm,PetscErrorCode (*f)(DM,Vec))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->initialguess = f;
  PetscFunctionReturn(0);
}
2218 
2219 #undef __FUNCT__
2220 #define __FUNCT__ "DMSetFunction"
2221 /*@C
2222     DMSetFunction - sets a function to compute the right hand side vector entries for the KSP solver or nonlinear function for SNES
2223 
2224     Logically Collective on DM
2225 
2226     Input Parameter:
2227 +   dm - the DM object
2228 -   f - the function to compute (use PETSC_NULL to cancel a previous function that was set)
2229 
2230     Level: intermediate
2231 
2232     Notes: This sets both the function for function evaluations and the function used to compute Jacobians via finite differences if no Jacobian
2233            computer is provided with DMSetJacobian(). Canceling cancels the function, but not the function used to compute the Jacobian.
2234 
2235 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2236          DMSetJacobian()
2237 
2238 @*/
2239 PetscErrorCode  DMSetFunction(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2240 {
2241   PetscFunctionBegin;
2242   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2243   dm->ops->function = f;
2244   if (f) {
2245     dm->ops->functionj = f;
2246   }
2247   PetscFunctionReturn(0);
2248 }
2249 
#undef __FUNCT__
#define __FUNCT__ "DMSetJacobian"
/*@C
    DMSetJacobian - sets a function to compute the matrix entries for the KSP solver or Jacobian for SNES

    Logically Collective on DM

    Input Parameters:
+   dm - the DM object
-   f - the function to compute the matrix entries

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode  DMSetJacobian(DM dm,PetscErrorCode (*f)(DM,Vec,Mat,Mat,MatStructure*))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ops->jacobian = f;
  PetscFunctionReturn(0);
}
2274 
2275 #undef __FUNCT__
2276 #define __FUNCT__ "DMSetVariableBounds"
2277 /*@C
2278     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2279 
2280     Logically Collective on DM
2281 
2282     Input Parameter:
2283 +   dm - the DM object
2284 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2285 
2286     Level: intermediate
2287 
2288 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2289          DMSetJacobian()
2290 
2291 @*/
2292 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2293 {
2294   PetscFunctionBegin;
2295   dm->ops->computevariablebounds = f;
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 #undef __FUNCT__
2300 #define __FUNCT__ "DMHasVariableBounds"
2301 /*@
2302     DMHasVariableBounds - does the DM object have a variable bounds function?
2303 
2304     Not Collective
2305 
2306     Input Parameter:
2307 .   dm - the DM object to destroy
2308 
2309     Output Parameter:
2310 .   flg - PETSC_TRUE if the variable bounds function exists
2311 
2312     Level: developer
2313 
2314 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2315 
2316 @*/
2317 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2318 {
2319   PetscFunctionBegin;
2320   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2321   PetscFunctionReturn(0);
2322 }
2323 
2324 #undef __FUNCT__
2325 #define __FUNCT__ "DMComputeVariableBounds"
2326 /*@C
2327     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2328 
2329     Logically Collective on DM
2330 
2331     Input Parameters:
2332 +   dm - the DM object to destroy
2333 -   x  - current solution at which the bounds are computed
2334 
2335     Output parameters:
2336 +   xl - lower bound
2337 -   xu - upper bound
2338 
2339     Level: intermediate
2340 
2341 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2342          DMSetFunction(), DMSetVariableBounds()
2343 
2344 @*/
2345 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2346 {
2347   PetscErrorCode ierr;
2348   PetscFunctionBegin;
2349   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2350   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2351   if (dm->ops->computevariablebounds) {
2352     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu); CHKERRQ(ierr);
2353   }
2354   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2355   PetscFunctionReturn(0);
2356 }
2357 
#undef __FUNCT__
#define __FUNCT__ "DMComputeInitialGuess"
/*@
    DMComputeInitialGuess - computes an initial guess vector entries for the KSP solvers

    Collective on DM

    Input Parameters:
+   dm - the DM object
-   x - the vector to hold the initial guess values

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess()

@*/
PetscErrorCode  DMComputeInitialGuess(DM dm,Vec x)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Calling without a registered initial-guess function is a usage error */
  if (!dm->ops->initialguess) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetInitialGuess()");
  ierr = (*dm->ops->initialguess)(dm,x);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2383 
2384 #undef __FUNCT__
2385 #define __FUNCT__ "DMHasInitialGuess"
2386 /*@
2387     DMHasInitialGuess - does the DM object have an initial guess function
2388 
2389     Not Collective
2390 
2391     Input Parameter:
2392 .   dm - the DM object to destroy
2393 
2394     Output Parameter:
2395 .   flg - PETSC_TRUE if function exists
2396 
2397     Level: developer
2398 
2399 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2400 
2401 @*/
2402 PetscErrorCode  DMHasInitialGuess(DM dm,PetscBool  *flg)
2403 {
2404   PetscFunctionBegin;
2405   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2406   *flg =  (dm->ops->initialguess) ? PETSC_TRUE : PETSC_FALSE;
2407   PetscFunctionReturn(0);
2408 }
2409 
#undef __FUNCT__
#define __FUNCT__ "DMHasFunction"
/*@
    DMHasFunction - does the DM object have a function

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   flg - PETSC_TRUE if function exists

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()

@*/
PetscErrorCode  DMHasFunction(DM dm,PetscBool  *flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  *flg =  (dm->ops->function) ? PETSC_TRUE : PETSC_FALSE;
  PetscFunctionReturn(0);
}
2435 
2436 #undef __FUNCT__
2437 #define __FUNCT__ "DMHasJacobian"
2438 /*@
2439     DMHasJacobian - does the DM object have a matrix function
2440 
2441     Not Collective
2442 
2443     Input Parameter:
2444 .   dm - the DM object to destroy
2445 
2446     Output Parameter:
2447 .   flg - PETSC_TRUE if function exists
2448 
2449     Level: developer
2450 
2451 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetFunction(), DMSetJacobian()
2452 
2453 @*/
2454 PetscErrorCode  DMHasJacobian(DM dm,PetscBool  *flg)
2455 {
2456   PetscFunctionBegin;
2457   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2458   *flg =  (dm->ops->jacobian) ? PETSC_TRUE : PETSC_FALSE;
2459   PetscFunctionReturn(0);
2460 }
2461 
2462 #undef __FUNCT__
2463 #define __FUNCT__ "DMHasColoring"
2464 /*@
2465     DMHasColoring - does the DM object have a method of providing a coloring?
2466 
2467     Not Collective
2468 
2469     Input Parameter:
2470 .   dm - the DM object
2471 
2472     Output Parameter:
2473 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2474 
2475     Level: developer
2476 
2477 .seealso DMHasFunction(), DMCreateColoring()
2478 
2479 @*/
2480 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2481 {
2482   PetscFunctionBegin;
2483   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2484   PetscFunctionReturn(0);
2485 }
2486 
2487 #undef  __FUNCT__
2488 #define __FUNCT__ "DMSetVec"
2489 /*@C
2490     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2491 
2492     Collective on DM
2493 
2494     Input Parameter:
2495 +   dm - the DM object
2496 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2497 
2498     Level: developer
2499 
2500 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
2501          DMSetFunction(), DMSetJacobian(), DMSetVariableBounds()
2502 
2503 @*/
2504 PetscErrorCode  DMSetVec(DM dm,Vec x)
2505 {
2506   PetscErrorCode ierr;
2507   PetscFunctionBegin;
2508   if (x) {
2509     if (!dm->x) {
2510       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2511     }
2512     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2513   }
2514   else if (dm->x) {
2515     ierr = VecDestroy(&dm->x);  CHKERRQ(ierr);
2516   }
2517   PetscFunctionReturn(0);
2518 }
2519 
2520 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction"
/*@
    DMComputeFunction - computes the right hand side vector entries for the KSP solver or nonlinear function for SNES

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - the location where the function is evaluated, may be ignored for linear problems
-   b - the vector to hold the right hand side entries

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetJacobian()

@*/
PetscErrorCode  DMComputeFunction(DM dm,Vec x,Vec b)
{
  PetscErrorCode ierr;
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->function) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Need to provide function with DMSetFunction()");
  /* Mark the stack so errors inside the user callback are attributed correctly */
  PetscStackPush("DM user function");
  ierr = (*dm->ops->function)(dm,x,b);CHKERRQ(ierr);
  PetscStackPop;
  PetscFunctionReturn(0);
}
2550 
2551 
2552 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian"
/*@
    DMComputeJacobian - compute the matrix entries for the solver

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   x - location to compute Jacobian at; will be PETSC_NULL for linear problems, for nonlinear problems if not provided then pulled from DM
.   A - matrix that defines the operator for the linear solve
-   B - the matrix used to construct the preconditioner

    Output Parameter:
.   stflag - matrix structure flag set by the Jacobian routine

    Notes: if no Jacobian function was provided with DMSetJacobian(), a finite-difference coloring
    context is built lazily on first call and DMComputeJacobianDefault() is installed as the
    Jacobian routine for all subsequent calls.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(), DMSetInitialGuess(),
         DMSetFunction()

@*/
PetscErrorCode  DMComputeJacobian(DM dm,Vec x,Mat A,Mat B,MatStructure *stflag)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->jacobian) {
    /* First call with no user Jacobian: set up finite-difference coloring of functionj on B */
    ISColoring     coloring;
    MatFDColoring  fd;
    MatType        mtype;

    ierr = PetscObjectGetType((PetscObject)B,&mtype);CHKERRQ(ierr);
    ierr = DMCreateColoring(dm,dm->coloringtype,mtype,&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringCreate(B,coloring,&fd);CHKERRQ(ierr);
    ierr = ISColoringDestroy(&coloring);CHKERRQ(ierr);
    ierr = MatFDColoringSetFunction(fd,(PetscErrorCode (*)(void))dm->ops->functionj,dm);CHKERRQ(ierr);
    ierr = PetscObjectSetOptionsPrefix((PetscObject)fd,((PetscObject)dm)->prefix);CHKERRQ(ierr);
    ierr = MatFDColoringSetFromOptions(fd);CHKERRQ(ierr);

    /* Cache the coloring context and install the FD default for subsequent calls */
    dm->fd = fd;
    dm->ops->jacobian = DMComputeJacobianDefault;

    /* don't know why this is needed -- NOTE(review): presumably balances a reference taken
       somewhere in the FD-coloring setup above; confirm before removing */
    ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
  }
  if (!x) x = dm->x;
  ierr = (*dm->ops->jacobian)(dm,x,A,B,stflag);CHKERRQ(ierr);

  /* if matrix depends on x; i.e. nonlinear problem, keep copy of input vector since needed by multigrid methods to generate coarse grid matrices */
  if (x) {
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  }
  if (A != B) {
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2613 
2614 
/* Registry of available DM implementations: populated by DMRegisterAll(), consulted by DMSetType() */
PetscFList DMList                       = PETSC_NULL;
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2617 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMSLICED, or DMCOMPOSITE).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op when the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Make sure the registry is populated before looking up the constructor */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any previous implementation's data before building the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2663 
2664 #undef __FUNCT__
2665 #define __FUNCT__ "DMGetType"
2666 /*@C
2667   DMGetType - Gets the DM type name (as a string) from the DM.
2668 
2669   Not Collective
2670 
2671   Input Parameter:
2672 . dm  - The DM
2673 
2674   Output Parameter:
2675 . type - The DM type name
2676 
2677   Level: intermediate
2678 
2679 .keywords: DM, get, type, name
2680 .seealso: DMSetType(), DMCreate()
2681 @*/
2682 PetscErrorCode  DMGetType(DM dm, DMType *type)
2683 {
2684   PetscErrorCode ierr;
2685 
2686   PetscFunctionBegin;
2687   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2688   PetscValidCharPointer(type,2);
2689   if (!DMRegisterAllCalled) {
2690     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2691   }
2692   *type = ((PetscObject)dm)->type_name;
2693   PetscFunctionReturn(0);
2694 }
2695 
2696 #undef __FUNCT__
2697 #define __FUNCT__ "DMConvert"
2698 /*@C
2699   DMConvert - Converts a DM to another DM, either of the same or different type.
2700 
2701   Collective on DM
2702 
2703   Input Parameters:
2704 + dm - the DM
2705 - newtype - new DM type (use "same" for the same type)
2706 
2707   Output Parameter:
2708 . M - pointer to new DM
2709 
2710   Notes:
2711   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2712   the MPI communicator of the generated DM is always the same as the communicator
2713   of the input DM.
2714 
2715   Level: intermediate
2716 
2717 .seealso: DMCreate()
2718 @*/
2719 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2720 {
2721   DM             B;
2722   char           convname[256];
2723   PetscBool      sametype, issame;
2724   PetscErrorCode ierr;
2725 
2726   PetscFunctionBegin;
2727   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2728   PetscValidType(dm,1);
2729   PetscValidPointer(M,3);
2730   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2731   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2732   {
2733     PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;
2734 
2735     /*
2736        Order of precedence:
2737        1) See if a specialized converter is known to the current DM.
2738        2) See if a specialized converter is known to the desired DM class.
2739        3) See if a good general converter is registered for the desired class
2740        4) See if a good general converter is known for the current matrix.
2741        5) Use a really basic converter.
2742     */
2743 
2744     /* 1) See if a specialized converter is known to the current DM and the desired class */
2745     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2746     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2747     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2748     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2749     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2750     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2751     if (conv) goto foundconv;
2752 
2753     /* 2)  See if a specialized converter is known to the desired DM class. */
2754     ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
2755     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2756     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2757     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2758     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2759     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2760     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2761     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2762     if (conv) {
2763       ierr = DMDestroy(&B);CHKERRQ(ierr);
2764       goto foundconv;
2765     }
2766 
2767 #if 0
2768     /* 3) See if a good general converter is registered for the desired class */
2769     conv = B->ops->convertfrom;
2770     ierr = DMDestroy(&B);CHKERRQ(ierr);
2771     if (conv) goto foundconv;
2772 
2773     /* 4) See if a good general converter is known for the current matrix */
2774     if (dm->ops->convert) {
2775       conv = dm->ops->convert;
2776     }
2777     if (conv) goto foundconv;
2778 #endif
2779 
2780     /* 5) Use a really basic converter. */
2781     SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2782 
2783     foundconv:
2784     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2785     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2786     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2787   }
2788   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2789   PetscFunctionReturn(0);
2790 }
2791 
2792 /*--------------------------------------------------------------------------------------------------------------------*/
2793 
2794 #undef __FUNCT__
2795 #define __FUNCT__ "DMRegister"
2796 /*@C
2797   DMRegister - See DMRegisterDynamic()
2798 
2799   Level: advanced
2800 @*/
2801 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2802 {
2803   char fullname[PETSC_MAX_PATH_LEN];
2804   PetscErrorCode ierr;
2805 
2806   PetscFunctionBegin;
2807   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2808   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2809   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2810   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2811   PetscFunctionReturn(0);
2812 }
2813 
2814 
2815 /*--------------------------------------------------------------------------------------------------------------------*/
2816 #undef __FUNCT__
2817 #define __FUNCT__ "DMRegisterDestroy"
2818 /*@C
2819    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2820 
2821    Not Collective
2822 
2823    Level: advanced
2824 
2825 .keywords: DM, register, destroy
2826 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2827 @*/
2828 PetscErrorCode  DMRegisterDestroy(void)
2829 {
2830   PetscErrorCode ierr;
2831 
2832   PetscFunctionBegin;
2833   ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
2834   DMRegisterAllCalled = PETSC_FALSE;
2835   PetscFunctionReturn(0);
2836 }
2837 
2838 #if defined(PETSC_HAVE_MATLAB_ENGINE)
2839 #include <mex.h>
2840 
/* Context attached to a DM as its application context by DMSetFunctionMatlab()/
   DMSetJacobianMatlab(): names of the MATLAB residual and Jacobian functions,
   plus an optional MATLAB-side context object. */
typedef struct {char *funcname; char *jacname; mxArray *ctx;} DMMatlabContext;
2842 
#undef __FUNCT__
#define __FUNCT__ "DMComputeFunction_Matlab"
/*
   DMComputeFunction_Matlab - Calls the function that has been set with
                         DMSetFunctionMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeFunction_Matlab(DM dm,Vec x,Vec y)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 1,nrhs = 4;
  mxArray           *plhs[1],*prhs[4];
  long long int     lx = 0,ly = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(y,VEC_CLASSID,3);
  PetscCheckSameComm(dm,1,y,3);

  /* call Matlab function in ctx with arguments x and y */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* Pass the PETSc object handles to MATLAB by bit-copying the pointers into
     64-bit integers and shipping those as doubles (assumes the pointer value
     survives the round-trip through double -- TODO confirm on all platforms) */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&ly,&y,sizeof(y));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)ly);
  prhs[3] =  mxCreateString(sctx->funcname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeFunctionInternal");CHKERRQ(ierr);
  /* The MATLAB helper returns a numeric status in plhs[0]; treat it as an error code */
  ierr    =  mxGetScalar(plhs[0]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(plhs[0]);
  PetscFunctionReturn(0);
}
2884 
2885 
#undef __FUNCT__
#define __FUNCT__ "DMSetFunctionMatlab"
/*
   DMSetFunctionMatlab - Sets the function evaluation routine

   func is the name of a MATLAB function; the context holding it is installed as
   the DM's application context and DMComputeFunction_Matlab dispatches to it.
*/
PetscErrorCode  DMSetFunctionMatlab(DM dm,const char *func)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    /* First MATLAB callback installed on this DM: allocate a fresh context
       (never freed -- see the note above) */
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->funcname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetFunction(dm,DMComputeFunction_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2909 
#undef __FUNCT__
#define __FUNCT__ "DMComputeJacobian_Matlab"
/*
   DMComputeJacobian_Matlab - Calls the function that has been set with
                         DMSetJacobianMatlab().

   For linear problems x is null

.seealso: DMSetFunction(), DMGetFunction()
*/
PetscErrorCode  DMComputeJacobian_Matlab(DM dm,Vec x,Mat A,Mat B,MatStructure *str)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;
  int               nlhs = 2,nrhs = 5;
  mxArray           *plhs[2],*prhs[5];
  long long int     lx = 0,lA = 0,lB = 0,ls = 0;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(A,MAT_CLASSID,3);

  /* call MATLAB function in ctx with arguments x, A, and B */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  /* Bit-copy the object pointers into 64-bit integers so they can be passed
     to MATLAB as doubles (same marshalling as DMComputeFunction_Matlab) */
  ierr = PetscMemcpy(&ls,&dm,sizeof(dm));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lx,&x,sizeof(x));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lA,&A,sizeof(A));CHKERRQ(ierr);
  ierr = PetscMemcpy(&lB,&B,sizeof(B));CHKERRQ(ierr);
  prhs[0] =  mxCreateDoubleScalar((double)ls);
  prhs[1] =  mxCreateDoubleScalar((double)lx);
  prhs[2] =  mxCreateDoubleScalar((double)lA);
  prhs[3] =  mxCreateDoubleScalar((double)lB);
  prhs[4] =  mxCreateString(sctx->jacname);
  ierr    =  mexCallMATLAB(nlhs,plhs,nrhs,prhs,"PetscDMComputeJacobianInternal");CHKERRQ(ierr);
  /* plhs[0] carries the MatStructure flag, plhs[1] the numeric status code */
  *str    =  (MatStructure) mxGetScalar(plhs[0]);
  ierr    =  (PetscInt) mxGetScalar(plhs[1]);CHKERRQ(ierr);
  mxDestroyArray(prhs[0]);
  mxDestroyArray(prhs[1]);
  mxDestroyArray(prhs[2]);
  mxDestroyArray(prhs[3]);
  mxDestroyArray(prhs[4]);
  mxDestroyArray(plhs[0]);
  mxDestroyArray(plhs[1]);
  PetscFunctionReturn(0);
}
2955 
2956 
#undef __FUNCT__
#define __FUNCT__ "DMSetJacobianMatlab"
/*
   DMSetJacobianMatlab - Sets the Jacobian function evaluation routine

   func is the name of a MATLAB function; it is stored in the shared
   DMMatlabContext and dispatched via DMComputeJacobian_Matlab.
*/
PetscErrorCode  DMSetJacobianMatlab(DM dm,const char *func)
{
  PetscErrorCode    ierr;
  DMMatlabContext   *sctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* currently sctx is memory bleed */
  ierr = DMGetApplicationContext(dm,&sctx);CHKERRQ(ierr);
  if (!sctx) {
    /* Allocate the context on first use (never freed -- see the note above) */
    ierr = PetscMalloc(sizeof(DMMatlabContext),&sctx);CHKERRQ(ierr);
  }
  ierr = PetscStrallocpy(func,&sctx->jacname);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(dm,sctx);CHKERRQ(ierr);
  ierr = DMSetJacobian(dm,DMComputeJacobian_Matlab);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2980 #endif
2981 
2982 #undef __FUNCT__
2983 #define __FUNCT__ "DMLoad"
2984 /*@C
2985   DMLoad - Loads a DM that has been stored in binary or HDF5 format
2986   with DMView().
2987 
2988   Collective on PetscViewer
2989 
2990   Input Parameters:
2991 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2992            some related function before a call to DMLoad().
2993 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2994            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2995 
2996    Level: intermediate
2997 
2998   Notes:
2999   Defaults to the DM DA.
3000 
3001   Notes for advanced users:
3002   Most users should not need to know the details of the binary storage
3003   format, since DMLoad() and DMView() completely hide these details.
3004   But for anyone who's interested, the standard binary matrix storage
3005   format is
3006 .vb
3007      has not yet been determined
3008 .ve
3009 
3010    In addition, PETSc automatically does the byte swapping for
3011 machines that store the bytes reversed, e.g.  DEC alpha, freebsd,
3012 linux, Windows and the paragon; thus if you write your own binary
3013 read/write routines you have to swap the bytes; see PetscBinaryRead()
3014 and PetscBinaryWrite() to see how this may be done.
3015 
3016   Concepts: vector^loading from file
3017 
3018 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
3019 @*/
3020 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
3021 {
3022   PetscErrorCode ierr;
3023 
3024   PetscFunctionBegin;
3025   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
3026   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
3027 
3028   if (!((PetscObject)newdm)->type_name) {
3029     ierr = DMSetType(newdm, DMDA);CHKERRQ(ierr);
3030   }
3031   ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
3032   PetscFunctionReturn(0);
3033 }
3034 
3035 /******************************** FEM Support **********************************/
3036 
3037 #undef __FUNCT__
3038 #define __FUNCT__ "DMPrintCellVector"
3039 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
3040   PetscInt       f;
3041   PetscErrorCode ierr;
3042 
3043   PetscFunctionBegin;
3044   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
3045   for (f = 0; f < len; ++f) {
3046     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
3047   }
3048   PetscFunctionReturn(0);
3049 }
3050 
3051 #undef __FUNCT__
3052 #define __FUNCT__ "DMPrintCellMatrix"
3053 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
3054   PetscInt       f, g;
3055   PetscErrorCode ierr;
3056 
3057   PetscFunctionBegin;
3058   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
3059   for (f = 0; f < rows; ++f) {
3060     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
3061     for (g = 0; g < cols; ++g) {
3062       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
3063     }
3064     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
3065   }
3066   PetscFunctionReturn(0);
3067 }
3068 
3069 #undef __FUNCT__
3070 #define __FUNCT__ "DMGetLocalFunction"
3071 /*@C
3072   DMGetLocalFunction - Get the local residual function from this DM
3073 
3074   Not collective
3075 
3076   Input Parameter:
3077 . dm - The DM
3078 
3079   Output Parameter:
3080 . lf - The local residual function
3081 
3082    Calling sequence of lf:
3083 $    lf (SNES snes, Vec x, Vec f, void *ctx);
3084 
3085 +  snes - the SNES context
3086 .  x - local vector with the state at which to evaluate residual
3087 .  f - local vector to put residual in
3088 -  ctx - optional user-defined function context
3089 
3090   Level: intermediate
3091 
3092 .seealso DMSetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
3093 @*/
3094 PetscErrorCode DMGetLocalFunction(DM dm, PetscErrorCode (**lf)(DM, Vec, Vec, void *))
3095 {
3096   PetscFunctionBegin;
3097   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3098   if (lf) *lf = dm->lf;
3099   PetscFunctionReturn(0);
3100 }
3101 
3102 #undef __FUNCT__
3103 #define __FUNCT__ "DMSetLocalFunction"
3104 /*@C
3105   DMSetLocalFunction - Set the local residual function from this DM
3106 
3107   Not collective
3108 
3109   Input Parameters:
3110 + dm - The DM
3111 - lf - The local residual function
3112 
3113    Calling sequence of lf:
3114 $    lf (SNES snes, Vec x, Vec f, void *ctx);
3115 
3116 +  snes - the SNES context
3117 .  x - local vector with the state at which to evaluate residual
3118 .  f - local vector to put residual in
3119 -  ctx - optional user-defined function context
3120 
3121   Level: intermediate
3122 
3123 .seealso DMGetLocalFunction(), DMGetLocalJacobian(), DMSetLocalJacobian()
3124 @*/
3125 PetscErrorCode DMSetLocalFunction(DM dm, PetscErrorCode (*lf)(DM, Vec, Vec, void *))
3126 {
3127   PetscFunctionBegin;
3128   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3129   dm->lf = lf;
3130   PetscFunctionReturn(0);
3131 }
3132 
3133 #undef __FUNCT__
3134 #define __FUNCT__ "DMGetLocalJacobian"
3135 /*@C
3136   DMGetLocalJacobian - Get the local Jacobian function from this DM
3137 
3138   Not collective
3139 
3140   Input Parameter:
3141 . dm - The DM
3142 
3143   Output Parameter:
3144 . lj - The local Jacobian function
3145 
3146    Calling sequence of lj:
3147 $    lj (SNES snes, Vec x, Mat J, Mat M, void *ctx);
3148 
3149 +  snes - the SNES context
3150 .  x - local vector with the state at which to evaluate residual
3151 .  J - matrix to put Jacobian in
3152 .  M - matrix to use for defining Jacobian preconditioner
3153 -  ctx - optional user-defined function context
3154 
3155   Level: intermediate
3156 
3157 .seealso DMSetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
3158 @*/
3159 PetscErrorCode DMGetLocalJacobian(DM dm, PetscErrorCode (**lj)(DM, Vec, Mat, Mat, void *))
3160 {
3161   PetscFunctionBegin;
3162   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3163   if (lj) *lj = dm->lj;
3164   PetscFunctionReturn(0);
3165 }
3166 
3167 #undef __FUNCT__
3168 #define __FUNCT__ "DMSetLocalJacobian"
3169 /*@C
3170   DMSetLocalJacobian - Set the local Jacobian function from this DM
3171 
3172   Not collective
3173 
3174   Input Parameters:
3175 + dm - The DM
3176 - lj - The local Jacobian function
3177 
3178    Calling sequence of lj:
3179 $    lj (SNES snes, Vec x, Mat J, Mat M, void *ctx);
3180 
3181 +  snes - the SNES context
3182 .  x - local vector with the state at which to evaluate residual
3183 .  J - matrix to put Jacobian in
3184 .  M - matrix to use for defining Jacobian preconditioner
3185 -  ctx - optional user-defined function context
3186 
3187   Level: intermediate
3188 
3189 .seealso DMGetLocalJacobian(), DMGetLocalFunction(), DMSetLocalFunction()
3190 @*/
3191 PetscErrorCode DMSetLocalJacobian(DM dm, PetscErrorCode (*lj)(DM, Vec, Mat,  Mat, void *))
3192 {
3193   PetscFunctionBegin;
3194   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3195   dm->lj = lj;
3196   PetscFunctionReturn(0);
3197 }
3198 
3199 #undef __FUNCT__
3200 #define __FUNCT__ "DMGetDefaultSection"
3201 /*@
3202   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
3203 
3204   Input Parameter:
3205 . dm - The DM
3206 
3207   Output Parameter:
3208 . section - The PetscSection
3209 
3210   Level: intermediate
3211 
3212   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
3213 
3214 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
3215 @*/
3216 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
3217   PetscFunctionBegin;
3218   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3219   PetscValidPointer(section, 2);
3220   *section = dm->defaultSection;
3221   PetscFunctionReturn(0);
3222 }
3223 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* The DM takes ownership: the previous local section and the derived global
     section are released, and no reference is taken on the new section.
     NOTE(review): section is not validated or NULL-checked before the
     PetscSectionGetNumFields() call below -- confirm callers never pass
     PETSC_NULL. */
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field structure onto the DM so the field names carry over */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
3261 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and the point SF;
     this is why the routine is collective even though it looks like a getter */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also derive the DM's PetscLayout describing the global value distribution */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
3295 
3296 #undef __FUNCT__
3297 #define __FUNCT__ "DMSetDefaultGlobalSection"
3298 /*@
3299   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
3300 
3301   Input Parameters:
3302 + dm - The DM
3303 - section - The PetscSection
3304 
3305   Level: intermediate
3306 
3307   Note: Any existing Section will be destroyed
3308 
3309 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
3310 @*/
3311 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
3312   PetscErrorCode ierr;
3313 
3314   PetscFunctionBegin;
3315   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3316   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
3317   dm->defaultGlobalSection = section;
3318   PetscFunctionReturn(0);
3319 }
3320 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative nroots means the SF's graph has never been set: build it lazily */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Derive the dof SF from the local/global section pair */
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No section available: there is nothing to build the SF from */
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
3362 
3363 #undef __FUNCT__
3364 #define __FUNCT__ "DMSetDefaultSF"
3365 /*@
3366   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3367 
3368   Input Parameters:
3369 + dm - The DM
3370 - sf - The PetscSF
3371 
3372   Level: intermediate
3373 
3374   Note: Any previous SF is destroyed
3375 
3376 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3377 @*/
3378 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
3379   PetscErrorCode ierr;
3380 
3381   PetscFunctionBegin;
3382   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3383   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3384   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3385   dm->defaultSF = sf;
3386   PetscFunctionReturn(0);
3387 }
3388 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Build a PetscLayout over the locally owned (constrained) dofs so that the
     per-rank ownership ranges of the global numbering are known */
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count the leaves. A negative gdof encodes a remotely owned
     point as -(dof+1); either way the unconstrained dof count is a leaf count */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local offset) and root (owner rank, owner
     offset) arrays, skipping constrained dofs */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs (cind is sorted) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: goff encodes the owner's global offset as -(offset+1);
         locate the owning rank by searching the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root is on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3477 
3478 #undef __FUNCT__
3479 #define __FUNCT__ "DMGetPointSF"
3480 /*@
3481   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3482 
3483   Input Parameter:
3484 . dm - The DM
3485 
3486   Output Parameter:
3487 . sf - The PetscSF
3488 
3489   Level: intermediate
3490 
3491   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3492 
3493 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3494 @*/
3495 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3496   PetscFunctionBegin;
3497   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3498   PetscValidPointer(sf, 2);
3499   *sf = dm->sf;
3500   PetscFunctionReturn(0);
3501 }
3502 
3503 #undef __FUNCT__
3504 #define __FUNCT__ "DMSetPointSF"
3505 /*@
3506   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3507 
3508   Input Parameters:
3509 + dm - The DM
3510 - sf - The PetscSF
3511 
3512   Level: intermediate
3513 
3514 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3515 @*/
3516 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3517   PetscErrorCode ierr;
3518 
3519   PetscFunctionBegin;
3520   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3521   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3522   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3523   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3524   dm->sf = sf;
3525   PetscFunctionReturn(0);
3526 }
3527 
3528 #undef __FUNCT__
3529 #define __FUNCT__ "DMGetNumFields"
3530 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3531 {
3532   PetscFunctionBegin;
3533   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3534   PetscValidPointer(numFields, 2);
3535   *numFields = dm->numFields;
3536   PetscFunctionReturn(0);
3537 }
3538 
3539 #undef __FUNCT__
3540 #define __FUNCT__ "DMSetNumFields"
3541 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3542 {
3543   PetscInt       f;
3544   PetscErrorCode ierr;
3545 
3546   PetscFunctionBegin;
3547   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3548   for (f = 0; f < dm->numFields; ++f) {
3549     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3550   }
3551   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3552   dm->numFields = numFields;
3553   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3554   for (f = 0; f < dm->numFields; ++f) {
3555     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3556   }
3557   PetscFunctionReturn(0);
3558 }
3559 
#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/*
  DMGetField - Return the object associated with field f of the DM.

  Input Parameters:
+ dm - The DM
- f  - The field number, in [0, numFields)

  Output Parameter:
. field - The field object; a borrowed reference, so the caller must NOT destroy it

  Note: Raises an error if DMSetNumFields() has not been called, or if f is out of range.
*/
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3572 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Take the new reference before destroying the old vector so that
     DMSetCoordinates(dm, dm->coordinates) is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; DMGetCoordinatesLocal() will
     lazily rebuild them from the new global vector */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3605 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference first so passing dm->coordinatesLocal back in is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the cached global coordinates; DMGetCoordinates() will
     lazily rebuild them from the new local vector */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3640 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the global vector from the cached local coordinates;
     if neither exists, *c is returned as PETSC_NULL */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    /* Scatter local (with ghosts) to global via the coordinate DM */
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3686 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the local vector from the cached global coordinates;
     if neither exists, *c is returned as PETSC_NULL */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    /* Scatter global to local (filling ghosts) via the coordinate DM;
       this communication is why the routine is collective */
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3732 
3733 #undef __FUNCT__
3734 #define __FUNCT__ "DMGetCoordinateDM"
3735 /*@
3736   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3737 
3738   Collective on DM
3739 
3740   Input Parameter:
3741 . dm - the DM
3742 
3743   Output Parameter:
3744 . cdm - coordinate DM
3745 
3746   Level: intermediate
3747 
3748 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3749 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3750 @*/
3751 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3752 {
3753   PetscErrorCode ierr;
3754 
3755   PetscFunctionBegin;
3756   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3757   PetscValidPointer(cdm,2);
3758   if (!dm->coordinateDM) {
3759     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3760     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3761   }
3762   *cdm = dm->coordinateDM;
3763   PetscFunctionReturn(0);
3764 }
3765