xref: /petsc/src/dm/interface/dm.c (revision 3bf036e263fd78807e2931ff42d9ddcd8aae3fd4)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never  call DMSetType()  it will generate an
38    error when you try to use the vector.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
52 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
53 {
54   DM             v;
55   PetscErrorCode ierr;
56 
57   PetscFunctionBegin;
58   PetscValidPointer(dm,2);
59   *dm = PETSC_NULL;
60 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
61   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
62   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
63   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
64 #endif
65 
66   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
67   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
68 
69 
70   v->ltogmap      = PETSC_NULL;
71   v->ltogmapb     = PETSC_NULL;
72   v->bs           = 1;
73   v->coloringtype = IS_COLORING_GLOBAL;
74   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
75   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
76   v->defaultSection       = PETSC_NULL;
77   v->defaultGlobalSection = PETSC_NULL;
78   {
79     PetscInt i;
80     for (i = 0; i < 10; ++i) {
81       v->nullspaceConstructors[i] = PETSC_NULL;
82     }
83   }
84   v->numFields = 0;
85   v->fields    = PETSC_NULL;
86 
87   *dm = v;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMSetVecType"
93 /*@C
94        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
95 
96    Logically Collective on DMDA
97 
   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP
101 
102    Options Database:
103 .   -dm_vec_type ctype
104 
105    Level: intermediate
106 
107 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
108 @*/
109 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   PetscValidHeaderSpecific(da,DM_CLASSID,1);
115   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
116   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "VecGetDM"
122 /*@
123   VecGetDM - Gets the DM defining the data layout of the vector
124 
125   Not collective
126 
127   Input Parameter:
128 . v - The Vec
129 
130   Output Parameter:
131 . dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecGetDM(Vec v, DM *dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidPointer(dm,2);
144   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #undef __FUNCT__
175 #define __FUNCT__ "DMSetMatType"
176 /*@C
177        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
178 
179    Logically Collective on DM
180 
   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type
184 
185    Options Database:
186 .   -dm_mat_type ctype
187 
188    Level: intermediate
189 
190 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
191 @*/
192 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
193 {
194   PetscErrorCode ierr;
195   PetscFunctionBegin;
196   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
197   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
198   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
199   PetscFunctionReturn(0);
200 }
201 
202 #undef __FUNCT__
203 #define __FUNCT__ "MatGetDM"
204 /*@
205   MatGetDM - Gets the DM defining the data layout of the matrix
206 
207   Not collective
208 
209   Input Parameter:
210 . A - The Mat
211 
212   Output Parameter:
213 . dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatGetDM(Mat A, DM *dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   PetscValidPointer(dm,2);
226   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "MatSetDM"
232 /*@
233   MatSetDM - Sets the DM defining the data layout of the matrix
234 
235   Not collective
236 
237   Input Parameters:
238 + A - The Mat
239 - dm - The DM
240 
241   Level: intermediate
242 
243 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
244 @*/
245 PetscErrorCode MatSetDM(Mat A, DM dm)
246 {
247   PetscErrorCode ierr;
248 
249   PetscFunctionBegin;
250   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
251   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
252   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMSetOptionsPrefix"
258 /*@C
259    DMSetOptionsPrefix - Sets the prefix used for searching for all
260    DMDA options in the database.
261 
262    Logically Collective on DMDA
263 
264    Input Parameter:
265 +  da - the DMDA context
266 -  prefix - the prefix to prepend to all option names
267 
268    Notes:
269    A hyphen (-) must NOT be given at the beginning of the prefix name.
270    The first character of all runtime options is AUTOMATICALLY the hyphen.
271 
272    Level: advanced
273 
274 .keywords: DMDA, set, options, prefix, database
275 
276 .seealso: DMSetFromOptions()
277 @*/
278 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
279 {
280   PetscErrorCode ierr;
281 
282   PetscFunctionBegin;
283   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
284   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
285   PetscFunctionReturn(0);
286 }
287 
288 #undef __FUNCT__
289 #define __FUNCT__ "DMDestroy"
290 /*@
291     DMDestroy - Destroys a vector packer or DMDA.
292 
293     Collective on DM
294 
295     Input Parameter:
296 .   dm - the DM object to destroy
297 
298     Level: developer
299 
300 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
301 
302 @*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs:
     cached local/global work vectors, named global vectors, and the
     solution vector x (only if it points back at this DM) */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If outside references remain beyond the circular ones counted above,
     just drop the caller's reference and return without destroying */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  /* Cached local work vectors: an outstanding DMGetLocalVector() is a user error */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks (coarsen, refine, subdomain, global-to-local);
     only the link nodes are owned here, so only they are freed */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays (see DMGetWorkArray/DMRestoreWorkArray) */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* User context: destroyed only if the user registered a destroy routine */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  /* Coordinate information */
  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs last, before the header itself is destroyed */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "DMSetUp"
431 /*@
432     DMSetUp - sets up the data structures inside a DM object
433 
434     Collective on DM
435 
436     Input Parameter:
437 .   dm - the DM object to setup
438 
439     Level: developer
440 
441 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
442 
443 @*/
444 PetscErrorCode  DMSetUp(DM dm)
445 {
446   PetscErrorCode ierr;
447 
448   PetscFunctionBegin;
449   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
450   if (dm->setupcalled) PetscFunctionReturn(0);
451   if (dm->ops->setup) {
452     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
453   }
454   dm->setupcalled = PETSC_TRUE;
455   PetscFunctionReturn(0);
456 }
457 
458 #undef __FUNCT__
459 #define __FUNCT__ "DMSetFromOptions"
460 /*@
461     DMSetFromOptions - sets parameters in a DM from the options database
462 
463     Collective on DM
464 
465     Input Parameter:
466 .   dm - the DM object to set options for
467 
468     Options Database:
469 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
470 .   -dm_vec_type <type>  type of vector to create inside DM
471 .   -dm_mat_type <type>  type of matrix to create inside DM
472 -   -dm_coloring_type <global or ghosted>
473 
474     Level: developer
475 
476 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
477 
478 @*/
479 PetscErrorCode  DMSetFromOptions(DM dm)
480 {
481   char           typeName[256] = MATAIJ;
482   PetscBool      flg;
483   PetscErrorCode ierr;
484 
485   PetscFunctionBegin;
486   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
487   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
488     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
489     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
490     if (flg) {
491       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
492     }
493     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
494     if (flg) {
495       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
496     }
497     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
498     if (dm->ops->setfromoptions) {
499       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
500     }
501     /* process any options handlers added with PetscObjectAddOptionsHandler() */
502     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
503   ierr = PetscOptionsEnd();CHKERRQ(ierr);
504   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
505   PetscFunctionReturn(0);
506 }
507 
508 #undef __FUNCT__
509 #define __FUNCT__ "DMView"
510 /*@C
511     DMView - Views a vector packer or DMDA.
512 
513     Collective on DM
514 
515     Input Parameter:
516 +   dm - the DM object to view
517 -   v - the viewer
518 
519     Level: developer
520 
521 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
522 
523 @*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Default to stdout on the DM's communicator when no viewer is supplied */
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: classid followed by a fixed 256-byte type name record.
       Sizes and order here define the on-disk format — do not change them. */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Delegate the actual content to the type-specific view routine, if any */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
548 
549 #undef __FUNCT__
550 #define __FUNCT__ "DMCreateGlobalVector"
551 /*@
552     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
553 
554     Collective on DM
555 
556     Input Parameter:
557 .   dm - the DM object
558 
559     Output Parameter:
560 .   vec - the global vector
561 
562     Level: beginner
563 
564 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
565 
566 @*/
567 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
568 {
569   PetscErrorCode ierr;
570 
571   PetscFunctionBegin;
572   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
573   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
574   PetscFunctionReturn(0);
575 }
576 
577 #undef __FUNCT__
578 #define __FUNCT__ "DMCreateLocalVector"
579 /*@
580     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
581 
582     Not Collective
583 
584     Input Parameter:
585 .   dm - the DM object
586 
587     Output Parameter:
588 .   vec - the local vector
589 
590     Level: beginner
591 
592 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
593 
594 @*/
595 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
596 {
597   PetscErrorCode ierr;
598 
599   PetscFunctionBegin;
600   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
601   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
602   PetscFunctionReturn(0);
603 }
604 
605 #undef __FUNCT__
606 #define __FUNCT__ "DMGetLocalToGlobalMapping"
607 /*@
608    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
609 
610    Collective on DM
611 
612    Input Parameter:
613 .  dm - the DM that provides the mapping
614 
615    Output Parameter:
616 .  ltog - the mapping
617 
618    Level: intermediate
619 
620    Notes:
621    This mapping can then be used by VecSetLocalToGlobalMapping() or
622    MatSetLocalToGlobalMapping().
623 
624 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
625 @*/
626 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
627 {
628   PetscErrorCode ierr;
629 
630   PetscFunctionBegin;
631   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
632   PetscValidPointer(ltog,2);
633   if (!dm->ltogmap) {
634     PetscSection section, sectionGlobal;
635 
636     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
637     if (section) {
638       PetscInt      *ltog;
639       PetscInt       pStart, pEnd, size, p, l;
640 
641       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
642       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
643       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
644       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
645       for (p = pStart, l = 0; p < pEnd; ++p) {
646         PetscInt dof, off, c;
647 
648         /* Should probably use constrained dofs */
649         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
650         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
651         for (c = 0; c < dof; ++c, ++l) {
652           ltog[l] = off+c;
653         }
654       }
655       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
656       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
657     } else {
658       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
659       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
660     }
661   }
662   *ltog = dm->ltogmap;
663   PetscFunctionReturn(0);
664 }
665 
666 #undef __FUNCT__
667 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
668 /*@
669    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
670 
671    Collective on DM
672 
673    Input Parameter:
674 .  da - the distributed array that provides the mapping
675 
676    Output Parameter:
677 .  ltog - the block mapping
678 
679    Level: intermediate
680 
681    Notes:
682    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
683    MatSetLocalToGlobalMappingBlock().
684 
685 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
686 @*/
687 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
688 {
689   PetscErrorCode ierr;
690 
691   PetscFunctionBegin;
692   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
693   PetscValidPointer(ltog,2);
694   if (!dm->ltogmapb) {
695     PetscInt bs;
696     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
697     if (bs > 1) {
698       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
699       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
700     } else {
701       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
702       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
703     }
704   }
705   *ltog = dm->ltogmapb;
706   PetscFunctionReturn(0);
707 }
708 
709 #undef __FUNCT__
710 #define __FUNCT__ "DMGetBlockSize"
711 /*@
712    DMGetBlockSize - Gets the inherent block size associated with a DM
713 
714    Not Collective
715 
716    Input Parameter:
717 .  dm - the DM with block structure
718 
719    Output Parameter:
720 .  bs - the block size, 1 implies no exploitable block structure
721 
722    Level: intermediate
723 
724 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
725 @*/
726 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
727 {
728   PetscFunctionBegin;
729   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
730   PetscValidPointer(bs,2);
731   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
732   *bs = dm->bs;
733   PetscFunctionReturn(0);
734 }
735 
736 #undef __FUNCT__
737 #define __FUNCT__ "DMCreateInterpolation"
738 /*@
739     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
740 
741     Collective on DM
742 
743     Input Parameter:
744 +   dm1 - the DM object
745 -   dm2 - the second, finer DM object
746 
747     Output Parameter:
748 +  mat - the interpolation
749 -  vec - the scaling (optional)
750 
751     Level: developer
752 
753     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
754         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
755 
756         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
757         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
758 
759 
760 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
761 
762 @*/
763 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
764 {
765   PetscErrorCode ierr;
766 
767   PetscFunctionBegin;
768   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
769   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
770   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
771   PetscFunctionReturn(0);
772 }
773 
774 #undef __FUNCT__
775 #define __FUNCT__ "DMCreateInjection"
776 /*@
777     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
778 
779     Collective on DM
780 
781     Input Parameter:
782 +   dm1 - the DM object
783 -   dm2 - the second, finer DM object
784 
785     Output Parameter:
786 .   ctx - the injection
787 
788     Level: developer
789 
790    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
791         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
792 
793 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
794 
795 @*/
796 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
797 {
798   PetscErrorCode ierr;
799 
800   PetscFunctionBegin;
801   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
802   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
803   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
804   PetscFunctionReturn(0);
805 }
806 
807 #undef __FUNCT__
808 #define __FUNCT__ "DMCreateColoring"
809 /*@C
810     DMCreateColoring - Gets coloring for a DMDA or DMComposite
811 
812     Collective on DM
813 
814     Input Parameter:
815 +   dm - the DM object
816 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ
818 
819     Output Parameter:
820 .   coloring - the coloring
821 
822     Level: developer
823 
824 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
825 
826 @*/
827 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
828 {
829   PetscErrorCode ierr;
830 
831   PetscFunctionBegin;
832   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
833   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
834   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
835   PetscFunctionReturn(0);
836 }
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "DMCreateMatrix"
840 /*@C
841     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
842 
843     Collective on DM
844 
845     Input Parameter:
846 +   dm - the DM object
847 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
848             any type which inherits from one of these (such as MATAIJ)
849 
850     Output Parameter:
851 .   mat - the empty Jacobian
852 
853     Level: beginner
854 
855     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
856        do not need to do it yourself.
857 
858        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
859        the nonzero pattern call DMDASetMatPreallocateOnly()
860 
861        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
862        internally by PETSc.
863 
864        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
865        the indices for the global numbering for DMDAs which is complicated.
866 
867 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
868 
869 @*/
870 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
871 {
872   PetscErrorCode ierr;
873 
874   PetscFunctionBegin;
875   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
876 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
877   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
878 #endif
879   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
880   PetscValidPointer(mat,3);
881   if (dm->mattype) {
882     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
883   } else {
884     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
885   }
886   PetscFunctionReturn(0);
887 }
888 
889 #undef __FUNCT__
890 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
891 /*@
892   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
893     preallocated but the nonzero structure and zero values will not be set.
894 
895   Logically Collective on DMDA
896 
897   Input Parameter:
898 + dm - the DM
899 - only - PETSC_TRUE if only want preallocation
900 
901   Level: developer
902 .seealso DMCreateMatrix()
903 @*/
904 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
905 {
906   PetscFunctionBegin;
907   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
908   dm->prealloc_only = only;
909   PetscFunctionReturn(0);
910 }
911 
912 #undef __FUNCT__
913 #define __FUNCT__ "DMGetWorkArray"
914 /*@C
915   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
916 
917   Not Collective
918 
919   Input Parameters:
920 + dm - the DM object
. count - The minimum size
922 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
923 
924   Output Parameter:
925 . array - the work array
926 
927   Level: developer
928 
929 .seealso DMDestroy(), DMCreate()
930 @*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free ("in") list if available, else allocate one */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  /* Grow the cached buffer if it is smaller than the request; it is never shrunk */
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out ("out") list; DMRestoreWorkArray moves it back */
  link->next = dm->workout;
  dm->workout = link;
  /* mem is really a pointer-to-pointer; store the buffer address through it */
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
957 
958 #undef __FUNCT__
959 #define __FUNCT__ "DMRestoreWorkArray"
960 /*@C
961   DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
962 
963   Not Collective
964 
965   Input Parameters:
966 + dm - the DM object
967 . count - The minium size
968 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
969 
970   Output Parameter:
971 . array - the work array
972 
973   Level: developer
974 
975 .seealso DMDestroy(), DMCreate()
976 @*/
977 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
978 {
979   DMWorkLink *p,link;
980 
981   PetscFunctionBegin;
982   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
983   PetscValidPointer(mem,4);
984   for (p=&dm->workout; (link=*p); p=&link->next) {
985     if (link->mem == *(void**)mem) {
986       *p = link->next;
987       link->next = dm->workin;
988       dm->workin = link;
989       *(void**)mem = PETSC_NULL;
990       PetscFunctionReturn(0);
991     }
992   }
993   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
994   PetscFunctionReturn(0);
995 }
996 
997 #undef __FUNCT__
998 #define __FUNCT__ "DMSetNullSpaceConstructor"
999 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1000 {
1001   PetscFunctionBegin;
1002   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1003   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1004   dm->nullspaceConstructors[field] = nullsp;
1005   PetscFunctionReturn(0);
1006 }
1007 
1008 #undef __FUNCT__
1009 #define __FUNCT__ "DMCreateFieldIS"
1010 /*@C
1011   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1012 
1013   Not collective
1014 
1015   Input Parameter:
1016 . dm - the DM object
1017 
1018   Output Parameters:
1019 + numFields  - The number of fields (or PETSC_NULL if not requested)
1020 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1021 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1022 
1023   Level: intermediate
1024 
1025   Notes:
1026   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1027   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1028   PetscFree().
1029 
1030 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1031 @*/
1032 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1033 {
1034   PetscSection   section, sectionGlobal;
1035   PetscErrorCode ierr;
1036 
1037   PetscFunctionBegin;
1038   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1039   if (numFields) {
1040     PetscValidPointer(numFields,2);
1041     *numFields = 0;
1042   }
1043   if (fieldNames) {
1044     PetscValidPointer(fieldNames,3);
1045     *fieldNames = PETSC_NULL;
1046   }
1047   if (fields) {
1048     PetscValidPointer(fields,4);
1049     *fields = PETSC_NULL;
1050   }
1051   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1052   if (section) {
1053     PetscInt *fieldSizes, **fieldIndices;
1054     PetscInt  nF, f, pStart, pEnd, p;
1055 
1056     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1057     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1058     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1059     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1060     for (f = 0; f < nF; ++f) {
1061       fieldSizes[f] = 0;
1062     }
1063     for (p = pStart; p < pEnd; ++p) {
1064       PetscInt gdof;
1065 
1066       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1067       if (gdof > 0) {
1068         for (f = 0; f < nF; ++f) {
1069           PetscInt fdof, fcdof;
1070 
1071           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1072           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1073           fieldSizes[f] += fdof-fcdof;
1074         }
1075       }
1076     }
1077     for (f = 0; f < nF; ++f) {
1078       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1079       fieldSizes[f] = 0;
1080     }
1081     for (p = pStart; p < pEnd; ++p) {
1082       PetscInt gdof, goff;
1083 
1084       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1085       if (gdof > 0) {
1086         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1087         for (f = 0; f < nF; ++f) {
1088           PetscInt fdof, fcdof, fc;
1089 
1090           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1091           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1092           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1093             fieldIndices[f][fieldSizes[f]] = goff++;
1094           }
1095         }
1096       }
1097     }
1098     if (numFields) {*numFields = nF;}
1099     if (fieldNames) {
1100       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1101       for (f = 0; f < nF; ++f) {
1102         const char *fieldName;
1103 
1104         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1105         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1106       }
1107     }
1108     if (fields) {
1109       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1110       for (f = 0; f < nF; ++f) {
1111         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1112       }
1113     }
1114     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1115   } else {
1116     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1117   }
1118   PetscFunctionReturn(0);
1119 }
1120 
1121 #undef __FUNCT__
1122 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1123 /*@C
1124   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1125 
1126   Not Collective
1127 
1128   Input Parameters:
1129 + dm   - the DM object
1130 - name - the name of the field decomposition
1131 
1132   Output Parameter:
1133 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1134 
1135   Level: advanced
1136 
1137 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1138 @*/
1139 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1140 {
1141   PetscErrorCode ierr;
1142 
1143   PetscFunctionBegin;
1144   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1145   PetscValidCharPointer(name,2);
1146   PetscValidPointer(ddm,3);
1147   *ddm = PETSC_NULL;
1148   if (dm->ops->createfielddecompositiondm) {
1149     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1150   }
1151   PetscFunctionReturn(0);
1152 }
1153 
1154 #undef __FUNCT__
1155 #define __FUNCT__ "DMCreateFieldDecomposition"
1156 /*@C
1157   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1158                           corresponding to different fields: each IS contains the global indices of the dofs of the
1159                           corresponding field. The optional list of DMs define the DM for each subproblem.
1160                           Generalizes DMCreateFieldIS().
1161 
1162   Not collective
1163 
1164   Input Parameter:
1165 . dm - the DM object
1166 
1167   Output Parameters:
1168 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1169 . namelist  - The name for each field (or PETSC_NULL if not requested)
1170 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1171 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1172 
1173   Level: intermediate
1174 
1175   Notes:
1176   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1177   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1178   and all of the arrays should be freed with PetscFree().
1179 
1180 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1181 @*/
1182 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1183 {
1184   PetscErrorCode ierr;
1185 
1186   PetscFunctionBegin;
1187   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1188   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1189   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1190   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1191   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1192   if (!dm->ops->createfielddecomposition) {
1193     PetscSection section;
1194     PetscInt     numFields, f;
1195 
1196     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1197     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1198     if (section && numFields && dm->ops->createsubdm) {
1199       *len = numFields;
1200       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1201       for (f = 0; f < numFields; ++f) {
1202         const char *fieldName;
1203 
1204         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1205         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1206         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1207       }
1208     } else {
1209       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1210       /* By default there are no DMs associated with subproblems. */
1211       if (dmlist) *dmlist = PETSC_NULL;
1212     }
1213   }
1214   else {
1215     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1216   }
1217   PetscFunctionReturn(0);
1218 }
1219 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers of the selected fields, an array of length numFields

  Output Parameters:
+ is    - The global indices for the subproblem (pass PETSC_NULL if not needed)
- subdm - The DM for the subproblem (pass PETSC_NULL if not needed)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1255 
1256 #undef __FUNCT__
1257 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1258 /*@C
1259   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1260 
1261   Not Collective
1262 
1263   Input Parameters:
1264 + dm   - the DM object
1265 - name - the name of the subdomain decomposition
1266 
1267   Output Parameter:
1268 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1269 
1270   Level: advanced
1271 
1272 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1273 @*/
1274 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1275 {
1276   PetscErrorCode ierr;
1277 
1278   PetscFunctionBegin;
1279   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1280   PetscValidCharPointer(name,2);
1281   PetscValidPointer(ddm,3);
1282   *ddm = PETSC_NULL;
1283   if (dm->ops->createdomaindecompositiondm) {
1284     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1285   }
1286   PetscFunctionReturn(0);
1287 }
1288 
1289 #undef __FUNCT__
1290 #define __FUNCT__ "DMCreateDomainDecomposition"
1291 /*@C
1292   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1293                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1294                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1295                           define a nonoverlapping covering, while outer subdomains can overlap.
1296                           The optional list of DMs define the DM for each subproblem.
1297 
1298   Not collective
1299 
1300   Input Parameter:
1301 . dm - the DM object
1302 
1303   Output Parameters:
1304 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1305 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1306 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1307 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1308 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1309 
1310   Level: intermediate
1311 
1312   Notes:
1313   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1314   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1315   and all of the arrays should be freed with PetscFree().
1316 
1317 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1318 @*/
1319 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1320 {
1321   PetscErrorCode ierr;
1322   DMSubDomainHookLink link;
1323   PetscInt            i,l;
1324 
1325   PetscFunctionBegin;
1326   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1327   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1328   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1329   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1330   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1331   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1332   if (dm->ops->createdomaindecomposition) {
1333     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1334   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1335   if (len) *len = l;
1336   for (i = 0; i < l; i++) {
1337     for (link=dm->subdomainhook; link; link=link->next) {
1338       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1339     }
1340     (*dmlist)[i]->ctx = dm->ctx;
1341   }
1342   PetscFunctionReturn(0);
1343 }
1344 
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1348 /*@C
1349   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1350 
1351   Not collective
1352 
1353   Input Parameters:
1354 + dm - the DM object
1355 . n  - the number of subdomain scatters
1356 - subdms - the local subdomains
1357 
1358   Output Parameters:
1359 + n     - the number of scatters returned
1360 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1361 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1362 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1363 
1364   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1365   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1366   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1367   solution and residual data.
1368 
1369   Level: developer
1370 
1371 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1372 @*/
1373 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1374 {
1375   PetscErrorCode ierr;
1376 
1377   PetscFunctionBegin;
1378   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1379   PetscValidPointer(subdms,3);
1380   PetscValidPointer(iscat,4);
1381   PetscValidPointer(oscat,5);
1382   PetscValidPointer(gscat,6);
1383   if (dm->ops->createddscatters) {
1384     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1385   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1386   PetscFunctionReturn(0);
1387 }
1388 
1389 #undef __FUNCT__
1390 #define __FUNCT__ "DMRefine"
1391 /*@
1392   DMRefine - Refines a DM object
1393 
1394   Collective on DM
1395 
1396   Input Parameter:
1397 + dm   - the DM object
1398 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1399 
1400   Output Parameter:
1401 . dmf - the refined DM, or PETSC_NULL
1402 
1403   Note: If no refinement was done, the return value is PETSC_NULL
1404 
1405   Level: developer
1406 
1407 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1408 @*/
1409 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1410 {
1411   PetscErrorCode ierr;
1412   DMRefineHookLink link;
1413 
1414   PetscFunctionBegin;
1415   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1416   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1417   if (*dmf) {
1418     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1419     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1420     (*dmf)->ctx       = dm->ctx;
1421     (*dmf)->leveldown = dm->leveldown;
1422     (*dmf)->levelup   = dm->levelup + 1;
1423     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1424     for (link=dm->refinehook; link; link=link->next) {
1425       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1426     }
1427   }
1428   PetscFunctionReturn(0);
1429 }
1430 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - coarse DM on which to run the hooks when a finer level is created from it
.  refinehook - function to run when setting up a finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1484 
1485 #undef __FUNCT__
1486 #define __FUNCT__ "DMInterpolate"
1487 /*@
1488    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1489 
1490    Collective if any hooks are
1491 
1492    Input Arguments:
1493 +  coarse - coarser DM to use as a base
1494 .  restrct - interpolation matrix, apply using MatInterpolate()
1495 -  fine - finer DM to update
1496 
1497    Level: developer
1498 
1499 .seealso: DMRefineHookAdd(), MatInterpolate()
1500 @*/
1501 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1502 {
1503   PetscErrorCode ierr;
1504   DMRefineHookLink link;
1505 
1506   PetscFunctionBegin;
1507   for (link=fine->refinehook; link; link=link->next) {
1508     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1509   }
1510   PetscFunctionReturn(0);
1511 }
1512 
#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   level - number of refinements

    Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* levelup is incremented by DMRefine() each time a finer DM is created */
  *level = dm->levelup;
  PetscFunctionReturn(0);
}
1538 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be updated alongside the global-to-local transfer.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1596 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run registered begin-hooks before any communication is started */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The PetscSF path only broadcasts owned values to ghosts; addition is not supported here */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Collapse the *_ALL_VALUES modes onto the basic modes for the type's implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1642 
1643 #undef __FUNCT__
1644 #define __FUNCT__ "DMGlobalToLocalEnd"
1645 /*@
1646     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1647 
1648     Neighbor-wise Collective on DM
1649 
1650     Input Parameters:
1651 +   dm - the DM object
1652 .   g - the global vector
1653 .   mode - INSERT_VALUES or ADD_VALUES
1654 -   l - the local vector
1655 
1656 
1657     Level: beginner
1658 
1659 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1660 
1661 @*/
1662 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1663 {
1664   PetscSF        sf;
1665   PetscErrorCode ierr;
1666   PetscScalar    *lArray, *gArray;
1667   DMGlobalToLocalHookLink link;
1668 
1669   PetscFunctionBegin;
1670   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1671   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1672   if (sf) {
1673   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1674 
1675     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1676     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1677     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1678     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1679     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1680   } else {
1681     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1682   }
1683   for (link=dm->gtolhook; link; link=link->next) {
1684     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1685   }
1686   PetscFunctionReturn(0);
1687 }
1688 
1689 #undef __FUNCT__
1690 #define __FUNCT__ "DMLocalToGlobalBegin"
1691 /*@
1692     DMLocalToGlobalBegin - updates global vectors from local vectors
1693 
1694     Neighbor-wise Collective on DM
1695 
1696     Input Parameters:
1697 +   dm - the DM object
1698 .   l - the local vector
1699 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1700            base point.
1701 - - the global vector
1702 
1703     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1704            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1705            global array to the final global array with VecAXPY().
1706 
1707     Level: beginner
1708 
1709 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1710 
1711 @*/
1712 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1713 {
1714   PetscSF        sf;
1715   PetscErrorCode ierr;
1716 
1717   PetscFunctionBegin;
1718   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1719   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1720   if (sf) {
1721     MPI_Op       op;
1722     PetscScalar *lArray, *gArray;
1723 
1724     switch(mode) {
1725     case INSERT_VALUES:
1726     case INSERT_ALL_VALUES:
1727 #if defined(PETSC_HAVE_MPI_REPLACE)
1728       op = MPI_REPLACE; break;
1729 #else
1730       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1731 #endif
1732     case ADD_VALUES:
1733     case ADD_ALL_VALUES:
1734       op = MPI_SUM; break;
1735   default:
1736     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1737     }
1738     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1739     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1740     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1741     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1742     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1743   } else {
1744     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1745   }
1746   PetscFunctionReturn(0);
1747 }
1748 
1749 #undef __FUNCT__
1750 #define __FUNCT__ "DMLocalToGlobalEnd"
1751 /*@
1752     DMLocalToGlobalEnd - updates global vectors from local vectors
1753 
1754     Neighbor-wise Collective on DM
1755 
1756     Input Parameters:
1757 +   dm - the DM object
1758 .   l - the local vector
1759 .   mode - INSERT_VALUES or ADD_VALUES
1760 -   g - the global vector
1761 
1762 
1763     Level: beginner
1764 
1765 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMGlobalToLocalEnd()
1766 
1767 @*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must mirror DMLocalToGlobalBegin(): same SF (or fallback) and same MPI_Op to finish the started reduction */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Recompute the MPI reduction operation corresponding to the insertion mode */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it INSERT cannot be expressed as a reduction */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    /* Complete the reduction started by DMLocalToGlobalBegin() */
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* The ALL_VALUES variants are normalized to their plain counterparts before calling the implementation */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1804 
1805 #undef __FUNCT__
1806 #define __FUNCT__ "DMCoarsen"
1807 /*@
1808     DMCoarsen - Coarsens a DM object
1809 
1810     Collective on DM
1811 
1812     Input Parameter:
1813 +   dm - the DM object
1814 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1815 
1816     Output Parameter:
1817 .   dmc - the coarsened DM
1818 
1819     Level: developer
1820 
1821 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1822 
1823 @*/
1824 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1825 {
1826   PetscErrorCode ierr;
1827   DMCoarsenHookLink link;
1828 
1829   PetscFunctionBegin;
1830   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1831   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1832   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1833   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1834   (*dmc)->ctx       = dm->ctx;
1835   (*dmc)->levelup   = dm->levelup;
1836   (*dmc)->leveldown = dm->leveldown + 1;
1837   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1838   for (link=dm->coarsenhook; link; link=link->next) {
1839     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1840   }
1841   PetscFunctionReturn(0);
1842 }
1843 
1844 #undef __FUNCT__
1845 #define __FUNCT__ "DMCoarsenHookAdd"
1846 /*@
1847    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1848 
1849    Logically Collective
1850 
1851    Input Arguments:
1852 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1853 .  coarsenhook - function to run when setting up a coarser level
1854 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1855 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1856 
1857    Calling sequence of coarsenhook:
1858 $    coarsenhook(DM fine,DM coarse,void *ctx);
1859 
1860 +  fine - fine level DM
1861 .  coarse - coarse level DM to restrict problem to
1862 -  ctx - optional user-defined function context
1863 
1864    Calling sequence for restricthook:
1865 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1866 
1867 +  fine - fine level DM
1868 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1869 .  rscale - scaling vector for restriction
1870 .  inject - matrix restricting by injection
1871 .  coarse - coarse level DM to update
1872 -  ctx - optional user-defined function context
1873 
1874    Level: advanced
1875 
1876    Notes:
1877    This function is only needed if auxiliary data needs to be set up on coarse grids.
1878 
1879    If this function is called multiple times, the hooks will be run in the order they are added.
1880 
1881    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1882    extract the finest level information from its context (instead of from the SNES).
1883 
1884 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1885 @*/
1886 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1887 {
1888   PetscErrorCode ierr;
1889   DMCoarsenHookLink link,*p;
1890 
1891   PetscFunctionBegin;
1892   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1893   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1894   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1895   link->coarsenhook = coarsenhook;
1896   link->restricthook = restricthook;
1897   link->ctx = ctx;
1898   link->next = PETSC_NULL;
1899   *p = link;
1900   PetscFunctionReturn(0);
1901 }
1902 
1903 #undef __FUNCT__
1904 #define __FUNCT__ "DMRestrict"
1905 /*@
1906    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1907 
1908    Collective if any hooks are
1909 
1910    Input Arguments:
1911 +  fine - finer DM to use as a base
1912 .  restrct - restriction matrix, apply using MatRestrict()
1913 .  inject - injection matrix, also use MatRestrict()
1913 -  coarse - coarser DM to update
1915 
1916    Level: developer
1917 
1918 .seealso: DMCoarsenHookAdd(), MatRestrict()
1919 @*/
1920 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1921 {
1922   PetscErrorCode ierr;
1923   DMCoarsenHookLink link;
1924 
1925   PetscFunctionBegin;
1926   for (link=fine->coarsenhook; link; link=link->next) {
1927     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1928   }
1929   PetscFunctionReturn(0);
1930 }
1931 
1932 #undef __FUNCT__
1933 #define __FUNCT__ "DMSubDomainHookAdd"
1934 /*@
1935    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1936 
1937    Logically Collective
1938 
1939    Input Arguments:
1940 +  global - global DM
1941 .  ddhook - function to run when setting up the subdomain DMs
1942 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1943 -  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)
1944 
1945    Calling sequence for restricthook:
1946 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1947 
1948 +  global - global DM
1949 .  out    - scatter to the outer (with ghost and overlap points) block vector
1950 .  in     - scatter to block vector values only owned locally
1951 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1952 -  ctx - optional user-defined function context
1953 
1954    Level: advanced
1955 
1956    Notes:
1957    This function is only needed if auxiliary data needs to be set up on coarse grids.
1958 
1959    If this function is called multiple times, the hooks will be run in the order they are added.
1960 
1961    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1962    extract the finest level information from its context (instead of from the SNES).
1963 
1964 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1965 @*/
1966 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1967 {
1968   PetscErrorCode ierr;
1969   DMSubDomainHookLink link,*p;
1970 
1971   PetscFunctionBegin;
1972   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1973   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1974   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
1975   link->restricthook = restricthook;
1976   link->ddhook = ddhook;
1977   link->ctx = ctx;
1978   link->next = PETSC_NULL;
1979   *p = link;
1980   PetscFunctionReturn(0);
1981 }
1982 
1983 #undef __FUNCT__
1984 #define __FUNCT__ "DMSubDomainRestrict"
1985 /*@
1986    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1987 
1988    Collective if any hooks are
1989 
1990    Input Arguments:
1991 +  global - global DM to use as a base
1992 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
1993 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
1994 -  subdm - subdomain DM to update
1995 
1996    Level: developer
1997 
1998 .seealso: DMCoarsenHookAdd(), MatRestrict()
1999 @*/
2000 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2001 {
2002   PetscErrorCode ierr;
2003   DMSubDomainHookLink link;
2004 
2005   PetscFunctionBegin;
2006   for (link=global->subdomainhook; link; link=link->next) {
2007     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2008   }
2009   PetscFunctionReturn(0);
2010 }
2011 
2012 #undef __FUNCT__
2013 #define __FUNCT__ "DMGetCoarsenLevel"
2014 /*@
2015     DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2016 
2017     Not Collective
2018 
2019     Input Parameter:
2020 .   dm - the DM object
2021 
2022     Output Parameter:
2023 .   level - number of coarsenings
2024 
2025     Level: developer
2026 
2027 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2028 
2029 @*/
2030 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2031 {
2032   PetscFunctionBegin;
2033   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2034   *level = dm->leveldown;
2035   PetscFunctionReturn(0);
2036 }
2037 
2038 
2039 
2040 #undef __FUNCT__
2041 #define __FUNCT__ "DMRefineHierarchy"
2042 /*@C
2043     DMRefineHierarchy - Refines a DM object, all levels at once
2044 
2045     Collective on DM
2046 
2047     Input Parameter:
2048 +   dm - the DM object
2049 -   nlevels - the number of levels of refinement
2050 
2051     Output Parameter:
2052 .   dmf - the refined DM hierarchy
2053 
2054     Level: developer
2055 
2056 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2057 
2058 @*/
2059 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2060 {
2061   PetscErrorCode ierr;
2062 
2063   PetscFunctionBegin;
2064   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2065   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2066   if (nlevels == 0) PetscFunctionReturn(0);
2067   if (dm->ops->refinehierarchy) {
2068     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2069   } else if (dm->ops->refine) {
2070     PetscInt i;
2071 
2072     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2073     for (i=1; i<nlevels; i++) {
2074       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2075     }
2076   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2077   PetscFunctionReturn(0);
2078 }
2079 
2080 #undef __FUNCT__
2081 #define __FUNCT__ "DMCoarsenHierarchy"
2082 /*@C
2083     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2084 
2085     Collective on DM
2086 
2087     Input Parameter:
2088 +   dm - the DM object
2089 -   nlevels - the number of levels of coarsening
2090 
2091     Output Parameter:
2092 .   dmc - the coarsened DM hierarchy
2093 
2094     Level: developer
2095 
2096 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2097 
2098 @*/
2099 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2100 {
2101   PetscErrorCode ierr;
2102 
2103   PetscFunctionBegin;
2104   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2105   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2106   if (nlevels == 0) PetscFunctionReturn(0);
2107   PetscValidPointer(dmc,3);
2108   if (dm->ops->coarsenhierarchy) {
2109     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2110   } else if (dm->ops->coarsen) {
2111     PetscInt i;
2112 
2113     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2114     for (i=1; i<nlevels; i++) {
2115       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2116     }
2117   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2118   PetscFunctionReturn(0);
2119 }
2120 
2121 #undef __FUNCT__
2122 #define __FUNCT__ "DMCreateAggregates"
2123 /*@
2124    DMCreateAggregates - Gets the aggregates that map between
2125    grids associated with two DMs.
2126 
2127    Collective on DM
2128 
2129    Input Parameters:
2130 +  dmc - the coarse grid DM
2131 -  dmf - the fine grid DM
2132 
2133    Output Parameters:
2134 .  rest - the restriction matrix (transpose of the projection matrix)
2135 
2136    Level: intermediate
2137 
2138 .keywords: interpolation, restriction, multigrid
2139 
2140 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2141 @*/
2142 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2143 {
2144   PetscErrorCode ierr;
2145 
2146   PetscFunctionBegin;
2147   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2148   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2149   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2150   PetscFunctionReturn(0);
2151 }
2152 
2153 #undef __FUNCT__
2154 #define __FUNCT__ "DMSetApplicationContextDestroy"
2155 /*@C
2156     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2157 
2158     Not Collective
2159 
2160     Input Parameters:
2161 +   dm - the DM object
2162 -   destroy - the destroy function
2163 
2164     Level: intermediate
2165 
2166 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2167 
2168 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the destructor for the user context; per the doc above it is invoked when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2176 
2177 #undef __FUNCT__
2178 #define __FUNCT__ "DMSetApplicationContext"
2179 /*@
2180     DMSetApplicationContext - Set a user context into a DM object
2181 
2182     Not Collective
2183 
2184     Input Parameters:
2185 +   dm - the DM object
2186 -   ctx - the user context
2187 
2188     Level: intermediate
2189 
2190 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2191 
2192 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the user context pointer; the DM does not copy or take ownership of it
     (ownership/cleanup is governed by DMSetApplicationContextDestroy()) */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2200 
2201 #undef __FUNCT__
2202 #define __FUNCT__ "DMGetApplicationContext"
2203 /*@
2204     DMGetApplicationContext - Gets a user context from a DM object
2205 
2206     Not Collective
2207 
2208     Input Parameter:
2209 .   dm - the DM object
2210 
2211     Output Parameter:
2212 .   ctx - the user context
2213 
2214     Level: intermediate
2215 
2216 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2217 
2218 @*/
2219 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2220 {
2221   PetscFunctionBegin;
2222   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2223   *(void**)ctx = dm->ctx;
2224   PetscFunctionReturn(0);
2225 }
2226 
2227 #undef __FUNCT__
2228 #define __FUNCT__ "DMSetVariableBounds"
2229 /*@C
2230     DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2231 
2232     Logically Collective on DM
2233 
2234     Input Parameter:
2235 +   dm - the DM object
2236 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2237 
2238     Level: intermediate
2239 
2240 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2241          DMSetJacobian()
2242 
2243 @*/
2244 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2245 {
2246   PetscFunctionBegin;
2247   dm->ops->computevariablebounds = f;
2248   PetscFunctionReturn(0);
2249 }
2250 
2251 #undef __FUNCT__
2252 #define __FUNCT__ "DMHasVariableBounds"
2253 /*@
2254     DMHasVariableBounds - does the DM object have a variable bounds function?
2255 
2256     Not Collective
2257 
2258     Input Parameter:
2259 .   dm - the DM object
2260 
2261     Output Parameter:
2262 .   flg - PETSC_TRUE if the variable bounds function exists
2263 
2264     Level: developer
2265 
2266 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2267 
2268 @*/
2269 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2270 {
2271   PetscFunctionBegin;
2272   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2273   PetscFunctionReturn(0);
2274 }
2275 
2276 #undef __FUNCT__
2277 #define __FUNCT__ "DMComputeVariableBounds"
2278 /*@C
2279     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2280 
2281     Logically Collective on DM
2282 
2283     Input Parameters:
2284 +   dm - the DM object
2285 -   x  - current solution at which the bounds are computed
2286 
2287     Output parameters:
2288 +   xl - lower bound
2289 -   xu - upper bound
2290 
2291     Level: intermediate
2292 
2293 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2294 
2295 @*/
2296 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2297 {
2298   PetscErrorCode ierr;
2299   PetscFunctionBegin;
2300   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2301   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2302   if (dm->ops->computevariablebounds) {
2303     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2304   }
2305   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2306   PetscFunctionReturn(0);
2307 }
2308 
2309 #undef __FUNCT__
2310 #define __FUNCT__ "DMHasColoring"
2311 /*@
2312     DMHasColoring - does the DM object have a method of providing a coloring?
2313 
2314     Not Collective
2315 
2316     Input Parameter:
2317 .   dm - the DM object
2318 
2319     Output Parameter:
2320 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2321 
2322     Level: developer
2323 
2324 .seealso DMHasFunction(), DMCreateColoring()
2325 
2326 @*/
2327 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2328 {
2329   PetscFunctionBegin;
2330   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2331   PetscFunctionReturn(0);
2332 }
2333 
2334 #undef  __FUNCT__
2335 #define __FUNCT__ "DMSetVec"
2336 /*@C
2337     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2338 
2339     Collective on DM
2340 
2341     Input Parameter:
2342 +   dm - the DM object
2343 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2344 
2345     Level: developer
2346 
2347 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2348 
2349 @*/
2350 PetscErrorCode  DMSetVec(DM dm,Vec x)
2351 {
2352   PetscErrorCode ierr;
2353   PetscFunctionBegin;
2354   if (x) {
2355     if (!dm->x) {
2356       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2357     }
2358     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2359   }
2360   else if (dm->x) {
2361     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2362   }
2363   PetscFunctionReturn(0);
2364 }
2365 
PetscFList DMList                       = PETSC_NULL;  /* list of registered DM types, populated by DMRegister() */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE; /* set once DMRegisterAll() has run; reset by DMRegisterDestroy() */
2368 
2369 #undef __FUNCT__
2370 #define __FUNCT__ "DMSetType"
2371 /*@C
2372   DMSetType - Builds a DM, for a particular DM implementation.
2373 
2374   Collective on DM
2375 
2376   Input Parameters:
2377 + dm     - The DM object
2378 - method - The name of the DM type
2379 
2380   Options Database Key:
2381 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2382 
2383   Notes:
2384   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2385 
2386   Level: intermediate
2387 
2388 .keywords: DM, set, type
2389 .seealso: DMGetType(), DMCreate()
2390 @*/
2391 PetscErrorCode  DMSetType(DM dm, DMType method)
2392 {
2393   PetscErrorCode (*r)(DM);
2394   PetscBool      match;
2395   PetscErrorCode ierr;
2396 
2397   PetscFunctionBegin;
2398   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2399   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2400   if (match) PetscFunctionReturn(0);
2401 
2402   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2403   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2404   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2405 
2406   if (dm->ops->destroy) {
2407     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2408     dm->ops->destroy = PETSC_NULL;
2409   }
2410   ierr = (*r)(dm);CHKERRQ(ierr);
2411   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2412   PetscFunctionReturn(0);
2413 }
2414 
2415 #undef __FUNCT__
2416 #define __FUNCT__ "DMGetType"
2417 /*@C
2418   DMGetType - Gets the DM type name (as a string) from the DM.
2419 
2420   Not Collective
2421 
2422   Input Parameter:
2423 . dm  - The DM
2424 
2425   Output Parameter:
2426 . type - The DM type name
2427 
2428   Level: intermediate
2429 
2430 .keywords: DM, get, type, name
2431 .seealso: DMSetType(), DMCreate()
2432 @*/
2433 PetscErrorCode  DMGetType(DM dm, DMType *type)
2434 {
2435   PetscErrorCode ierr;
2436 
2437   PetscFunctionBegin;
2438   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2439   PetscValidCharPointer(type,2);
2440   if (!DMRegisterAllCalled) {
2441     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2442   }
2443   *type = ((PetscObject)dm)->type_name;
2444   PetscFunctionReturn(0);
2445 }
2446 
2447 #undef __FUNCT__
2448 #define __FUNCT__ "DMConvert"
2449 /*@C
2450   DMConvert - Converts a DM to another DM, either of the same or different type.
2451 
2452   Collective on DM
2453 
2454   Input Parameters:
2455 + dm - the DM
2456 - newtype - new DM type (use "same" for the same type)
2457 
2458   Output Parameter:
2459 . M - pointer to new DM
2460 
2461   Notes:
2462   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2463   the MPI communicator of the generated DM is always the same as the communicator
2464   of the input DM.
2465 
2466   Level: intermediate
2467 
2468 .seealso: DMCreate()
2469 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below — the
     fast path for "already the right type" appears to be unimplemented; confirm intent */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class;
          the composed-function name has the form "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class; a throwaway
           DM of the target type is created just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): steps 3-5 are disabled/unimplemented, so reaching here is always an error */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2542 
2543 /*--------------------------------------------------------------------------------------------------------------------*/
2544 
2545 #undef __FUNCT__
2546 #define __FUNCT__ "DMRegister"
2547 /*@C
2548   DMRegister - See DMRegisterDynamic()
2549 
2550   Level: advanced
2551 @*/
2552 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2553 {
2554   char fullname[PETSC_MAX_PATH_LEN];
2555   PetscErrorCode ierr;
2556 
2557   PetscFunctionBegin;
2558   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2559   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2560   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2561   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2562   PetscFunctionReturn(0);
2563 }
2564 
2565 
2566 /*--------------------------------------------------------------------------------------------------------------------*/
2567 #undef __FUNCT__
2568 #define __FUNCT__ "DMRegisterDestroy"
2569 /*@C
2570    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2571 
2572    Not Collective
2573 
2574    Level: advanced
2575 
2576 .keywords: DM, register, destroy
2577 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2578 @*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Free the registered-type list and force re-registration on the next DMSetType()/DMGetType() */
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2588 
2589 #undef __FUNCT__
2590 #define __FUNCT__ "DMLoad"
2591 /*@C
2592   DMLoad - Loads a DM that has been stored in binary  with DMView().
2593 
2594   Collective on PetscViewer
2595 
2596   Input Parameters:
2597 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2598            some related function before a call to DMLoad().
2599 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2600            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2601 
2602    Level: intermediate
2603 
2604   Notes:
2605    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2606 
2607   Notes for advanced users:
2608   Most users should not need to know the details of the binary storage
2609   format, since DMLoad() and DMView() completely hide these details.
2610   But for anyone who's interested, the standard binary matrix storage
2611   format is
2612 .vb
2613      has not yet been determined
2614 .ve
2615 
2616 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2617 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer is supported by this routine */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header: a DM_FILE_CLASSID marker followed by a 256-char type-name field */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  /* The stored type overrides any type previously set on newdm (see doc above) */
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Let the implementation read its type-specific payload, if it supports loading */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2640 
2641 /******************************** FEM Support **********************************/
2642 
2643 #undef __FUNCT__
2644 #define __FUNCT__ "DMPrintCellVector"
2645 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2646   PetscInt       f;
2647   PetscErrorCode ierr;
2648 
2649   PetscFunctionBegin;
2650   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2651   for (f = 0; f < len; ++f) {
2652     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2653   }
2654   PetscFunctionReturn(0);
2655 }
2656 
2657 #undef __FUNCT__
2658 #define __FUNCT__ "DMPrintCellMatrix"
2659 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2660   PetscInt       f, g;
2661   PetscErrorCode ierr;
2662 
2663   PetscFunctionBegin;
2664   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2665   for (f = 0; f < rows; ++f) {
2666     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2667     for (g = 0; g < cols; ++g) {
2668       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2669     }
2670     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2671   }
2672   PetscFunctionReturn(0);
2673 }
2674 
2675 #undef __FUNCT__
2676 #define __FUNCT__ "DMGetDefaultSection"
2677 /*@
2678   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2679 
2680   Input Parameter:
2681 . dm - The DM
2682 
2683   Output Parameter:
2684 . section - The PetscSection
2685 
2686   Level: intermediate
2687 
2688   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2689 
2690 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2691 @*/
2692 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2693   PetscFunctionBegin;
2694   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2695   PetscValidPointer(section, 2);
2696   *section = dm->defaultSection;
2697   PetscFunctionReturn(0);
2698 }
2699 
2700 #undef __FUNCT__
2701 #define __FUNCT__ "DMSetDefaultSection"
2702 /*@
2703   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2704 
2705   Input Parameters:
2706 + dm - The DM
2707 - section - The PetscSection
2708 
2709   Level: intermediate
2710 
2711   Note: Any existing Section will be destroyed
2712 
2713 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2714 @*/
2715 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2716   PetscInt       numFields;
2717   PetscInt       f;
2718   PetscErrorCode ierr;
2719 
2720   PetscFunctionBegin;
2721   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2722   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2723   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2724   dm->defaultSection = section;
2725   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2726   if (numFields) {
2727     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2728     for (f = 0; f < numFields; ++f) {
2729       const char *name;
2730 
2731       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2732       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2733     }
2734   }
2735   PetscFunctionReturn(0);
2736 }
2737 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection describing the global layout

  Level: intermediate

  Notes: This gets a borrowed reference, so the user should not destroy this PetscSection.

  The global section is created lazily on first use from the DM's default (local)
  PetscSection and its point PetscSF; both must have been set beforehand. Creation is
  collective, which is why this routine is collective.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazy creation: derive the global section from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also cache the value layout (dm->map) for the new global section */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2771 
2772 #undef __FUNCT__
2773 #define __FUNCT__ "DMSetDefaultGlobalSection"
2774 /*@
2775   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2776 
2777   Input Parameters:
2778 + dm - The DM
2779 - section - The PetscSection
2780 
2781   Level: intermediate
2782 
2783   Note: Any existing Section will be destroyed
2784 
2785 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2786 @*/
2787 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2788   PetscErrorCode ierr;
2789 
2790   PetscFunctionBegin;
2791   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2792   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2793   dm->defaultGlobalSection = section;
2794   PetscFunctionReturn(0);
2795 }
2796 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or PETSC_NULL if no default PetscSection has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative nroots is treated as "graph not yet set"; build it lazily below */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Derive the dof SF from the local and global section layouts */
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* Without a local section there is nothing to build the SF from */
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
2838 
2839 #undef __FUNCT__
2840 #define __FUNCT__ "DMSetDefaultSF"
2841 /*@
2842   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2843 
2844   Input Parameters:
2845 + dm - The DM
2846 - sf - The PetscSF
2847 
2848   Level: intermediate
2849 
2850   Note: Any previous SF is destroyed
2851 
2852 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2853 @*/
2854 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2855   PetscErrorCode ierr;
2856 
2857   PetscFunctionBegin;
2858   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2859   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2860   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2861   dm->defaultSF = sf;
2862   PetscFunctionReturn(0);
2863 }
2864 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The graph is stored in the DM's default SF (see DMGetDefaultSF()); each
  unconstrained local dof becomes a leaf pointing at its owning global dof (root).

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;   /* per-rank ownership ranges of global dofs */
  PetscInt       *local;    /* leaf: local dof indices */
  PetscSFNode    *remote;   /* root: (rank, index) of the owning global dof */
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of the owned (unconstrained) dofs so a global offset can be
     mapped to its owning rank via the ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative gdof encodes a remotely owned point as
     -(gdof+1) dofs (TODO confirm against PetscSectionCreateGlobalSection) */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill the leaf (local) indices and the root (remote) locations */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    /* Sanity check: global size must match either the constrained or full local size */
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local dof indices, skipping constrained dofs (cind is sorted ascending
       — assumed; TODO confirm) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: recover the true global offset from -(goff+1) and
         find the owning rank by binary search over the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* convert "not found" insertion point to owning rank */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root lives on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2953 
2954 #undef __FUNCT__
2955 #define __FUNCT__ "DMGetPointSF"
2956 /*@
2957   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2958 
2959   Input Parameter:
2960 . dm - The DM
2961 
2962   Output Parameter:
2963 . sf - The PetscSF
2964 
2965   Level: intermediate
2966 
2967   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2968 
2969 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2970 @*/
2971 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
2972   PetscFunctionBegin;
2973   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2974   PetscValidPointer(sf, 2);
2975   *sf = dm->sf;
2976   PetscFunctionReturn(0);
2977 }
2978 
2979 #undef __FUNCT__
2980 #define __FUNCT__ "DMSetPointSF"
2981 /*@
2982   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2983 
2984   Input Parameters:
2985 + dm - The DM
2986 - sf - The PetscSF
2987 
2988   Level: intermediate
2989 
2990 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2991 @*/
2992 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2993   PetscErrorCode ierr;
2994 
2995   PetscFunctionBegin;
2996   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2997   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2998   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2999   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3000   dm->sf = sf;
3001   PetscFunctionReturn(0);
3002 }
3003 
3004 #undef __FUNCT__
3005 #define __FUNCT__ "DMGetNumFields"
3006 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3007 {
3008   PetscFunctionBegin;
3009   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3010   PetscValidPointer(numFields, 2);
3011   *numFields = dm->numFields;
3012   PetscFunctionReturn(0);
3013 }
3014 
3015 #undef __FUNCT__
3016 #define __FUNCT__ "DMSetNumFields"
3017 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3018 {
3019   PetscInt       f;
3020   PetscErrorCode ierr;
3021 
3022   PetscFunctionBegin;
3023   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3024   for (f = 0; f < dm->numFields; ++f) {
3025     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3026   }
3027   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3028   dm->numFields = numFields;
3029   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3030   for (f = 0; f < dm->numFields; ++f) {
3031     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3032   }
3033   PetscFunctionReturn(0);
3034 }
3035 
3036 #undef __FUNCT__
3037 #define __FUNCT__ "DMGetField"
3038 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3039 {
3040   PetscFunctionBegin;
3041   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3042   PetscValidPointer(field, 2);
3043   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3044   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3045   *field = dm->fields[f];
3046   PetscFunctionReturn(0);
3047 }
3048 
3049 #undef __FUNCT__
3050 #define __FUNCT__ "DMSetCoordinates"
3051 /*@
3052   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3053 
3054   Collective on DM
3055 
3056   Input Parameters:
3057 + dm - the DM
3058 - c - coordinate vector
3059 
3060   Note:
3061   The coordinates do include those for ghost points, which are in the local vector
3062 
3063   Level: intermediate
3064 
3065 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3066 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLoca(), DMGetCoordinateDM()
3067 @*/
3068 PetscErrorCode DMSetCoordinates(DM dm, Vec c)
3069 {
3070   PetscErrorCode ierr;
3071 
3072   PetscFunctionBegin;
3073   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3074   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3075   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3076   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3077   dm->coordinates = c;
3078   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3079   PetscFunctionReturn(0);
3080 }
3081 
3082 #undef __FUNCT__
3083 #define __FUNCT__ "DMSetCoordinatesLocal"
3084 /*@
3085   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3086 
3087   Collective on DM
3088 
3089    Input Parameters:
3090 +  dm - the DM
3091 -  c - coordinate vector
3092 
3093   Note:
3094   The coordinates of ghost points can be set using DMSetCoordinates()
3095   followed by DMGetCoordinatesLocal(). This is intended to enable the
3096   setting of ghost coordinates outside of the domain.
3097 
3098   Level: intermediate
3099 
3100 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3101 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3102 @*/
3103 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3104 {
3105   PetscErrorCode ierr;
3106 
3107   PetscFunctionBegin;
3108   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3109   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3110   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3111   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3112   dm->coordinatesLocal = c;
3113   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3114   PetscFunctionReturn(0);
3115 }
3116 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  If only local coordinates have been set, the global vector is assembled here on
  first use by scattering local to global; in that case this call is effectively
  collective — NOTE(review): confirm callers account for this.

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    /* Lazily build the global vector from the local one via the coordinate DM */
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3162 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  If only global coordinates have been set, the local vector is assembled here on
  first use by scattering global to local (which is why this call is collective).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = PETSC_NULL;

    /* Lazily build the local (ghosted) vector from the global one via the coordinate DM */
    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3208 
3209 #undef __FUNCT__
3210 #define __FUNCT__ "DMGetCoordinateDM"
3211 /*@
3212   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3213 
3214   Collective on DM
3215 
3216   Input Parameter:
3217 . dm - the DM
3218 
3219   Output Parameter:
3220 . cdm - coordinate DM
3221 
3222   Level: intermediate
3223 
3224 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3225 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3226 @*/
3227 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3228 {
3229   PetscErrorCode ierr;
3230 
3231   PetscFunctionBegin;
3232   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3233   PetscValidPointer(cdm,2);
3234   if (!dm->coordinateDM) {
3235     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3236     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3237   }
3238   *cdm = dm->coordinateDM;
3239   PetscFunctionReturn(0);
3240 }
3241 
3242 #undef __FUNCT__
3243 #define __FUNCT__ "DMLocatePoints"
3244 /*@
3245   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3246 
3247   Not collective
3248 
3249   Input Parameters:
3250 + dm - The DM
3251 - v - The Vec of points
3252 
3253   Output Parameter:
3254 . cells - The local cell numbers for cells which contain the points
3255 
3256   Level: developer
3257 
3258 .keywords: point location, mesh
3259 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3260 @*/
3261 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3262 {
3263   PetscErrorCode ierr;
3264 
3265   PetscFunctionBegin;
3266   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3267   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3268   PetscValidPointer(cells,3);
3269   if (dm->ops->locatepoints) {
3270     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3271   } else {
3272     SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3273   }
3274   PetscFunctionReturn(0);
3275 }
3276