xref: /petsc/src/dm/interface/dm.c (revision 6d75e2106b7ce71d78ffad317d596124b1823eef)
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/

/* Class id for all DM objects and logging events for DM operations */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
#undef __FUNCT__
#define __FUNCT__ "DMViewFromOptions"
/*
  DMViewFromOptions - Processes the command line options to determine if/how
  the given DM is to be viewed.

  Input Parameters:
+ dm         - the DM to (possibly) view
- optionname - the option to query (e.g. "-dm_view")

  Note: the viewer obtained from the options database is destroyed here after use.
*/
PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
{
  PetscErrorCode    ierr;
  PetscBool         flg;
  PetscViewer       viewer;
  PetscViewerFormat format;

  PetscFunctionBegin;
  /* flg is set only when the user supplied the option; viewer/format come from its value */
  ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
    ierr = DMView(dm,viewer);CHKERRQ(ierr);
    ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
30 
31 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the dependent packages must be initialized explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with no type-specific operations; DMSetType() is expected to fill these in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;                    /* default: no exploitable block structure */
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* nullspaceConstructors has a fixed capacity of 10 slots (see DMSetNullSpaceConstructor()) */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
90 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Free any previously set type name before storing a copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
119 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (PETSC_NULL if no DM was composed with the vector)

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* Look up the DM attached by VecSetDM() under the key "__PETSc_dm" */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Free any previously set type name before storing a copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (PETSC_NULL if no DM was composed with the matrix)

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* Look up the DM attached by MatSetDM() under the key "__PETSc_dm" */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
230 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or PETSC_NULL to remove any current association

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be PETSC_NULL, which clears the association */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
256 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
288 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to 0 on return

    Level: developer

    Notes:
    The DM and some of its cached vectors reference each other circularly, so the
    reference count is first adjusted for those internal references before deciding
    whether the object can actually be torn down.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    /* the cached solution vector also holds a back-reference to this DM */
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If outside references remain beyond the circular ones, just drop this handle */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks (coarsen, refine, subdomain, global-to-local) */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays; all must have been returned with DMRestoreWorkArray() */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Release solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* NOTE(review): ops->destroy is called unconditionally; assumes every DM type sets it */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
429 
430 #undef __FUNCT__
431 #define __FUNCT__ "DMSetUp"
432 /*@
433     DMSetUp - sets up the data structures inside a DM object
434 
435     Collective on DM
436 
437     Input Parameter:
438 .   dm - the DM object to setup
439 
440     Level: developer
441 
442 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
443 
444 @*/
445 PetscErrorCode  DMSetUp(DM dm)
446 {
447   PetscErrorCode ierr;
448 
449   PetscFunctionBegin;
450   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
451   if (dm->setupcalled) PetscFunctionReturn(0);
452   if (dm->ops->setup) {
453     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
454   }
455   dm->setupcalled = PETSC_TRUE;
456   PetscFunctionReturn(0);
457 }
458 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;  /* default matrix type when none was set */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
    /* Let the concrete DM implementation process its own options */
    if (dm->ops->setfromoptions) {
      ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
    }
    /* process any options handlers added with PetscObjectAddOptionsHandler() */
    ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Finally honor -dm_view if the user requested it */
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
508 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer (PETSC_NULL uses the stdout viewer for the DM's communicator)

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary viewers get a header (classid + fixed-width type name) before the type-specific data */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
549 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Dispatch to the type-specific implementation installed by DMSetType() */
  ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
577 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Dispatch to the type-specific implementation installed by DMSetType() */
  ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "DMGetLocalToGlobalMapping"
608 /*@
609    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
610 
611    Collective on DM
612 
613    Input Parameter:
614 .  dm - the DM that provides the mapping
615 
616    Output Parameter:
617 .  ltog - the mapping
618 
619    Level: intermediate
620 
621    Notes:
622    This mapping can then be used by VecSetLocalToGlobalMapping() or
623    MatSetLocalToGlobalMapping().
624 
625 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
626 @*/
627 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
628 {
629   PetscErrorCode ierr;
630 
631   PetscFunctionBegin;
632   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
633   PetscValidPointer(ltog,2);
634   if (!dm->ltogmap) {
635     PetscSection section, sectionGlobal;
636 
637     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
638     if (section) {
639       PetscInt      *ltog;
640       PetscInt       pStart, pEnd, size, p, l;
641 
642       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
643       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
644       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
645       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
646       for (p = pStart, l = 0; p < pEnd; ++p) {
647         PetscInt dof, off, c;
648 
649         /* Should probably use constrained dofs */
650         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
651         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
652         for (c = 0; c < dof; ++c, ++l) {
653           ltog[l] = off+c;
654         }
655       }
656       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
657       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
658     } else {
659       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
660       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
661     }
662   }
663   *ltog = dm->ltogmap;
664   PetscFunctionReturn(0);
665 }
666 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {  /* build lazily on first request */
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* Block size 1: the blocked mapping is just the point mapping; share it */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
709 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the block size has not been determined yet */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
736 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
774 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
807 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "DMCreateMatrix"
841 /*@C
842     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
843 
844     Collective on DM
845 
846     Input Parameter:
847 +   dm - the DM object
848 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
849             any type which inherits from one of these (such as MATAIJ)
850 
851     Output Parameter:
852 .   mat - the empty Jacobian
853 
854     Level: beginner
855 
856     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
857        do not need to do it yourself.
858 
859        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
860        the nonzero pattern call DMDASetMatPreallocateOnly()
861 
862        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
863        internally by PETSc.
864 
865        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
866        the indices for the global numbering for DMDAs which is complicated.
867 
868 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
869 
870 @*/
871 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
872 {
873   PetscErrorCode ierr;
874 
875   PetscFunctionBegin;
876   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
877 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
878   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
879 #endif
880   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
881   PetscValidPointer(mat,3);
882   if (dm->mattype) {
883     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
884   } else {
885     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
886   }
887   PetscFunctionReturn(0);
888 }
889 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer
.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
912 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum number of elements
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array

  Level: developer

  Notes:
  Arrays are cached on the DM in a free list (workin); checked-out arrays are
  tracked in workout and must be returned with DMRestoreWorkArray().

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse a previously returned link from the free list */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer if the cached one is too small for this request */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
958 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum number of elements (as passed to DMGetWorkArray())
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the array pointer; set to PETSC_NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the matching link on the checked-out list and move it back to the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  /* not reached; keeps the usual function epilogue */
  PetscFunctionReturn(0);
}
997 
998 #undef __FUNCT__
999 #define __FUNCT__ "DMSetNullSpaceConstructor"
1000 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1001 {
1002   PetscFunctionBegin;
1003   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1004   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1005   dm->nullspaceConstructors[field] = nullsp;
1006   PetscFunctionReturn(0);
1007 }
1008 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Initialize every requested output so callers get defined values on all paths */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the unconstrained dofs of each field over locally-owned points
       (points with positive global dof count) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0; /* reuse as a per-field write cursor in the second pass */
    }
    /* Second pass: record the global indices, walking each point's offset range
       field by field in the order the fields are stored */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f] */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: defer to the type-specific implementation, if any */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1121 
1122 #undef __FUNCT__
1123 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1124 /*@C
1125   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1126 
1127   Not Collective
1128 
1129   Input Parameters:
1130 + dm   - the DM object
1131 - name - the name of the field decomposition
1132 
1133   Output Parameter:
1134 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1135 
1136   Level: advanced
1137 
1138 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1139 @*/
1140 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1141 {
1142   PetscErrorCode ierr;
1143 
1144   PetscFunctionBegin;
1145   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1146   PetscValidCharPointer(name,2);
1147   PetscValidPointer(ddm,3);
1148   *ddm = PETSC_NULL;
1149   if (dm->ops->createfielddecompositiondm) {
1150     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1151   }
1152   PetscFunctionReturn(0);
1153 }
1154 
1155 #undef __FUNCT__
1156 #define __FUNCT__ "DMCreateFieldDecomposition"
1157 /*@C
1158   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1159                           corresponding to different fields: each IS contains the global indices of the dofs of the
1160                           corresponding field. The optional list of DMs define the DM for each subproblem.
1161                           Generalizes DMCreateFieldIS().
1162 
1163   Not collective
1164 
1165   Input Parameter:
1166 . dm - the DM object
1167 
1168   Output Parameters:
1169 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1170 . namelist  - The name for each field (or PETSC_NULL if not requested)
1171 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1172 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1173 
1174   Level: intermediate
1175 
1176   Notes:
1177   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1178   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1179   and all of the arrays should be freed with PetscFree().
1180 
1181 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1182 @*/
1183 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1184 {
1185   PetscErrorCode ierr;
1186 
1187   PetscFunctionBegin;
1188   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1189   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1190   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1191   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1192   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1193   if (!dm->ops->createfielddecomposition) {
1194     PetscSection section;
1195     PetscInt     numFields, f;
1196 
1197     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1198     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1199     if (section && numFields && dm->ops->createsubdm) {
1200       *len = numFields;
1201       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1202       for (f = 0; f < numFields; ++f) {
1203         const char *fieldName;
1204 
1205         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1206         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1207         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1208       }
1209     } else {
1210       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1211       /* By default there are no DMs associated with subproblems. */
1212       if (dmlist) *dmlist = PETSC_NULL;
1213     }
1214   }
1215   else {
1216     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1217   }
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "DMCreateSubDM"
1223 /*@C
1224   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1225                   The fields are defined by DMCreateFieldIS().
1226 
1227   Not collective
1228 
1229   Input Parameters:
1230 + dm - the DM object
1231 . numFields - number of fields in this subproblem
1232 - len       - The number of subproblems in the decomposition (or PETSC_NULL if not requested)
1233 
1234   Output Parameters:
1235 . is - The global indices for the subproblem
1236 - dm - The DM for the subproblem
1237 
1238   Level: intermediate
1239 
1240 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1241 @*/
1242 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1243 {
1244   PetscErrorCode ierr;
1245 
1246   PetscFunctionBegin;
1247   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1248   PetscValidPointer(fields,3);
1249   if (is) {PetscValidPointer(is,4);}
1250   if (subdm) {PetscValidPointer(subdm,5);}
1251   if (dm->ops->createsubdm) {
1252     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1253   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 #undef __FUNCT__
1258 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1259 /*@C
1260   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1261 
1262   Not Collective
1263 
1264   Input Parameters:
1265 + dm   - the DM object
1266 - name - the name of the subdomain decomposition
1267 
1268   Output Parameter:
1269 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1270 
1271   Level: advanced
1272 
1273 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1274 @*/
1275 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1276 {
1277   PetscErrorCode ierr;
1278 
1279   PetscFunctionBegin;
1280   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1281   PetscValidCharPointer(name,2);
1282   PetscValidPointer(ddm,3);
1283   *ddm = PETSC_NULL;
1284   if (dm->ops->createdomaindecompositiondm) {
1285     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1286   }
1287   PetscFunctionReturn(0);
1288 }
1289 
1290 #undef __FUNCT__
1291 #define __FUNCT__ "DMCreateDomainDecomposition"
1292 /*@C
1293   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1294                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1295                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1296                           define a nonoverlapping covering, while outer subdomains can overlap.
1297                           The optional list of DMs define the DM for each subproblem.
1298 
1299   Not collective
1300 
1301   Input Parameter:
1302 . dm - the DM object
1303 
1304   Output Parameters:
1305 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1306 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1307 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1308 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1309 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1310 
1311   Level: intermediate
1312 
1313   Notes:
1314   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1315   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1316   and all of the arrays should be freed with PetscFree().
1317 
1318 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1319 @*/
1320 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1321 {
1322   PetscErrorCode ierr;
1323   DMSubDomainHookLink link;
1324   PetscInt            i,l;
1325 
1326   PetscFunctionBegin;
1327   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1328   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1329   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1330   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1331   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1332   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1333   if (dm->ops->createdomaindecomposition) {
1334     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1335   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1336   if (len) *len = l;
1337   for (i = 0; i < l; i++) {
1338     for (link=dm->subdomainhook; link; link=link->next) {
1339       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1340     }
1341     (*dmlist)[i]->ctx = dm->ctx;
1342   }
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 
1347 #undef __FUNCT__
1348 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1349 /*@C
1350   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1351 
1352   Not collective
1353 
1354   Input Parameters:
1355 + dm - the DM object
1356 . n  - the number of subdomain scatters
1357 - subdms - the local subdomains
1358 
1359   Output Parameters:
1360 + n     - the number of scatters returned
1361 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1362 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1363 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1364 
1365   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1366   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1367   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1368   solution and residual data.
1369 
1370   Level: developer
1371 
1372 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1373 @*/
1374 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1375 {
1376   PetscErrorCode ierr;
1377 
1378   PetscFunctionBegin;
1379   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1380   PetscValidPointer(subdms,3);
1381   PetscValidPointer(iscat,4);
1382   PetscValidPointer(oscat,5);
1383   PetscValidPointer(gscat,6);
1384   if (dm->ops->createddscatters) {
1385     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1386   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1387   PetscFunctionReturn(0);
1388 }
1389 
1390 #undef __FUNCT__
1391 #define __FUNCT__ "DMRefine"
1392 /*@
1393   DMRefine - Refines a DM object
1394 
1395   Collective on DM
1396 
1397   Input Parameter:
1398 + dm   - the DM object
1399 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1400 
1401   Output Parameter:
1402 . dmf - the refined DM, or PETSC_NULL
1403 
1404   Note: If no refinement was done, the return value is PETSC_NULL
1405 
1406   Level: developer
1407 
1408 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1409 @*/
1410 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1411 {
1412   PetscErrorCode ierr;
1413   DMRefineHookLink link;
1414 
1415   PetscFunctionBegin;
1416   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1417   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1418   if (*dmf) {
1419     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1420     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1421     (*dmf)->ctx       = dm->ctx;
1422     (*dmf)->leveldown = dm->leveldown;
1423     (*dmf)->levelup   = dm->levelup + 1;
1424     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1425     for (link=dm->refinehook; link; link=link->next) {
1426       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1427     }
1428   }
1429   PetscFunctionReturn(0);
1430 }
1431 
1432 #undef __FUNCT__
1433 #define __FUNCT__ "DMRefineHookAdd"
1434 /*@
1435    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1436 
1437    Logically Collective
1438 
1439    Input Arguments:
1440 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1441 .  refinehook - function to run when setting up a coarser level
1442 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1443 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1444 
1445    Calling sequence of refinehook:
1446 $    refinehook(DM coarse,DM fine,void *ctx);
1447 
1448 +  coarse - coarse level DM
1449 .  fine - fine level DM to interpolate problem to
1450 -  ctx - optional user-defined function context
1451 
1452    Calling sequence for interphook:
1453 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1454 
1455 +  coarse - coarse level DM
1456 .  interp - matrix interpolating a coarse-level solution to the finer grid
1457 .  fine - fine level DM to update
1458 -  ctx - optional user-defined function context
1459 
1460    Level: advanced
1461 
1462    Notes:
1463    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1464 
1465    If this function is called multiple times, the hooks will be run in the order they are added.
1466 
1467 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1468 @*/
1469 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1470 {
1471   PetscErrorCode ierr;
1472   DMRefineHookLink link,*p;
1473 
1474   PetscFunctionBegin;
1475   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1476   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1477   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1478   link->refinehook = refinehook;
1479   link->interphook = interphook;
1480   link->ctx = ctx;
1481   link->next = PETSC_NULL;
1482   *p = link;
1483   PetscFunctionReturn(0);
1484 }
1485 
1486 #undef __FUNCT__
1487 #define __FUNCT__ "DMInterpolate"
1488 /*@
1489    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1490 
1491    Collective if any hooks are
1492 
1493    Input Arguments:
1494 +  coarse - coarser DM to use as a base
1495 .  restrct - interpolation matrix, apply using MatInterpolate()
1496 -  fine - finer DM to update
1497 
1498    Level: developer
1499 
1500 .seealso: DMRefineHookAdd(), MatInterpolate()
1501 @*/
1502 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1503 {
1504   PetscErrorCode ierr;
1505   DMRefineHookLink link;
1506 
1507   PetscFunctionBegin;
1508   for (link=fine->refinehook; link; link=link->next) {
1509     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1510   }
1511   PetscFunctionReturn(0);
1512 }
1513 
1514 #undef __FUNCT__
1515 #define __FUNCT__ "DMGetRefineLevel"
1516 /*@
1517     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1518 
1519     Not Collective
1520 
1521     Input Parameter:
1522 .   dm - the DM object
1523 
1524     Output Parameter:
1525 .   level - number of refinements
1526 
1527     Level: developer
1528 
1529 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1530 
1531 @*/
1532 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1533 {
1534   PetscFunctionBegin;
1535   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1536   *level = dm->levelup;
1537   PetscFunctionReturn(0);
1538 }
1539 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - the DM
.  g - global vector
.  mode - insertion mode
.  l - local vector
-  ctx - optional user-defined function context

   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - the DM
.  g - global vector
.  mode - insertion mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1597 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Notes: the update must be completed with a matching call to DMGlobalToLocalEnd().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run begin-hooks registered with DMGlobalToLocalHookAdd() before any communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The star-forest path only supports insertion */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; the *_ALL_VALUES modes are
       mapped to their plain counterparts before dispatch */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1643 
1644 #undef __FUNCT__
1645 #define __FUNCT__ "DMGlobalToLocalEnd"
1646 /*@
1647     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1648 
1649     Neighbor-wise Collective on DM
1650 
1651     Input Parameters:
1652 +   dm - the DM object
1653 .   g - the global vector
1654 .   mode - INSERT_VALUES or ADD_VALUES
1655 -   l - the local vector
1656 
1657 
1658     Level: beginner
1659 
1660 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1661 
1662 @*/
1663 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1664 {
1665   PetscSF        sf;
1666   PetscErrorCode ierr;
1667   PetscScalar    *lArray, *gArray;
1668   DMGlobalToLocalHookLink link;
1669 
1670   PetscFunctionBegin;
1671   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1672   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1673   if (sf) {
1674   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1675 
1676     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1677     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1678     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1679     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1680     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1681   } else {
1682     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1683   }
1684   for (link=dm->gtolhook; link; link=link->next) {
1685     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1686   }
1687   PetscFunctionReturn(0);
1688 }
1689 
1690 #undef __FUNCT__
1691 #define __FUNCT__ "DMLocalToGlobalBegin"
1692 /*@
1693     DMLocalToGlobalBegin - updates global vectors from local vectors
1694 
1695     Neighbor-wise Collective on DM
1696 
1697     Input Parameters:
1698 +   dm - the DM object
1699 .   l - the local vector
1700 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1701            base point.
1702 - - the global vector
1703 
1704     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1705            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1706            global array to the final global array with VecAXPY().
1707 
1708     Level: beginner
1709 
1710 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1711 
1712 @*/
1713 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1714 {
1715   PetscSF        sf;
1716   PetscErrorCode ierr;
1717 
1718   PetscFunctionBegin;
1719   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1720   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1721   if (sf) {
1722     MPI_Op       op;
1723     PetscScalar *lArray, *gArray;
1724 
1725     switch(mode) {
1726     case INSERT_VALUES:
1727     case INSERT_ALL_VALUES:
1728 #if defined(PETSC_HAVE_MPI_REPLACE)
1729       op = MPI_REPLACE; break;
1730 #else
1731       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1732 #endif
1733     case ADD_VALUES:
1734     case ADD_ALL_VALUES:
1735       op = MPI_SUM; break;
1736   default:
1737     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1738     }
1739     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1740     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1741     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1742     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1743     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1744   } else {
1745     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1746   }
1747   PetscFunctionReturn(0);
1748 }
1749 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector

    Notes: must be preceded by a matching call to DMLocalToGlobalBegin().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode to the MPI reduction used by the star forest;
       without MPI_REPLACE (MPI-2) insertion cannot be supported, and SETERRQ
       returns before the ADD cases below can be reached. */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1805 
1806 #undef __FUNCT__
1807 #define __FUNCT__ "DMCoarsen"
1808 /*@
1809     DMCoarsen - Coarsens a DM object
1810 
1811     Collective on DM
1812 
1813     Input Parameter:
1814 +   dm - the DM object
1815 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1816 
1817     Output Parameter:
1818 .   dmc - the coarsened DM
1819 
1820     Level: developer
1821 
1822 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1823 
1824 @*/
1825 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1826 {
1827   PetscErrorCode ierr;
1828   DMCoarsenHookLink link;
1829 
1830   PetscFunctionBegin;
1831   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1832   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1833   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1834   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1835   (*dmc)->ctx       = dm->ctx;
1836   (*dmc)->levelup   = dm->levelup;
1837   (*dmc)->leveldown = dm->leveldown + 1;
1838   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1839   for (link=dm->coarsenhook; link; link=link->next) {
1840     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1841   }
1842   PetscFunctionReturn(0);
1843 }
1844 
1845 #undef __FUNCT__
1846 #define __FUNCT__ "DMCoarsenHookAdd"
1847 /*@
1848    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1849 
1850    Logically Collective
1851 
1852    Input Arguments:
1853 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1854 .  coarsenhook - function to run when setting up a coarser level
1855 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1856 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1857 
1858    Calling sequence of coarsenhook:
1859 $    coarsenhook(DM fine,DM coarse,void *ctx);
1860 
1861 +  fine - fine level DM
1862 .  coarse - coarse level DM to restrict problem to
1863 -  ctx - optional user-defined function context
1864 
1865    Calling sequence for restricthook:
1866 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1867 
1868 +  fine - fine level DM
1869 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1870 .  rscale - scaling vector for restriction
1871 .  inject - matrix restricting by injection
1872 .  coarse - coarse level DM to update
1873 -  ctx - optional user-defined function context
1874 
1875    Level: advanced
1876 
1877    Notes:
1878    This function is only needed if auxiliary data needs to be set up on coarse grids.
1879 
1880    If this function is called multiple times, the hooks will be run in the order they are added.
1881 
1882    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1883    extract the finest level information from its context (instead of from the SNES).
1884 
1885 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1886 @*/
1887 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1888 {
1889   PetscErrorCode ierr;
1890   DMCoarsenHookLink link,*p;
1891 
1892   PetscFunctionBegin;
1893   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1894   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1895   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1896   link->coarsenhook = coarsenhook;
1897   link->restricthook = restricthook;
1898   link->ctx = ctx;
1899   link->next = PETSC_NULL;
1900   *p = link;
1901   PetscFunctionReturn(0);
1902 }
1903 
1904 #undef __FUNCT__
1905 #define __FUNCT__ "DMRestrict"
1906 /*@
1907    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1908 
1909    Collective if any hooks are
1910 
1911    Input Arguments:
1912 +  fine - finer DM to use as a base
1913 .  restrct - restriction matrix, apply using MatRestrict()
1914 .  inject - injection matrix, also use MatRestrict()
1915 -  coarse - coarer DM to update
1916 
1917    Level: developer
1918 
1919 .seealso: DMCoarsenHookAdd(), MatRestrict()
1920 @*/
1921 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1922 {
1923   PetscErrorCode ierr;
1924   DMCoarsenHookLink link;
1925 
1926   PetscFunctionBegin;
1927   for (link=fine->coarsenhook; link; link=link->next) {
1928     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1929   }
1930   PetscFunctionReturn(0);
1931 }
1932 
1933 #undef __FUNCT__
1934 #define __FUNCT__ "DMSubDomainHookAdd"
1935 /*@
1936    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1937 
1938    Logically Collective
1939 
1940    Input Arguments:
1941 +  global - global DM
1942 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1943 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1944 
1945    Calling sequence for restricthook:
1946 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1947 
1948 +  global - global DM
1949 .  out    - scatter to the outer (with ghost and overlap points) block vector
1950 .  in     - scatter to block vector values only owned locally
1951 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1952 -  ctx - optional user-defined function context
1953 
1954    Level: advanced
1955 
1956    Notes:
1957    This function is only needed if auxiliary data needs to be set up on coarse grids.
1958 
1959    If this function is called multiple times, the hooks will be run in the order they are added.
1960 
1961    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1962    extract the finest level information from its context (instead of from the SNES).
1963 
1964 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1965 @*/
1966 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1967 {
1968   PetscErrorCode      ierr;
1969   DMSubDomainHookLink link,*p;
1970 
1971   PetscFunctionBegin;
1972   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1973   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1974   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
1975   link->restricthook = restricthook;
1976   link->ddhook = ddhook;
1977   link->ctx = ctx;
1978   link->next = PETSC_NULL;
1979   *p = link;
1980   PetscFunctionReturn(0);
1981 }
1982 
1983 #undef __FUNCT__
1984 #define __FUNCT__ "DMSubDomainRestrict"
1985 /*@
1986    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1987 
1988    Collective if any hooks are
1989 
1990    Input Arguments:
1991 +  fine - finer DM to use as a base
1992 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
1993 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
1994 -  coarse - coarer DM to update
1995 
1996    Level: developer
1997 
1998 .seealso: DMCoarsenHookAdd(), MatRestrict()
1999 @*/
2000 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2001 {
2002   PetscErrorCode ierr;
2003   DMSubDomainHookLink link;
2004 
2005   PetscFunctionBegin;
2006   for (link=global->subdomainhook; link; link=link->next) {
2007     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2008   }
2009   PetscFunctionReturn(0);
2010 }
2011 
2012 #undef __FUNCT__
2013 #define __FUNCT__ "DMGetCoarsenLevel"
2014 /*@
2015     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2016 
2017     Not Collective
2018 
2019     Input Parameter:
2020 .   dm - the DM object
2021 
2022     Output Parameter:
2023 .   level - number of coarsenings
2024 
2025     Level: developer
2026 
2027 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2028 
2029 @*/
2030 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2031 {
2032   PetscFunctionBegin;
2033   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2034   *level = dm->leveldown;
2035   PetscFunctionReturn(0);
2036 }
2037 
2038 
2039 
2040 #undef __FUNCT__
2041 #define __FUNCT__ "DMRefineHierarchy"
2042 /*@C
2043     DMRefineHierarchy - Refines a DM object, all levels at once
2044 
2045     Collective on DM
2046 
2047     Input Parameter:
2048 +   dm - the DM object
2049 -   nlevels - the number of levels of refinement
2050 
2051     Output Parameter:
2052 .   dmf - the refined DM hierarchy
2053 
2054     Level: developer
2055 
2056 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2057 
2058 @*/
2059 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2060 {
2061   PetscErrorCode ierr;
2062 
2063   PetscFunctionBegin;
2064   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2065   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2066   if (nlevels == 0) PetscFunctionReturn(0);
2067   if (dm->ops->refinehierarchy) {
2068     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2069   } else if (dm->ops->refine) {
2070     PetscInt i;
2071 
2072     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2073     for (i=1; i<nlevels; i++) {
2074       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2075     }
2076   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2077   PetscFunctionReturn(0);
2078 }
2079 
2080 #undef __FUNCT__
2081 #define __FUNCT__ "DMCoarsenHierarchy"
2082 /*@C
2083     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2084 
2085     Collective on DM
2086 
2087     Input Parameter:
2088 +   dm - the DM object
2089 -   nlevels - the number of levels of coarsening
2090 
2091     Output Parameter:
2092 .   dmc - the coarsened DM hierarchy
2093 
2094     Level: developer
2095 
2096 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2097 
2098 @*/
2099 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2100 {
2101   PetscErrorCode ierr;
2102 
2103   PetscFunctionBegin;
2104   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2105   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2106   if (nlevels == 0) PetscFunctionReturn(0);
2107   PetscValidPointer(dmc,3);
2108   if (dm->ops->coarsenhierarchy) {
2109     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2110   } else if (dm->ops->coarsen) {
2111     PetscInt i;
2112 
2113     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2114     for (i=1; i<nlevels; i++) {
2115       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2116     }
2117   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2118   PetscFunctionReturn(0);
2119 }
2120 
2121 #undef __FUNCT__
2122 #define __FUNCT__ "DMCreateAggregates"
2123 /*@
2124    DMCreateAggregates - Gets the aggregates that map between
2125    grids associated with two DMs.
2126 
2127    Collective on DM
2128 
2129    Input Parameters:
2130 +  dmc - the coarse grid DM
2131 -  dmf - the fine grid DM
2132 
2133    Output Parameters:
2134 .  rest - the restriction matrix (transpose of the projection matrix)
2135 
2136    Level: intermediate
2137 
2138 .keywords: interpolation, restriction, multigrid
2139 
2140 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2141 @*/
2142 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2143 {
2144   PetscErrorCode ierr;
2145 
2146   PetscFunctionBegin;
2147   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2148   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2149   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2150   PetscFunctionReturn(0);
2151 }
2152 
2153 #undef __FUNCT__
2154 #define __FUNCT__ "DMSetApplicationContextDestroy"
2155 /*@C
2156     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2157 
2158     Not Collective
2159 
2160     Input Parameters:
2161 +   dm - the DM object
2162 -   destroy - the destroy function
2163 
2164     Level: intermediate
2165 
2166 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2167 
2168 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the callback; presumably invoked on dm->ctx during DMDestroy() — not visible here */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2176 
2177 #undef __FUNCT__
2178 #define __FUNCT__ "DMSetApplicationContext"
2179 /*@
2180     DMSetApplicationContext - Set a user context into a DM object
2181 
2182     Not Collective
2183 
2184     Input Parameters:
2185 +   dm - the DM object
2186 -   ctx - the user context
2187 
2188     Level: intermediate
2189 
2190 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2191 
2192 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores only the pointer; ownership stays with the caller unless a
     destroy routine is registered with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2200 
2201 #undef __FUNCT__
2202 #define __FUNCT__ "DMGetApplicationContext"
2203 /*@
2204     DMGetApplicationContext - Gets a user context from a DM object
2205 
2206     Not Collective
2207 
2208     Input Parameter:
2209 .   dm - the DM object
2210 
2211     Output Parameter:
2212 .   ctx - the user context
2213 
2214     Level: intermediate
2215 
2216 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2217 
2218 @*/
2219 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2220 {
2221   PetscFunctionBegin;
2222   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2223   *(void**)ctx = dm->ctx;
2224   PetscFunctionReturn(0);
2225 }
2226 
2227 #undef __FUNCT__
2228 #define __FUNCT__ "DMSetVariableBounds"
2229 /*@C
2230     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2231 
2232     Logically Collective on DM
2233 
2234     Input Parameter:
2235 +   dm - the DM object
2236 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2237 
2238     Level: intermediate
2239 
2240 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2241          DMSetJacobian()
2242 
2243 @*/
2244 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2245 {
2246   PetscFunctionBegin;
2247   dm->ops->computevariablebounds = f;
2248   PetscFunctionReturn(0);
2249 }
2250 
2251 #undef __FUNCT__
2252 #define __FUNCT__ "DMHasVariableBounds"
2253 /*@
2254     DMHasVariableBounds - does the DM object have a variable bounds function?
2255 
2256     Not Collective
2257 
2258     Input Parameter:
2259 .   dm - the DM object to destroy
2260 
2261     Output Parameter:
2262 .   flg - PETSC_TRUE if the variable bounds function exists
2263 
2264     Level: developer
2265 
2266 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2267 
2268 @*/
2269 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2270 {
2271   PetscFunctionBegin;
2272   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2273   PetscFunctionReturn(0);
2274 }
2275 
2276 #undef __FUNCT__
2277 #define __FUNCT__ "DMComputeVariableBounds"
2278 /*@C
2279     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2280 
2281     Logically Collective on DM
2282 
2283     Input Parameters:
2284 +   dm - the DM object to destroy
2285 -   x  - current solution at which the bounds are computed
2286 
2287     Output parameters:
2288 +   xl - lower bound
2289 -   xu - upper bound
2290 
2291     Level: intermediate
2292 
2293 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2294 
2295 @*/
2296 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2297 {
2298   PetscErrorCode ierr;
2299   PetscFunctionBegin;
2300   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2301   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2302   if (dm->ops->computevariablebounds) {
2303     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2304   }
2305   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2306   PetscFunctionReturn(0);
2307 }
2308 
2309 #undef __FUNCT__
2310 #define __FUNCT__ "DMHasColoring"
2311 /*@
2312     DMHasColoring - does the DM object have a method of providing a coloring?
2313 
2314     Not Collective
2315 
2316     Input Parameter:
2317 .   dm - the DM object
2318 
2319     Output Parameter:
2320 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2321 
2322     Level: developer
2323 
2324 .seealso DMHasFunction(), DMCreateColoring()
2325 
2326 @*/
2327 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2328 {
2329   PetscFunctionBegin;
2330   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2331   PetscFunctionReturn(0);
2332 }
2333 
2334 #undef  __FUNCT__
2335 #define __FUNCT__ "DMSetVec"
2336 /*@C
2337     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2338 
2339     Collective on DM
2340 
2341     Input Parameter:
2342 +   dm - the DM object
2343 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2344 
2345     Level: developer
2346 
2347 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2348 
2349 @*/
2350 PetscErrorCode  DMSetVec(DM dm,Vec x)
2351 {
2352   PetscErrorCode ierr;
2353   PetscFunctionBegin;
2354   if (x) {
2355     if (!dm->x) {
2356       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2357     }
2358     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2359   }
2360   else if (dm->x) {
2361     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2362   }
2363   PetscFunctionReturn(0);
2364 }
2365 
2366 PetscFList DMList                       = PETSC_NULL;
2367 PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2368 
2369 #undef __FUNCT__
2370 #define __FUNCT__ "DMSetType"
2371 /*@C
2372   DMSetType - Builds a DM, for a particular DM implementation.
2373 
2374   Collective on DM
2375 
2376   Input Parameters:
2377 + dm     - The DM object
2378 - method - The name of the DM type
2379 
2380   Options Database Key:
2381 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2382 
2383   Notes:
2384   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2385 
2386   Level: intermediate
2387 
2388 .keywords: DM, set, type
2389 .seealso: DMGetType(), DMCreate()
2390 @*/
2391 PetscErrorCode  DMSetType(DM dm, DMType method)
2392 {
2393   PetscErrorCode (*r)(DM);
2394   PetscBool      match;
2395   PetscErrorCode ierr;
2396 
2397   PetscFunctionBegin;
2398   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2399   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2400   if (match) PetscFunctionReturn(0);
2401 
2402   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2403   ierr = PetscFListFind(((PetscObject)dm)->comm, DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2404   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2405 
2406   if (dm->ops->destroy) {
2407     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2408     dm->ops->destroy = PETSC_NULL;
2409   }
2410   ierr = (*r)(dm);CHKERRQ(ierr);
2411   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2412   PetscFunctionReturn(0);
2413 }
2414 
2415 #undef __FUNCT__
2416 #define __FUNCT__ "DMGetType"
2417 /*@C
2418   DMGetType - Gets the DM type name (as a string) from the DM.
2419 
2420   Not Collective
2421 
2422   Input Parameter:
2423 . dm  - The DM
2424 
2425   Output Parameter:
2426 . type - The DM type name
2427 
2428   Level: intermediate
2429 
2430 .keywords: DM, get, type, name
2431 .seealso: DMSetType(), DMCreate()
2432 @*/
2433 PetscErrorCode  DMGetType(DM dm, DMType *type)
2434 {
2435   PetscErrorCode ierr;
2436 
2437   PetscFunctionBegin;
2438   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2439   PetscValidCharPointer(type,2);
2440   if (!DMRegisterAllCalled) {
2441     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2442   }
2443   *type = ((PetscObject)dm)->type_name;
2444   PetscFunctionReturn(0);
2445 }
2446 
2447 #undef __FUNCT__
2448 #define __FUNCT__ "DMConvert"
2449 /*@C
2450   DMConvert - Converts a DM to another DM, either of the same or different type.
2451 
2452   Collective on DM
2453 
2454   Input Parameters:
2455 + dm - the DM
2456 - newtype - new DM type (use "same" for the same type)
2457 
2458   Output Parameter:
2459 . M - pointer to new DM
2460 
2461   Notes:
2462   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2463   the MPI communicator of the generated DM is always the same as the communicator
2464   of the input DM.
2465 
2466   Level: intermediate
2467 
2468 .seealso: DMCreate()
2469 @*/
2470 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2471 {
2472   DM             B;
2473   char           convname[256];
2474   PetscBool      sametype, issame;
2475   PetscErrorCode ierr;
2476 
2477   PetscFunctionBegin;
2478   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2479   PetscValidType(dm,1);
2480   PetscValidPointer(M,3);
2481   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2482   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2483   {
2484     PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;
2485 
2486     /*
2487        Order of precedence:
2488        1) See if a specialized converter is known to the current DM.
2489        2) See if a specialized converter is known to the desired DM class.
2490        3) See if a good general converter is registered for the desired class
2491        4) See if a good general converter is known for the current matrix.
2492        5) Use a really basic converter.
2493     */
2494 
2495     /* 1) See if a specialized converter is known to the current DM and the desired class */
2496     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2497     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2498     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2499     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2500     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2501     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2502     if (conv) goto foundconv;
2503 
2504     /* 2)  See if a specialized converter is known to the desired DM class. */
2505     ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
2506     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2507     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2508     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2509     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2510     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2511     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2512     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2513     if (conv) {
2514       ierr = DMDestroy(&B);CHKERRQ(ierr);
2515       goto foundconv;
2516     }
2517 
2518 #if 0
2519     /* 3) See if a good general converter is registered for the desired class */
2520     conv = B->ops->convertfrom;
2521     ierr = DMDestroy(&B);CHKERRQ(ierr);
2522     if (conv) goto foundconv;
2523 
2524     /* 4) See if a good general converter is known for the current matrix */
2525     if (dm->ops->convert) {
2526       conv = dm->ops->convert;
2527     }
2528     if (conv) goto foundconv;
2529 #endif
2530 
2531     /* 5) Use a really basic converter. */
2532     SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2533 
2534     foundconv:
2535     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2536     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2537     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2538   }
2539   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2540   PetscFunctionReturn(0);
2541 }
2542 
2543 /*--------------------------------------------------------------------------------------------------------------------*/
2544 
2545 #undef __FUNCT__
2546 #define __FUNCT__ "DMRegister"
2547 /*@C
2548   DMRegister - See DMRegisterDynamic()
2549 
2550   Level: advanced
2551 @*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Build "path:name" so the registry records where the constructor symbol lives */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  /* Register sname -> function in the global DM type list used by DMSetType() */
  ierr = PetscFListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2564 
2565 
2566 /*--------------------------------------------------------------------------------------------------------------------*/
2567 #undef __FUNCT__
2568 #define __FUNCT__ "DMRegisterDestroy"
2569 /*@C
2570    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2571 
2572    Not Collective
2573 
2574    Level: advanced
2575 
2576 .keywords: DM, register, destroy
2577 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2578 @*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Reset the flag so a later DMSetType()/DMGetType() repopulates the registry */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2588 
2589 #undef __FUNCT__
2590 #define __FUNCT__ "DMLoad"
2591 /*@C
2592   DMLoad - Loads a DM that has been stored in binary  with DMView().
2593 
2594   Collective on PetscViewer
2595 
2596   Input Parameters:
2597 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2598            some related function before a call to DMLoad().
2599 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2600            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2601 
2602    Level: intermediate
2603 
2604   Notes:
2605    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2606 
2607   Notes for advanced users:
2608   Most users should not need to know the details of the binary storage
2609   format, since DMLoad() and DMView() completely hide these details.
2610   But for anyone who's interested, the standard binary matrix storage
2611   format is
2612 .vb
2613      has not yet been determined
2614 .ve
2615 
2616 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2617 @*/
2618 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2619 {
2620   PetscErrorCode ierr;
2621   PetscBool      isbinary;
2622   PetscInt       classid;
2623   char           type[256];
2624 
2625   PetscFunctionBegin;
2626   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2627   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2628   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2629   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2630 
2631   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2632   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2633   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2634   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2635   if (newdm->ops->load) {
2636     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2637   }
2638   PetscFunctionReturn(0);
2639 }
2640 
2641 /******************************** FEM Support **********************************/
2642 
2643 #undef __FUNCT__
2644 #define __FUNCT__ "DMPrintCellVector"
2645 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2646   PetscInt       f;
2647   PetscErrorCode ierr;
2648 
2649   PetscFunctionBegin;
2650   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2651   for (f = 0; f < len; ++f) {
2652     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2653   }
2654   PetscFunctionReturn(0);
2655 }
2656 
2657 #undef __FUNCT__
2658 #define __FUNCT__ "DMPrintCellMatrix"
2659 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2660   PetscInt       f, g;
2661   PetscErrorCode ierr;
2662 
2663   PetscFunctionBegin;
2664   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2665   for (f = 0; f < rows; ++f) {
2666     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2667     for (g = 0; g < cols; ++g) {
2668       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2669     }
2670     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2671   }
2672   PetscFunctionReturn(0);
2673 }
2674 
2675 #undef __FUNCT__
2676 #define __FUNCT__ "DMGetDefaultSection"
2677 /*@
2678   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2679 
2680   Input Parameter:
2681 . dm - The DM
2682 
2683   Output Parameter:
2684 . section - The PetscSection
2685 
2686   Level: intermediate
2687 
2688   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2689 
2690 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2691 @*/
2692 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2693   PetscFunctionBegin;
2694   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2695   PetscValidPointer(section, 2);
2696   *section = dm->defaultSection;
2697   PetscFunctionReturn(0);
2698 }
2699 
2700 #undef __FUNCT__
2701 #define __FUNCT__ "DMSetDefaultSection"
2702 /*@
2703   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2704 
2705   Input Parameters:
2706 + dm - The DM
2707 - section - The PetscSection
2708 
2709   Level: intermediate
2710 
2711   Note: Any existing Section will be destroyed
2712 
2713 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2714 @*/
2715 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2716   PetscInt       numFields;
2717   PetscInt       f;
2718   PetscErrorCode ierr;
2719 
2720   PetscFunctionBegin;
2721   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2722   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2723   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2724   dm->defaultSection = section;
2725   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2726   if (numFields) {
2727     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2728     for (f = 0; f < numFields; ++f) {
2729       const char *name;
2730 
2731       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2732       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2733     }
2734   }
2735   PetscFunctionReturn(0);
2736 }
2737 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Notes: This gets a borrowed reference, so the user should not destroy this PetscSection.

  The global section is created lazily on first request; that path requires both a
  default local PetscSection and a point PetscSF to already be set on the DM, and it
  also builds the PetscLayout stored in dm->map as a side effect.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazy creation: derive the global layout from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Side effect: (re)build the value layout map for this DM from the new global section */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2771 
2772 #undef __FUNCT__
2773 #define __FUNCT__ "DMSetDefaultGlobalSection"
2774 /*@
2775   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2776 
2777   Input Parameters:
2778 + dm - The DM
2779 - section - The PetscSection
2780 
2781   Level: intermediate
2782 
2783   Note: Any existing Section will be destroyed
2784 
2785 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2786 @*/
2787 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2788   PetscErrorCode ierr;
2789 
2790   PetscFunctionBegin;
2791   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2792   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2793   dm->defaultGlobalSection = section;
2794   PetscFunctionReturn(0);
2795 }
2796 
2797 #undef __FUNCT__
2798 #define __FUNCT__ "DMGetDefaultSF"
2799 /*@
2800   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2801   it is created from the default PetscSection layouts in the DM.
2802 
2803   Input Parameter:
2804 . dm - The DM
2805 
2806   Output Parameter:
2807 . sf - The PetscSF
2808 
2809   Level: intermediate
2810 
2811   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2812 
2813 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2814 @*/
2815 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2816   PetscInt       nroots;
2817   PetscErrorCode ierr;
2818 
2819   PetscFunctionBegin;
2820   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2821   PetscValidPointer(sf, 2);
2822   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2823   if (nroots < 0) {
2824     PetscSection section, gSection;
2825 
2826     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2827     if (section) {
2828       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2829       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2830     } else {
2831       *sf = PETSC_NULL;
2832       PetscFunctionReturn(0);
2833     }
2834   }
2835   *sf = dm->defaultSF;
2836   PetscFunctionReturn(0);
2837 }
2838 
2839 #undef __FUNCT__
2840 #define __FUNCT__ "DMSetDefaultSF"
2841 /*@
2842   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2843 
2844   Input Parameters:
2845 + dm - The DM
2846 - sf - The PetscSF
2847 
2848   Level: intermediate
2849 
2850   Note: Any previous SF is destroyed
2851 
2852 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2853 @*/
2854 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2855   PetscErrorCode ierr;
2856 
2857   PetscFunctionBegin;
2858   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2859   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2860   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2861   dm->defaultSF = sf;
2862   PetscFunctionReturn(0);
2863 }
2864 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Notes: In the global section, a negative dof count (and offset) encodes a point owned
  by another process; the true value is recovered as -(value+1). The resulting SF graph
  is stored in dm->defaultSF.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots of the SF are the locally-owned, unconstrained global dofs */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout only to learn the ownership ranges of global dof indices */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves; a negative gdof encodes a remotely-owned point as -(gdof+1) */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local leaf indices and their (rank, index) root locations */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* Allow the global size to match the full local dof count instead */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs, skipping constrained indices in cind */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the true global offset, then locate its owner rank */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insertion point + 1) on a miss */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: the root lives on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2953 
2954 #undef __FUNCT__
2955 #define __FUNCT__ "DMGetPointSF"
2956 /*@
2957   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2958 
2959   Input Parameter:
2960 . dm - The DM
2961 
2962   Output Parameter:
2963 . sf - The PetscSF
2964 
2965   Level: intermediate
2966 
2967   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2968 
2969 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2970 @*/
2971 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
2972   PetscFunctionBegin;
2973   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2974   PetscValidPointer(sf, 2);
2975   *sf = dm->sf;
2976   PetscFunctionReturn(0);
2977 }
2978 
2979 #undef __FUNCT__
2980 #define __FUNCT__ "DMSetPointSF"
2981 /*@
2982   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2983 
2984   Input Parameters:
2985 + dm - The DM
2986 - sf - The PetscSF
2987 
2988   Level: intermediate
2989 
2990 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2991 @*/
2992 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
2993   PetscErrorCode ierr;
2994 
2995   PetscFunctionBegin;
2996   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2997   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2998   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
2999   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3000   dm->sf = sf;
3001   PetscFunctionReturn(0);
3002 }
3003 
3004 #undef __FUNCT__
3005 #define __FUNCT__ "DMGetNumFields"
3006 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3007 {
3008   PetscFunctionBegin;
3009   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3010   PetscValidPointer(numFields, 2);
3011   *numFields = dm->numFields;
3012   PetscFunctionReturn(0);
3013 }
3014 
3015 #undef __FUNCT__
3016 #define __FUNCT__ "DMSetNumFields"
3017 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3018 {
3019   PetscInt       f;
3020   PetscErrorCode ierr;
3021 
3022   PetscFunctionBegin;
3023   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3024   for (f = 0; f < dm->numFields; ++f) {
3025     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3026   }
3027   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3028   dm->numFields = numFields;
3029   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3030   for (f = 0; f < dm->numFields; ++f) {
3031     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3032   }
3033   PetscFunctionReturn(0);
3034 }
3035 
3036 #undef __FUNCT__
3037 #define __FUNCT__ "DMGetField"
3038 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3039 {
3040   PetscFunctionBegin;
3041   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3042   PetscValidPointer(field, 2);
3043   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3044   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3045   *field = dm->fields[f];
3046   PetscFunctionReturn(0);
3047 }
3048 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so that c == dm->coordinates is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* The cached local form was derived from the old global vector; invalidate it so
     DMGetCoordinatesLocal() rebuilds it on demand */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3081 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector so that c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* The cached global form was derived from the old local vector; invalidate it so
     DMGetCoordinates() rebuilds it on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3116 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  NOTE(review): when only local coordinates have been set, the lazy path below calls
  DMLocalToGlobalBegin/End, which are collective — confirm all ranks reach this together
  in that situation despite the "Not Collective" label.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the global vector from the cached local one, if that is all we have */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3162 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily build the local (ghosted) vector from the cached global one, if that is all we have */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3208 
3209 #undef __FUNCT__
3210 #define __FUNCT__ "DMGetCoordinateDM"
3211 /*@
3212   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3213 
3214   Collective on DM
3215 
3216   Input Parameter:
3217 . dm - the DM
3218 
3219   Output Parameter:
3220 . cdm - coordinate DM
3221 
3222   Level: intermediate
3223 
3224 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3225 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3226 @*/
3227 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3228 {
3229   PetscErrorCode ierr;
3230 
3231   PetscFunctionBegin;
3232   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3233   PetscValidPointer(cdm,2);
3234   if (!dm->coordinateDM) {
3235     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3236     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3237   }
3238   *cdm = dm->coordinateDM;
3239   PetscFunctionReturn(0);
3240 }
3241 
3242 #undef __FUNCT__
3243 #define __FUNCT__ "DMLocatePoints"
3244 /*@
3245   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3246 
3247   Not collective
3248 
3249   Input Parameters:
3250 + dm - The DM
3251 - v - The Vec of points
3252 
3253   Output Parameter:
3254 . cells - The local cell numbers for cells which contain the points
3255 
3256   Level: developer
3257 
3258 .keywords: point location, mesh
3259 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3260 @*/
3261 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3262 {
3263   PetscErrorCode ierr;
3264 
3265   PetscFunctionBegin;
3266   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3267   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3268   PetscValidPointer(cells,3);
3269   if (dm->ops->locatepoints) {
3270     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3271   } else {
3272     SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3273   }
3274   PetscFunctionReturn(0);
3275 }
3276