xref: /petsc/src/dm/interface/dm.c (revision 3e1910f1ab6113d8365e15c6b8c907ccce7ce4ea)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   Processes command line options to determine if/how a dm
11   is to be viewed.
12 
13 */
14 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
15 {
16   PetscErrorCode    ierr;
17   PetscBool         flg;
18   PetscViewer       viewer;
19   PetscViewerFormat format;
20 
21   PetscFunctionBegin;
22   ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
23   if (flg) {
24     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
25     ierr = DMView(dm,viewer);CHKERRQ(ierr);
26     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
27     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
28   }
29   PetscFunctionReturn(0);
30 }
31 
32 
33 #undef __FUNCT__
34 #define __FUNCT__ "DMCreate"
35 /*@
36   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
37 
38    If you never  call DMSetType()  it will generate an
39    error when you try to use the vector.
40 
41   Collective on MPI_Comm
42 
43   Input Parameter:
44 . comm - The communicator for the DM object
45 
46   Output Parameter:
47 . dm - The DM object
48 
49   Level: beginner
50 
51 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
52 @*/
53 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
54 {
55   DM             v;
56   PetscErrorCode ierr;
57 
58   PetscFunctionBegin;
59   PetscValidPointer(dm,2);
60   *dm = NULL;
61 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
62   ierr = VecInitializePackage();CHKERRQ(ierr);
63   ierr = MatInitializePackage();CHKERRQ(ierr);
64   ierr = DMInitializePackage();CHKERRQ(ierr);
65 #endif
66 
67   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
68   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
69 
70 
71   v->ltogmap              = NULL;
72   v->ltogmapb             = NULL;
73   v->bs                   = 1;
74   v->coloringtype         = IS_COLORING_GLOBAL;
75   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
76   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
77   v->defaultSection       = NULL;
78   v->defaultGlobalSection = NULL;
79   {
80     PetscInt i;
81     for (i = 0; i < 10; ++i) {
82       v->nullspaceConstructors[i] = NULL;
83     }
84   }
85   v->numFields = 0;
86   v->fields    = NULL;
87 
88   *dm = v;
89   PetscFunctionReturn(0);
90 }
91 
92 #undef __FUNCT__
93 #define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - the DM context
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Free any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
120 
121 #undef __FUNCT__
122 #define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or NULL if no DM has been associated with the vector

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Vec under the "__PETSc_dm" composed-object key (see VecSetDM()) */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
148 
149 #undef __FUNCT__
150 #define __FUNCT__ "VecSetDM"
151 /*@
152   VecSetDM - Sets the DM defining the data layout of the vector
153 
154   Not collective
155 
156   Input Parameters:
157 + v - The Vec
158 - dm - The DM
159 
160   Level: intermediate
161 
162 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
163 @*/
164 PetscErrorCode VecSetDM(Vec v, DM dm)
165 {
166   PetscErrorCode ierr;
167 
168   PetscFunctionBegin;
169   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
170   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
171   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
172   PetscFunctionReturn(0);
173 }
174 
175 #undef __FUNCT__
176 #define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Free any previously stored type name, then keep a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
203 
204 #undef __FUNCT__
205 #define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or NULL if no DM has been associated with the matrix

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Mat under the "__PETSc_dm" composed-object key (see MatSetDM()) */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
231 
232 #undef __FUNCT__
233 #define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or NULL to remove any existing association

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm may be NULL, in which case any previous association is removed */
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Composing under "__PETSc_dm" makes the DM retrievable via MatGetDM() */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
257 
258 #undef __FUNCT__
259 #define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegates to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
289 
290 #undef __FUNCT__
291 #define __FUNCT__ "DMDestroy"
292 /*@
293     DMDestroy - Destroys a vector packer or DMDA.
294 
295     Collective on DM
296 
297     Input Parameter:
298 .   dm - the DM object to destroy
299 
300     Level: developer
301 
302 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
303 
304 @*/
305 PetscErrorCode  DMDestroy(DM *dm)
306 {
307   PetscInt       i, cnt = 0, f;
308   DMNamedVecLink nlink,nnext;
309   PetscErrorCode ierr;
310 
311   PetscFunctionBegin;
312   if (!*dm) PetscFunctionReturn(0);
313   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
314 
315   /* count all the circular references of DM and its contained Vecs */
316   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
317     if ((*dm)->localin[i])  cnt++;
318     if ((*dm)->globalin[i]) cnt++;
319   }
320   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
321   if ((*dm)->x) {
322     DM obj;
323     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
324     if (obj == *dm) cnt++;
325   }
326   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
327   if ((*dm)->x) {
328     DM obj;
329     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
330     if (obj == *dm) cnt++;
331   }
332 
333   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
334   /*
335      Need this test because the dm references the vectors that
336      reference the dm, so destroying the dm calls destroy on the
337      vectors that cause another destroy on the dm
338   */
339   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
340   ((PetscObject) (*dm))->refct = 0;
341   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
342     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
343     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
344   }
345   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
346     nnext = nlink->next;
347     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
348     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
349     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
350     ierr = PetscFree(nlink);CHKERRQ(ierr);
351   }
352   (*dm)->namedglobal = NULL;
353 
354   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
355     nnext = nlink->next;
356     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
357     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
358     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
359     ierr = PetscFree(nlink);CHKERRQ(ierr);
360   }
361   (*dm)->namedlocal = NULL;
362 
363   /* Destroy the list of hooks */
364   {
365     DMCoarsenHookLink link,next;
366     for (link=(*dm)->coarsenhook; link; link=next) {
367       next = link->next;
368       ierr = PetscFree(link);CHKERRQ(ierr);
369     }
370     (*dm)->coarsenhook = NULL;
371   }
372   {
373     DMRefineHookLink link,next;
374     for (link=(*dm)->refinehook; link; link=next) {
375       next = link->next;
376       ierr = PetscFree(link);CHKERRQ(ierr);
377     }
378     (*dm)->refinehook = NULL;
379   }
380   {
381     DMSubDomainHookLink link,next;
382     for (link=(*dm)->subdomainhook; link; link=next) {
383       next = link->next;
384       ierr = PetscFree(link);CHKERRQ(ierr);
385     }
386     (*dm)->subdomainhook = NULL;
387   }
388   {
389     DMGlobalToLocalHookLink link,next;
390     for (link=(*dm)->gtolhook; link; link=next) {
391       next = link->next;
392       ierr = PetscFree(link);CHKERRQ(ierr);
393     }
394     (*dm)->gtolhook = NULL;
395   }
396   /* Destroy the work arrays */
397   {
398     DMWorkLink link,next;
399     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
400     for (link=(*dm)->workin; link; link=next) {
401       next = link->next;
402       ierr = PetscFree(link->mem);CHKERRQ(ierr);
403       ierr = PetscFree(link);CHKERRQ(ierr);
404     }
405     (*dm)->workin = NULL;
406   }
407 
408   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
410   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
411 
412   if ((*dm)->ctx && (*dm)->ctxdestroy) {
413     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
414   }
415   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
416   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
417   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
419   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
421   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
422 
423   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
424   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
425   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
427   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
428 
429   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
431   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
432 
433   for (f = 0; f < (*dm)->numFields; ++f) {
434     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
435   }
436   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
437   /* if memory was published with AMS then destroy it */
438   ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);
439 
440   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
441   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
442   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
443   PetscFunctionReturn(0);
444 }
445 
446 #undef __FUNCT__
447 #define __FUNCT__ "DMSetUp"
448 /*@
449     DMSetUp - sets up the data structures inside a DM object
450 
451     Collective on DM
452 
453     Input Parameter:
454 .   dm - the DM object to setup
455 
456     Level: developer
457 
458 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
459 
460 @*/
461 PetscErrorCode  DMSetUp(DM dm)
462 {
463   PetscErrorCode ierr;
464 
465   PetscFunctionBegin;
466   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
467   if (dm->setupcalled) PetscFunctionReturn(0);
468   if (dm->ops->setup) {
469     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
470   }
471   dm->setupcalled = PETSC_TRUE;
472   PetscFunctionReturn(0);
473 }
474 
475 #undef __FUNCT__
476 #define __FUNCT__ "DMSetFromOptions"
477 /*@
478     DMSetFromOptions - sets parameters in a DM from the options database
479 
480     Collective on DM
481 
482     Input Parameter:
483 .   dm - the DM object to set options for
484 
485     Options Database:
486 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
487 .   -dm_vec_type <type>  type of vector to create inside DM
488 .   -dm_mat_type <type>  type of matrix to create inside DM
489 -   -dm_coloring_type <global or ghosted>
490 
491     Level: developer
492 
493 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
494 
495 @*/
496 PetscErrorCode  DMSetFromOptions(DM dm)
497 {
498   char           typeName[256] = MATAIJ;
499   PetscBool      flg;
500   PetscErrorCode ierr;
501 
502   PetscFunctionBegin;
503   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
504   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
505   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
506   ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
507   if (flg) {
508     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
509   }
510   ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
511   if (flg) {
512     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
513   }
514   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
515   if (dm->ops->setfromoptions) {
516     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
517   }
518   /* process any options handlers added with PetscObjectAddOptionsHandler() */
519   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
520   ierr = PetscOptionsEnd();CHKERRQ(ierr);
521   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
522   PetscFunctionReturn(0);
523 }
524 
525 #undef __FUNCT__
526 #define __FUNCT__ "DMView"
527 /*@C
528     DMView - Views a vector packer or DMDA.
529 
530     Collective on DM
531 
532     Input Parameter:
533 +   dm - the DM object to view
534 -   v - the viewer
535 
536     Level: developer
537 
538 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
539 
540 @*/
541 PetscErrorCode  DMView(DM dm,PetscViewer v)
542 {
543   PetscErrorCode ierr;
544   PetscBool      isbinary;
545 
546   PetscFunctionBegin;
547   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
548   if (!v) {
549     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
550   }
551   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
552   if (isbinary) {
553     PetscInt classid = DM_FILE_CLASSID;
554     char     type[256];
555 
556     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
557     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
558     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
559   }
560   if (dm->ops->view) {
561     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
562   }
563   PetscFunctionReturn(0);
564 }
565 
566 #undef __FUNCT__
567 #define __FUNCT__ "DMCreateGlobalVector"
568 /*@
569     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
570 
571     Collective on DM
572 
573     Input Parameter:
574 .   dm - the DM object
575 
576     Output Parameter:
577 .   vec - the global vector
578 
579     Level: beginner
580 
581 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
582 
583 @*/
584 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
585 {
586   PetscErrorCode ierr;
587 
588   PetscFunctionBegin;
589   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
590   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
591   PetscFunctionReturn(0);
592 }
593 
594 #undef __FUNCT__
595 #define __FUNCT__ "DMCreateLocalVector"
596 /*@
597     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
598 
599     Not Collective
600 
601     Input Parameter:
602 .   dm - the DM object
603 
604     Output Parameter:
605 .   vec - the local vector
606 
607     Level: beginner
608 
609 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
610 
611 @*/
612 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
613 {
614   PetscErrorCode ierr;
615 
616   PetscFunctionBegin;
617   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
618   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
619   PetscFunctionReturn(0);
620 }
621 
622 #undef __FUNCT__
623 #define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE(review): this local 'ltog' shadows the output parameter of the same name */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      /* Build the mapping point-by-point: for each point, the local dofs map to
         consecutive global indices starting at the point's global offset */
      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        /* NOTE(review): the dof count comes from the local section but the offset from the
           global section; offsets for points owned by other ranks may be encoded negatively —
           confirm downstream consumers handle this */
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the ISLocalToGlobalMapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No default section: defer entirely to the DM implementation */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
682 
683 #undef __FUNCT__
684 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
685 /*@
686    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
687 
688    Collective on DM
689 
690    Input Parameter:
691 .  da - the distributed array that provides the mapping
692 
693    Output Parameter:
694 .  ltog - the block mapping
695 
696    Level: intermediate
697 
698    Notes:
699    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
700    MatSetLocalToGlobalMappingBlock().
701 
702 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
703 @*/
704 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
705 {
706   PetscErrorCode ierr;
707 
708   PetscFunctionBegin;
709   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
710   PetscValidPointer(ltog,2);
711   if (!dm->ltogmapb) {
712     PetscInt bs;
713     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
714     if (bs > 1) {
715       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
716       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
717     } else {
718       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
719       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
720     }
721   }
722   *ltog = dm->ltogmapb;
723   PetscFunctionReturn(0);
724 }
725 
726 #undef __FUNCT__
727 #define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs is initialized to 1 in DMCreate(); a value < 1 means it was never properly determined */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
752 
753 #undef __FUNCT__
754 #define __FUNCT__ "DMCreateInterpolation"
755 /*@
756     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
757 
758     Collective on DM
759 
760     Input Parameter:
761 +   dm1 - the DM object
762 -   dm2 - the second, finer DM object
763 
764     Output Parameter:
765 +  mat - the interpolation
766 -  vec - the scaling (optional)
767 
768     Level: developer
769 
770     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
771         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
772 
773         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
774         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
775 
776 
777 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
778 
779 @*/
780 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
781 {
782   PetscErrorCode ierr;
783 
784   PetscFunctionBegin;
785   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
786   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
787   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
788   PetscFunctionReturn(0);
789 }
790 
791 #undef __FUNCT__
792 #define __FUNCT__ "DMCreateInjection"
793 /*@
794     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
795 
796     Collective on DM
797 
798     Input Parameter:
799 +   dm1 - the DM object
800 -   dm2 - the second, finer DM object
801 
802     Output Parameter:
803 .   ctx - the injection
804 
805     Level: developer
806 
807    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
808         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
809 
810 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
811 
812 @*/
813 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
814 {
815   PetscErrorCode ierr;
816 
817   PetscFunctionBegin;
818   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
819   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
820   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
821   PetscFunctionReturn(0);
822 }
823 
824 #undef __FUNCT__
825 #define __FUNCT__ "DMCreateColoring"
826 /*@C
827     DMCreateColoring - Gets coloring for a DMDA or DMComposite
828 
829     Collective on DM
830 
831     Input Parameter:
832 +   dm - the DM object
833 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
834 -   matype - either MATAIJ or MATBAIJ
835 
836     Output Parameter:
837 .   coloring - the coloring
838 
839     Level: developer
840 
841 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
842 
843 @*/
844 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
845 {
846   PetscErrorCode ierr;
847 
848   PetscFunctionBegin;
849   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
850   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
851   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "DMCreateMatrix"
857 /*@C
858     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
859 
860     Collective on DM
861 
862     Input Parameter:
863 +   dm - the DM object
864 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
865             any type which inherits from one of these (such as MATAIJ)
866 
867     Output Parameter:
868 .   mat - the empty Jacobian
869 
870     Level: beginner
871 
872     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
873        do not need to do it yourself.
874 
875        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
876        the nonzero pattern call DMDASetMatPreallocateOnly()
877 
878        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
879        internally by PETSc.
880 
881        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
882        the indices for the global numbering for DMDAs which is complicated.
883 
884 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
885 
886 @*/
887 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
888 {
889   PetscErrorCode ierr;
890 
891   PetscFunctionBegin;
892   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
893 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
894   ierr = MatInitializePackage();CHKERRQ(ierr);
895 #endif
896   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
897   PetscValidPointer(mat,3);
898   if (dm->mattype) {
899     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
900   } else {
901     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
902   }
903   PetscFunctionReturn(0);
904 }
905 
906 #undef __FUNCT__
907 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
908 /*@
909   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
910     preallocated but the nonzero structure and zero values will not be set.
911 
912   Logically Collective on DMDA
913 
914   Input Parameter:
915 + dm - the DM
916 - only - PETSC_TRUE if only want preallocation
917 
918   Level: developer
919 .seealso DMCreateMatrix()
920 @*/
921 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
922 {
923   PetscFunctionBegin;
924   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
925   dm->prealloc_only = only;
926   PetscFunctionReturn(0);
927 }
928 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that is set to the work array

  Level: developer

  Notes: The array may be larger than count entries; its contents are undefined on return.

.seealso DMRestoreWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse an idle link from the checked-in (workin) list if one exists, otherwise create a new one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the link's buffer only when the request exceeds its current capacity; never shrink */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out (workout) list; DMRestoreWorkArray() moves it back */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
974 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size passed to DMGetWorkArray()
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the pointer returned by DMGetWorkArray(); set to NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link holding this buffer on the checked-out list and move it to the idle list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was not obtained from DMGetWorkArray() on this DM (or was restored twice) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1013 
1014 #undef __FUNCT__
1015 #define __FUNCT__ "DMSetNullSpaceConstructor"
1016 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1017 {
1018   PetscFunctionBegin;
1019   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1020   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1021   dm->nullspaceConstructors[field] = nullsp;
1022   PetscFunctionReturn(0);
1023 }
1024 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Every output is optional; initialize the ones requested so early returns are well-defined */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: count dofs per field, then fill index arrays in a second sweep */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: size each field, counting only unconstrained dofs on locally-owned points (gdof > 0) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the per-field index arrays and reset the counters for reuse as fill cursors */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Pass 2: record consecutive global offsets for each field's unconstrained dofs */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          /* goff advances across fields so each field gets its slice of the point's global range */
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f]; do not free it here */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    /* No default section: defer entirely to the implementation, if it provides one */
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1137 
1138 
1139 #undef __FUNCT__
1140 #define __FUNCT__ "DMCreateFieldDecomposition"
1141 /*@C
1142   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1143                           corresponding to different fields: each IS contains the global indices of the dofs of the
1144                           corresponding field. The optional list of DMs define the DM for each subproblem.
1145                           Generalizes DMCreateFieldIS().
1146 
1147   Not collective
1148 
1149   Input Parameter:
1150 . dm - the DM object
1151 
1152   Output Parameters:
1153 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1154 . namelist  - The name for each field (or NULL if not requested)
1155 . islist    - The global indices for each field (or NULL if not requested)
1156 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1157 
1158   Level: intermediate
1159 
1160   Notes:
1161   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1162   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1163   and all of the arrays should be freed with PetscFree().
1164 
1165 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1166 @*/
1167 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1168 {
1169   PetscErrorCode ierr;
1170 
1171   PetscFunctionBegin;
1172   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1173   if (len) {
1174     PetscValidPointer(len,2);
1175     *len = 0;
1176   }
1177   if (namelist) {
1178     PetscValidPointer(namelist,3);
1179     *namelist = 0;
1180   }
1181   if (islist) {
1182     PetscValidPointer(islist,4);
1183     *islist = 0;
1184   }
1185   if (dmlist) {
1186     PetscValidPointer(dmlist,5);
1187     *dmlist = 0;
1188   }
1189   /*
1190    Is it a good idea to apply the following check across all impls?
1191    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1192    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1193    */
1194   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1195   if (!dm->ops->createfielddecomposition) {
1196     PetscSection section;
1197     PetscInt     numFields, f;
1198 
1199     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1200     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1201     if (section && numFields && dm->ops->createsubdm) {
1202       *len = numFields;
1203       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1204       for (f = 0; f < numFields; ++f) {
1205         const char *fieldName;
1206 
1207         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1208         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1209         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1210       }
1211     } else {
1212       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1213       /* By default there are no DMs associated with subproblems. */
1214       if (dmlist) *dmlist = NULL;
1215     }
1216   } else {
1217     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1218   }
1219   PetscFunctionReturn(0);
1220 }
1221 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  /* Pure dispatcher: the implementation must provide createsubdm */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1257 
1258 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; zero/NULL the ones requested before dispatching */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    /* The impl reports the subdomain count through l; it is copied to *len only at the end */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    /* NOTE(review): assumes the impl filled *dmlist with l valid DMs when dmlist was requested — confirm for each impl */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1323 
1324 
1325 #undef __FUNCT__
1326 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1327 /*@C
1328   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1329 
1330   Not collective
1331 
1332   Input Parameters:
1333 + dm - the DM object
1334 . n  - the number of subdomain scatters
1335 - subdms - the local subdomains
1336 
1337   Output Parameters:
1338 + n     - the number of scatters returned
1339 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1340 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1341 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1342 
1343   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1344   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1345   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1346   solution and residual data.
1347 
1348   Level: developer
1349 
1350 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1351 @*/
1352 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1353 {
1354   PetscErrorCode ierr;
1355 
1356   PetscFunctionBegin;
1357   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1358   PetscValidPointer(subdms,3);
1359   PetscValidPointer(iscat,4);
1360   PetscValidPointer(oscat,5);
1361   PetscValidPointer(gscat,6);
1362   if (dm->ops->createddscatters) {
1363     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1364   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1365   PetscFunctionReturn(0);
1366 }
1367 
1368 #undef __FUNCT__
1369 #define __FUNCT__ "DMRefine"
1370 /*@
1371   DMRefine - Refines a DM object
1372 
1373   Collective on DM
1374 
1375   Input Parameter:
1376 + dm   - the DM object
1377 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1378 
1379   Output Parameter:
1380 . dmf - the refined DM, or NULL
1381 
1382   Note: If no refinement was done, the return value is NULL
1383 
1384   Level: developer
1385 
1386 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1387 @*/
1388 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1389 {
1390   PetscErrorCode   ierr;
1391   DMRefineHookLink link;
1392 
1393   PetscFunctionBegin;
1394   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1395   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1396   if (*dmf) {
1397     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1398 
1399     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1400 
1401     (*dmf)->ctx       = dm->ctx;
1402     (*dmf)->leveldown = dm->leveldown;
1403     (*dmf)->levelup   = dm->levelup + 1;
1404 
1405     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1406     for (link=dm->refinehook; link; link=link->next) {
1407       if (link->refinehook) {
1408         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1409       }
1410     }
1411   }
1412   PetscFunctionReturn(0);
1413 }
1414 
1415 #undef __FUNCT__
1416 #define __FUNCT__ "DMRefineHookAdd"
1417 /*@C
1418    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1419 
1420    Logically Collective
1421 
1422    Input Arguments:
1423 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1424 .  refinehook - function to run when setting up a coarser level
1425 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1426 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1427 
1428    Calling sequence of refinehook:
1429 $    refinehook(DM coarse,DM fine,void *ctx);
1430 
1431 +  coarse - coarse level DM
1432 .  fine - fine level DM to interpolate problem to
1433 -  ctx - optional user-defined function context
1434 
1435    Calling sequence for interphook:
1436 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1437 
1438 +  coarse - coarse level DM
1439 .  interp - matrix interpolating a coarse-level solution to the finer grid
1440 .  fine - fine level DM to update
1441 -  ctx - optional user-defined function context
1442 
1443    Level: advanced
1444 
1445    Notes:
1446    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1447 
1448    If this function is called multiple times, the hooks will be run in the order they are added.
1449 
1450    This function is currently not available from Fortran.
1451 
1452 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1453 @*/
1454 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1455 {
1456   PetscErrorCode   ierr;
1457   DMRefineHookLink link,*p;
1458 
1459   PetscFunctionBegin;
1460   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1461   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1462   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1463   link->refinehook = refinehook;
1464   link->interphook = interphook;
1465   link->ctx        = ctx;
1466   link->next       = NULL;
1467   *p               = link;
1468   PetscFunctionReturn(0);
1469 }
1470 
1471 #undef __FUNCT__
1472 #define __FUNCT__ "DMInterpolate"
1473 /*@
1474    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1475 
1476    Collective if any hooks are
1477 
1478    Input Arguments:
1479 +  coarse - coarser DM to use as a base
1480 .  restrct - interpolation matrix, apply using MatInterpolate()
1481 -  fine - finer DM to update
1482 
1483    Level: developer
1484 
1485 .seealso: DMRefineHookAdd(), MatInterpolate()
1486 @*/
1487 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1488 {
1489   PetscErrorCode   ierr;
1490   DMRefineHookLink link;
1491 
1492   PetscFunctionBegin;
1493   for (link=fine->refinehook; link; link=link->next) {
1494     if (link->interphook) {
1495       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1496     }
1497   }
1498   PetscFunctionReturn(0);
1499 }
1500 
1501 #undef __FUNCT__
1502 #define __FUNCT__ "DMGetRefineLevel"
1503 /*@
1504     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1505 
1506     Not Collective
1507 
1508     Input Parameter:
1509 .   dm - the DM object
1510 
1511     Output Parameter:
1512 .   level - number of refinements
1513 
1514     Level: developer
1515 
1516 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1517 
1518 @*/
1519 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1520 {
1521   PetscFunctionBegin;
1522   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1523   *level = dm->levelup;
1524   PetscFunctionReturn(0);
1525 }
1526 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Append at the tail so hooks run in the order they were added */
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1586 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user beginhooks (registered with DMGlobalToLocalHookAdd()) before any communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* A default PetscSF takes precedence over the type's globaltolocal implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only broadcasts (insert); additive modes are not supported here */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    /* NOTE(review): the arrays are restored while the bcast is still in flight
       (completed in DMGlobalToLocalEnd()) — assumes the Vec memory is not moved in between; confirm */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to the plain modes expected by the implementations */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1634 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must mirror the dispatch choice made in DMGlobalToLocalBegin() */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF path only broadcasts (insert); additive modes are not supported here */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to the plain modes expected by the implementations */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* User endhooks run only after the transfer has completed */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1680 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* A default PetscSF takes precedence over the type's localtoglobal implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Translate the InsertMode to the MPI reduction used by the SF.
       No break is needed after SETERRQ: the macro returns from the function. */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* NOTE(review): the arrays are restored while the reduce is still in flight
       (completed in DMLocalToGlobalEnd()) — assumes the Vec memory is not moved in between; confirm */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to the plain modes expected by the implementations */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1740 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must mirror the dispatch choice made in DMLocalToGlobalBegin() */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Translate the InsertMode to the MPI reduction used by the SF; must match
       the op chosen in DMLocalToGlobalBegin(). SETERRQ returns, so no break is needed. */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes to the plain modes expected by the implementations */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1796 
1797 #undef __FUNCT__
1798 #define __FUNCT__ "DMCoarsen"
1799 /*@
1800     DMCoarsen - Coarsens a DM object
1801 
1802     Collective on DM
1803 
1804     Input Parameter:
1805 +   dm - the DM object
1806 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1807 
1808     Output Parameter:
1809 .   dmc - the coarsened DM
1810 
1811     Level: developer
1812 
1813 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1814 
1815 @*/
1816 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1817 {
1818   PetscErrorCode    ierr;
1819   DMCoarsenHookLink link;
1820 
1821   PetscFunctionBegin;
1822   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1823   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1824   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1825   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1826   (*dmc)->ctx               = dm->ctx;
1827   (*dmc)->levelup           = dm->levelup;
1828   (*dmc)->leveldown         = dm->leveldown + 1;
1829   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1830   for (link=dm->coarsenhook; link; link=link->next) {
1831     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1832   }
1833   PetscFunctionReturn(0);
1834 }
1835 
1836 #undef __FUNCT__
1837 #define __FUNCT__ "DMCoarsenHookAdd"
1838 /*@C
1839    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1840 
1841    Logically Collective
1842 
1843    Input Arguments:
1844 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1845 .  coarsenhook - function to run when setting up a coarser level
1846 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1847 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1848 
1849    Calling sequence of coarsenhook:
1850 $    coarsenhook(DM fine,DM coarse,void *ctx);
1851 
1852 +  fine - fine level DM
1853 .  coarse - coarse level DM to restrict problem to
1854 -  ctx - optional user-defined function context
1855 
1856    Calling sequence for restricthook:
1857 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1858 
1859 +  fine - fine level DM
1860 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1861 .  rscale - scaling vector for restriction
1862 .  inject - matrix restricting by injection
1863 .  coarse - coarse level DM to update
1864 -  ctx - optional user-defined function context
1865 
1866    Level: advanced
1867 
1868    Notes:
1869    This function is only needed if auxiliary data needs to be set up on coarse grids.
1870 
1871    If this function is called multiple times, the hooks will be run in the order they are added.
1872 
1873    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1874    extract the finest level information from its context (instead of from the SNES).
1875 
1876    This function is currently not available from Fortran.
1877 
1878 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1879 @*/
1880 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1881 {
1882   PetscErrorCode    ierr;
1883   DMCoarsenHookLink link,*p;
1884 
1885   PetscFunctionBegin;
1886   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1887   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1888   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1889   link->coarsenhook  = coarsenhook;
1890   link->restricthook = restricthook;
1891   link->ctx          = ctx;
1892   link->next         = NULL;
1893   *p                 = link;
1894   PetscFunctionReturn(0);
1895 }
1896 
1897 #undef __FUNCT__
1898 #define __FUNCT__ "DMRestrict"
1899 /*@
1900    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1901 
1902    Collective if any hooks are
1903 
1904    Input Arguments:
1905 +  fine - finer DM to use as a base
1906 .  restrct - restriction matrix, apply using MatRestrict()
1907 .  inject - injection matrix, also use MatRestrict()
1908 -  coarse - coarer DM to update
1909 
1910    Level: developer
1911 
1912 .seealso: DMCoarsenHookAdd(), MatRestrict()
1913 @*/
1914 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1915 {
1916   PetscErrorCode    ierr;
1917   DMCoarsenHookLink link;
1918 
1919   PetscFunctionBegin;
1920   for (link=fine->coarsenhook; link; link=link->next) {
1921     if (link->restricthook) {
1922       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
1923     }
1924   }
1925   PetscFunctionReturn(0);
1926 }
1927 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@C
   DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run to pass data to the subdomain DM upon its creation
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for restricthook:
$    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   Only restricthook is invoked by DMSubDomainRestrict(); ddhook is stored for use
   during subdomain DM creation.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}
1979 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainRestrict"
/*@
   DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()

   Collective if any hooks are

   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update

   Level: developer

.seealso: DMSubDomainHookAdd(), DMCoarsenHookAdd()
@*/
PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;

  PetscFunctionBegin;
  /* Run every registered restriction hook in registration order; ddhook entries are not invoked here */
  for (link=global->subdomainhook; link; link=link->next) {
    if (link->restricthook) {
      ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
2010 
2011 #undef __FUNCT__
2012 #define __FUNCT__ "DMGetCoarsenLevel"
2013 /*@
2014     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2015 
2016     Not Collective
2017 
2018     Input Parameter:
2019 .   dm - the DM object
2020 
2021     Output Parameter:
2022 .   level - number of coarsenings
2023 
2024     Level: developer
2025 
2026 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2027 
2028 @*/
2029 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2030 {
2031   PetscFunctionBegin;
2032   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2033   *level = dm->leveldown;
2034   PetscFunctionReturn(0);
2035 }
2036 
2037 
2038 
2039 #undef __FUNCT__
2040 #define __FUNCT__ "DMRefineHierarchy"
2041 /*@C
2042     DMRefineHierarchy - Refines a DM object, all levels at once
2043 
2044     Collective on DM
2045 
2046     Input Parameter:
2047 +   dm - the DM object
2048 -   nlevels - the number of levels of refinement
2049 
2050     Output Parameter:
2051 .   dmf - the refined DM hierarchy
2052 
2053     Level: developer
2054 
2055 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2056 
2057 @*/
2058 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2059 {
2060   PetscErrorCode ierr;
2061 
2062   PetscFunctionBegin;
2063   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2064   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2065   if (nlevels == 0) PetscFunctionReturn(0);
2066   if (dm->ops->refinehierarchy) {
2067     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2068   } else if (dm->ops->refine) {
2069     PetscInt i;
2070 
2071     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2072     for (i=1; i<nlevels; i++) {
2073       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2074     }
2075   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2076   PetscFunctionReturn(0);
2077 }
2078 
2079 #undef __FUNCT__
2080 #define __FUNCT__ "DMCoarsenHierarchy"
2081 /*@C
2082     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2083 
2084     Collective on DM
2085 
2086     Input Parameter:
2087 +   dm - the DM object
2088 -   nlevels - the number of levels of coarsening
2089 
2090     Output Parameter:
2091 .   dmc - the coarsened DM hierarchy
2092 
2093     Level: developer
2094 
2095 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2096 
2097 @*/
2098 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2099 {
2100   PetscErrorCode ierr;
2101 
2102   PetscFunctionBegin;
2103   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2104   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2105   if (nlevels == 0) PetscFunctionReturn(0);
2106   PetscValidPointer(dmc,3);
2107   if (dm->ops->coarsenhierarchy) {
2108     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2109   } else if (dm->ops->coarsen) {
2110     PetscInt i;
2111 
2112     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2113     for (i=1; i<nlevels; i++) {
2114       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2115     }
2116   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2117   PetscFunctionReturn(0);
2118 }
2119 
2120 #undef __FUNCT__
2121 #define __FUNCT__ "DMCreateAggregates"
2122 /*@
2123    DMCreateAggregates - Gets the aggregates that map between
2124    grids associated with two DMs.
2125 
2126    Collective on DM
2127 
2128    Input Parameters:
2129 +  dmc - the coarse grid DM
2130 -  dmf - the fine grid DM
2131 
2132    Output Parameters:
2133 .  rest - the restriction matrix (transpose of the projection matrix)
2134 
2135    Level: intermediate
2136 
2137 .keywords: interpolation, restriction, multigrid
2138 
2139 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2140 @*/
2141 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2142 {
2143   PetscErrorCode ierr;
2144 
2145   PetscFunctionBegin;
2146   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2147   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2148   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2149   PetscFunctionReturn(0);
2150 }
2151 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored callback; invoked on the context set by DMSetApplicationContext() when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2175 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

    Note: the DM stores only the pointer; ownership stays with the caller unless
    a destroy function is supplied via DMSetApplicationContextDestroy().

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2199 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; pass the address of a pointer variable (it is
          dereferenced as a void** below)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is taken as void* for caller convenience but must point at a pointer */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2225 
2226 #undef __FUNCT__
2227 #define __FUNCT__ "DMSetVariableBounds"
2228 /*@C
2229     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2230 
2231     Logically Collective on DM
2232 
2233     Input Parameter:
2234 +   dm - the DM object
2235 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2236 
2237     Level: intermediate
2238 
2239 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2240          DMSetJacobian()
2241 
2242 @*/
2243 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2244 {
2245   PetscFunctionBegin;
2246   dm->ops->computevariablebounds = f;
2247   PetscFunctionReturn(0);
2248 }
2249 
2250 #undef __FUNCT__
2251 #define __FUNCT__ "DMHasVariableBounds"
2252 /*@
2253     DMHasVariableBounds - does the DM object have a variable bounds function?
2254 
2255     Not Collective
2256 
2257     Input Parameter:
2258 .   dm - the DM object to destroy
2259 
2260     Output Parameter:
2261 .   flg - PETSC_TRUE if the variable bounds function exists
2262 
2263     Level: developer
2264 
2265 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2266 
2267 @*/
2268 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2269 {
2270   PetscFunctionBegin;
2271   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2272   PetscFunctionReturn(0);
2273 }
2274 
2275 #undef __FUNCT__
2276 #define __FUNCT__ "DMComputeVariableBounds"
2277 /*@C
2278     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2279 
2280     Logically Collective on DM
2281 
2282     Input Parameters:
2283 +   dm - the DM object to destroy
2284 -   x  - current solution at which the bounds are computed
2285 
2286     Output parameters:
2287 +   xl - lower bound
2288 -   xu - upper bound
2289 
2290     Level: intermediate
2291 
2292 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2293 
2294 @*/
2295 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2296 {
2297   PetscErrorCode ierr;
2298 
2299   PetscFunctionBegin;
2300   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2301   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2302   if (dm->ops->computevariablebounds) {
2303     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2304   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2305   PetscFunctionReturn(0);
2306 }
2307 
2308 #undef __FUNCT__
2309 #define __FUNCT__ "DMHasColoring"
2310 /*@
2311     DMHasColoring - does the DM object have a method of providing a coloring?
2312 
2313     Not Collective
2314 
2315     Input Parameter:
2316 .   dm - the DM object
2317 
2318     Output Parameter:
2319 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2320 
2321     Level: developer
2322 
2323 .seealso DMHasFunction(), DMCreateColoring()
2324 
2325 @*/
2326 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2327 {
2328   PetscFunctionBegin;
2329   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2330   PetscFunctionReturn(0);
2331 }
2332 
2333 #undef  __FUNCT__
2334 #define __FUNCT__ "DMSetVec"
2335 /*@C
2336     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2337 
2338     Collective on DM
2339 
2340     Input Parameter:
2341 +   dm - the DM object
2342 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2343 
2344     Level: developer
2345 
2346 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2347 
2348 @*/
2349 PetscErrorCode  DMSetVec(DM dm,Vec x)
2350 {
2351   PetscErrorCode ierr;
2352 
2353   PetscFunctionBegin;
2354   if (x) {
2355     if (!dm->x) {
2356       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2357     }
2358     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2359   } else if (dm->x) {
2360     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2361   }
2362   PetscFunctionReturn(0);
2363 }
2364 
2365 PetscFunctionList DMList              = NULL;
2366 PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2367 
2368 #undef __FUNCT__
2369 #define __FUNCT__ "DMSetType"
2370 /*@C
2371   DMSetType - Builds a DM, for a particular DM implementation.
2372 
2373   Collective on DM
2374 
2375   Input Parameters:
2376 + dm     - The DM object
2377 - method - The name of the DM type
2378 
2379   Options Database Key:
2380 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2381 
2382   Notes:
2383   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2384 
2385   Level: intermediate
2386 
2387 .keywords: DM, set, type
2388 .seealso: DMGetType(), DMCreate()
2389 @*/
2390 PetscErrorCode  DMSetType(DM dm, DMType method)
2391 {
2392   PetscErrorCode (*r)(DM);
2393   PetscBool      match;
2394   PetscErrorCode ierr;
2395 
2396   PetscFunctionBegin;
2397   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2398   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2399   if (match) PetscFunctionReturn(0);
2400 
2401   if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
2402   ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
2403   if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2404 
2405   if (dm->ops->destroy) {
2406     ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2407     dm->ops->destroy = NULL;
2408   }
2409   ierr = (*r)(dm);CHKERRQ(ierr);
2410   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2411   PetscFunctionReturn(0);
2412 }
2413 
2414 #undef __FUNCT__
2415 #define __FUNCT__ "DMGetType"
2416 /*@C
2417   DMGetType - Gets the DM type name (as a string) from the DM.
2418 
2419   Not Collective
2420 
2421   Input Parameter:
2422 . dm  - The DM
2423 
2424   Output Parameter:
2425 . type - The DM type name
2426 
2427   Level: intermediate
2428 
2429 .keywords: DM, get, type, name
2430 .seealso: DMSetType(), DMCreate()
2431 @*/
2432 PetscErrorCode  DMGetType(DM dm, DMType *type)
2433 {
2434   PetscErrorCode ierr;
2435 
2436   PetscFunctionBegin;
2437   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2438   PetscValidCharPointer(type,2);
2439   if (!DMRegisterAllCalled) {
2440     ierr = DMRegisterAll();CHKERRQ(ierr);
2441   }
2442   *type = ((PetscObject)dm)->type_name;
2443   PetscFunctionReturn(0);
2444 }
2445 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but not consulted below; every
     conversion goes through the converter search — confirm this is intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class,
          composed under the name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class:
           build a throwaway DM of the target type and query it for the same name */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): no basic converter exists; this path errors out, and the
       throwaway B from step 2 is not destroyed on this path — possible leak */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2541 
2542 /*--------------------------------------------------------------------------------------------------------------------*/
2543 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister -  Adds a new DM component implementation

  Not Collective

  Input Parameters:
+ name        - The name of a new user-defined creation routine
- create_func - The creation routine itself

  Notes:
  DMRegister() may be called multiple times to add several user-defined DMs


  Sample usage:
.vb
    DMRegister("my_da", MyDMCreate);
.ve

  Then, your DM type can be chosen with the procedural interface via
.vb
    DMCreate(MPI_Comm, DM *);
    DMSetType(DM,"my_da");
.ve
   or at runtime via the option
.vb
    -dm_type my_da
.ve

  Level: advanced

.keywords: DM, register
.seealso: DMRegisterAll(), DMRegisterDestroy()

@*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Add the constructor to the global type registry consulted by DMSetType() */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2588 
2589 
2590 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr                = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
  /* Reset the flag so DMRegisterAll() repopulates the list if types are needed again */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2612 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()
           (NOTE(review): the implementation below only accepts binary viewers)

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The file begins with a class id followed by the DM type name as a string */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the type-specific payload to the implementation, if it supports loading */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2664 
2665 /******************************** FEM Support **********************************/
2666 
2667 #undef __FUNCT__
2668 #define __FUNCT__ "DMPrintCellVector"
2669 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2670 {
2671   PetscInt       f;
2672   PetscErrorCode ierr;
2673 
2674   PetscFunctionBegin;
2675   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2676   for (f = 0; f < len; ++f) {
2677     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2678   }
2679   PetscFunctionReturn(0);
2680 }
2681 
2682 #undef __FUNCT__
2683 #define __FUNCT__ "DMPrintCellMatrix"
2684 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2685 {
2686   PetscInt       f, g;
2687   PetscErrorCode ierr;
2688 
2689   PetscFunctionBegin;
2690   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2691   for (f = 0; f < rows; ++f) {
2692     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2693     for (g = 0; g < cols; ++g) {
2694       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2695     }
2696     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2697   }
2698   PetscFunctionReturn(0);
2699 }
2700 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection, or NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: no PetscObjectReference() taken here */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2726 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSection"
/*@
  DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Notes: Any existing Section will be destroyed. Field names stored in the section
  are copied onto the DM's field objects, and any cached global section is
  invalidated so it will be rebuilt on the next DMGetDefaultGlobalSection().

.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old section so that passing the
     currently-set section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's fields on the DM and propagate their names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2768 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Notes: This gets a borrowed reference, so the user should not destroy this PetscSection.
  The global section is built lazily from the default (local) PetscSection and the
  point PetscSF; both must be set beforehand or an error is raised.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Side effect: also (re)builds the PetscLayout of owned values in dm->map */
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2803 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection, or NULL

  Level: intermediate

  Note: Any existing Section will be destroyed

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference first so that passing the currently-set section is safe;
     PetscObjectReference() and PetscSectionDestroy() both accept NULL */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2831 
2832 #undef __FUNCT__
2833 #define __FUNCT__ "DMGetDefaultSF"
2834 /*@
2835   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2836   it is created from the default PetscSection layouts in the DM.
2837 
2838   Input Parameter:
2839 . dm - The DM
2840 
2841   Output Parameter:
2842 . sf - The PetscSF
2843 
2844   Level: intermediate
2845 
2846   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2847 
2848 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2849 @*/
2850 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2851 {
2852   PetscInt       nroots;
2853   PetscErrorCode ierr;
2854 
2855   PetscFunctionBegin;
2856   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2857   PetscValidPointer(sf, 2);
2858   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2859   if (nroots < 0) {
2860     PetscSection section, gSection;
2861 
2862     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2863     if (section) {
2864       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2865       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2866     } else {
2867       *sf = NULL;
2868       PetscFunctionReturn(0);
2869     }
2870   }
2871   *sf = dm->defaultSF;
2872   PetscFunctionReturn(0);
2873 }
2874 
2875 #undef __FUNCT__
2876 #define __FUNCT__ "DMSetDefaultSF"
2877 /*@
2878   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2879 
2880   Input Parameters:
2881 + dm - The DM
2882 - sf - The PetscSF
2883 
2884   Level: intermediate
2885 
2886   Note: Any previous SF is destroyed
2887 
2888 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2889 @*/
2890 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
2891 {
2892   PetscErrorCode ierr;
2893 
2894   PetscFunctionBegin;
2895   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2896   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2897   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2898   dm->defaultSF = sf;
2899   PetscFunctionReturn(0);
2900 }
2901 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The graph is installed into the DM's existing default PetscSF
  (dm->defaultSF) rather than returned to the caller.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* A PetscLayout over the owned unconstrained dofs gives the per-rank ownership
     ranges used below to map a global offset to an (owner rank, local index) pair */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves, i.e. unconstrained dofs of every charted point.
     A negative gdof encodes a remotely-owned point as -(gdof+1) dofs. */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: for each leaf record its local offset and its root (rank, index) */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs, skipping constrained ones;
       note l itself is only advanced in the remote-filling loops below */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the global offset from -(goff+1) and
         binary-search the layout ranges for the owner rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2991 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Not collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference: no PetscObjectReference() taken here */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3017 
3018 #undef __FUNCT__
3019 #define __FUNCT__ "DMSetPointSF"
3020 /*@
3021   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3022 
3023   Input Parameters:
3024 + dm - The DM
3025 - sf - The PetscSF
3026 
3027   Level: intermediate
3028 
3029 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3030 @*/
3031 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3032 {
3033   PetscErrorCode ierr;
3034 
3035   PetscFunctionBegin;
3036   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3037   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3038   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3039   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3040   dm->sf = sf;
3041   PetscFunctionReturn(0);
3042 }
3043 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* Return in numFields the number of field descriptors currently held by the DM */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3054 
3055 #undef __FUNCT__
3056 #define __FUNCT__ "DMSetNumFields"
3057 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3058 {
3059   PetscInt       f;
3060   PetscErrorCode ierr;
3061 
3062   PetscFunctionBegin;
3063   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3064   for (f = 0; f < dm->numFields; ++f) {
3065     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3066   }
3067   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3068   dm->numFields = numFields;
3069   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3070   for (f = 0; f < dm->numFields; ++f) {
3071     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3072   }
3073   PetscFunctionReturn(0);
3074 }
3075 
#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* Return in field a borrowed reference to field object f; errors if the field
   table was never allocated (DMSetNumFields()) or f is out of range */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3088 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that passing the currently-set vector is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* The cached local coordinates are now stale; drop them so
     DMGetCoordinatesLocal() rebuilds them on demand */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3121 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that passing the currently-set vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* The cached global coordinates are now stale; drop them so
     DMGetCoordinates() rebuilds them on demand */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3158 
3159 #undef __FUNCT__
3160 #define __FUNCT__ "DMGetCoordinates"
3161 /*@
3162   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3163 
3164   Not Collective
3165 
3166   Input Parameter:
3167 . dm - the DM
3168 
3169   Output Parameter:
3170 . c - global coordinate vector
3171 
3172   Note:
3173   This is a borrowed reference, so the user should NOT destroy this vector
3174 
3175   Each process has only the local coordinates (does NOT have the ghost coordinates).
3176 
3177   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3178   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3179 
3180   Level: intermediate
3181 
3182 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3183 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3184 @*/
3185 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3186 {
3187   PetscErrorCode ierr;
3188 
3189   PetscFunctionBegin;
3190   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3191   PetscValidPointer(c,2);
3192   if (!dm->coordinates && dm->coordinatesLocal) {
3193     DM cdm = NULL;
3194 
3195     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3196     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3197     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3198     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3199     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3200   }
3201   *c = dm->coordinates;
3202   PetscFunctionReturn(0);
3203 }
3204 
3205 #undef __FUNCT__
3206 #define __FUNCT__ "DMGetCoordinatesLocal"
3207 /*@
3208   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3209 
3210   Collective on DM
3211 
3212   Input Parameter:
3213 . dm - the DM
3214 
3215   Output Parameter:
3216 . c - coordinate vector
3217 
3218   Note:
3219   This is a borrowed reference, so the user should NOT destroy this vector
3220 
3221   Each process has the local and ghost coordinates
3222 
3223   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3224   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3225 
3226   Level: intermediate
3227 
3228 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3229 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3230 @*/
3231 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3232 {
3233   PetscErrorCode ierr;
3234 
3235   PetscFunctionBegin;
3236   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3237   PetscValidPointer(c,2);
3238   if (!dm->coordinatesLocal && dm->coordinates) {
3239     DM cdm = NULL;
3240 
3241     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3242     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3243     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3244     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3245     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3246   }
3247   *c = dm->coordinatesLocal;
3248   PetscFunctionReturn(0);
3249 }
3250 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy the returned DM.

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily created via the implementation's createcoordinatedm method, then cached */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3283 
3284 #undef __FUNCT__
3285 #define __FUNCT__ "DMLocatePoints"
3286 /*@
3287   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3288 
3289   Not collective
3290 
3291   Input Parameters:
3292 + dm - The DM
3293 - v - The Vec of points
3294 
3295   Output Parameter:
3296 . cells - The local cell numbers for cells which contain the points
3297 
3298   Level: developer
3299 
3300 .keywords: point location, mesh
3301 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3302 @*/
3303 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3304 {
3305   PetscErrorCode ierr;
3306 
3307   PetscFunctionBegin;
3308   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3309   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3310   PetscValidPointer(cells,3);
3311   if (dm->ops->locatepoints) {
3312     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3313   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3314   PetscFunctionReturn(0);
3315 }
3316