xref: /petsc/src/dm/interface/dm.c (revision 519f805a543c2a7f195bcba21173bb2cdfadb956)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  /* With static libraries the packages this DM depends on must be registered explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* Allocates the _p_DM header and its ops table; DMDestroy/DMView are registered
     as the generic destroy/view entry points for the PetscObject machinery */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty function table; DMSetType() fills it in later */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;                  /* bs==1 means no exploitable block structure yet */
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* 10 is the fixed capacity of the nullspaceConstructors table,
       matching the bound checked in DMSetNullSpaceConstructor() */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMSetVecType"
93 /*@C
94        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
95 
96    Logically Collective on DMDA
97 
98    Input Parameter:
99 +  da - initial distributed array
100 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
101 
102    Options Database:
103 .   -dm_vec_type ctype
104 
105    Level: intermediate
106 
107 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
108 @*/
109 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   PetscValidHeaderSpecific(da,DM_CLASSID,1);
115   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
116   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "VecGetDM"
122 /*@
123   VecGetDM - Gets the DM defining the data layout of the vector
124 
125   Not collective
126 
127   Input Parameter:
128 . v - The Vec
129 
130   Output Parameter:
131 . dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecGetDM(Vec v, DM *dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidPointer(dm,2);
144   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #undef __FUNCT__
175 #define __FUNCT__ "DMSetMatType"
176 /*@C
177        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
178 
179    Logically Collective on DM
180 
181    Input Parameter:
182 +  dm - the DM context
183 .  ctype - the matrix type
184 
185    Options Database:
186 .   -dm_mat_type ctype
187 
188    Level: intermediate
189 
190 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
191 @*/
192 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
193 {
194   PetscErrorCode ierr;
195 
196   PetscFunctionBegin;
197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
198   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
199   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "MatGetDM"
205 /*@
206   MatGetDM - Gets the DM defining the data layout of the matrix
207 
208   Not collective
209 
210   Input Parameter:
211 . A - The Mat
212 
213   Output Parameter:
214 . dm - The DM
215 
216   Level: intermediate
217 
218 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
219 @*/
220 PetscErrorCode MatGetDM(Mat A, DM *dm)
221 {
222   PetscErrorCode ierr;
223 
224   PetscFunctionBegin;
225   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
226   PetscValidPointer(dm,2);
227   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatSetDM"
233 /*@
234   MatSetDM - Sets the DM defining the data layout of the matrix
235 
236   Not collective
237 
238   Input Parameters:
239 + A - The Mat
240 - dm - The DM
241 
242   Level: intermediate
243 
244 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
245 @*/
246 PetscErrorCode MatSetDM(Mat A, DM dm)
247 {
248   PetscErrorCode ierr;
249 
250   PetscFunctionBegin;
251   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
252   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
253   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMSetOptionsPrefix"
259 /*@C
260    DMSetOptionsPrefix - Sets the prefix used for searching for all
261    DMDA options in the database.
262 
263    Logically Collective on DMDA
264 
265    Input Parameter:
266 +  da - the DMDA context
267 -  prefix - the prefix to prepend to all option names
268 
269    Notes:
270    A hyphen (-) must NOT be given at the beginning of the prefix name.
271    The first character of all runtime options is AUTOMATICALLY the hyphen.
272 
273    Level: advanced
274 
275 .keywords: DMDA, set, options, prefix, database
276 
277 .seealso: DMSetFromOptions()
278 @*/
279 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
280 {
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
285   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
286   PetscFunctionReturn(0);
287 }
288 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Notes:
    The DM and some of its cached vectors reference each other, so plain
    reference counting is not sufficient; this routine first counts those
    internal circular references and only proceeds with destruction once
    all remaining references are internal.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  {cnt++;}
    if ((*dm)->globalin[i]) {cnt++;}
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    /* The solution vector x counts only if it points back at this DM */
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* If external references remain beyond the circular ones, just drop ours */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = PETSC_NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = PETSC_NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = PETSC_NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = PETSC_NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = PETSC_NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = PETSC_NULL;
  }

  /* Solver contexts attached by KSP/SNES/TS */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs last, after all generic state is gone */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
429 
430 #undef __FUNCT__
431 #define __FUNCT__ "DMSetUp"
432 /*@
433     DMSetUp - sets up the data structures inside a DM object
434 
435     Collective on DM
436 
437     Input Parameter:
438 .   dm - the DM object to setup
439 
440     Level: developer
441 
442 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
443 
444 @*/
445 PetscErrorCode  DMSetUp(DM dm)
446 {
447   PetscErrorCode ierr;
448 
449   PetscFunctionBegin;
450   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
451   if (dm->setupcalled) PetscFunctionReturn(0);
452   if (dm->ops->setup) {
453     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
454   }
455   dm->setupcalled = PETSC_TRUE;
456   PetscFunctionReturn(0);
457 }
458 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;  /* fallback shown for -dm_mat_type when none is set */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All PetscOptionsXXX calls below must stay between Begin and End */
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
    /* Let the concrete DM implementation process its own options */
    if (dm->ops->setfromoptions) {
      ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
    }
    /* process any options handlers added with PetscObjectAddOptionsHandler() */
    ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
508 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; pass PETSC_NULL to use the communicator's stdout viewer

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: the DM file classid followed by the type name in a
       fixed 256-byte field; DMLoad-style readers depend on this layout */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Delegate the body of the output to the concrete DM implementation */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
549 
550 #undef __FUNCT__
551 #define __FUNCT__ "DMCreateGlobalVector"
552 /*@
553     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
554 
555     Collective on DM
556 
557     Input Parameter:
558 .   dm - the DM object
559 
560     Output Parameter:
561 .   vec - the global vector
562 
563     Level: beginner
564 
565 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
566 
567 @*/
568 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
569 {
570   PetscErrorCode ierr;
571 
572   PetscFunctionBegin;
573   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
574   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
575   PetscFunctionReturn(0);
576 }
577 
578 #undef __FUNCT__
579 #define __FUNCT__ "DMCreateLocalVector"
580 /*@
581     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
582 
583     Not Collective
584 
585     Input Parameter:
586 .   dm - the DM object
587 
588     Output Parameter:
589 .   vec - the local vector
590 
591     Level: beginner
592 
593 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
594 
595 @*/
596 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
597 {
598   PetscErrorCode ierr;
599 
600   PetscFunctionBegin;
601   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
602   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
603   PetscFunctionReturn(0);
604 }
605 
606 #undef __FUNCT__
607 #define __FUNCT__ "DMGetLocalToGlobalMapping"
608 /*@
609    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
610 
611    Collective on DM
612 
613    Input Parameter:
614 .  dm - the DM that provides the mapping
615 
616    Output Parameter:
617 .  ltog - the mapping
618 
619    Level: intermediate
620 
621    Notes:
622    This mapping can then be used by VecSetLocalToGlobalMapping() or
623    MatSetLocalToGlobalMapping().
624 
625 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
626 @*/
627 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
628 {
629   PetscErrorCode ierr;
630 
631   PetscFunctionBegin;
632   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
633   PetscValidPointer(ltog,2);
634   if (!dm->ltogmap) {
635     PetscSection section, sectionGlobal;
636 
637     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
638     if (section) {
639       PetscInt      *ltog;
640       PetscInt       pStart, pEnd, size, p, l;
641 
642       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
643       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
644       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
645       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
646       for (p = pStart, l = 0; p < pEnd; ++p) {
647         PetscInt dof, off, c;
648 
649         /* Should probably use constrained dofs */
650         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
651         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
652         for (c = 0; c < dof; ++c, ++l) {
653           ltog[l] = off+c;
654         }
655       }
656       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
657       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
658     } else {
659       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
660       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
661     }
662   }
663   *ltog = dm->ltogmap;
664   PetscFunctionReturn(0);
665 }
666 
667 #undef __FUNCT__
668 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
669 /*@
670    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
671 
672    Collective on DM
673 
674    Input Parameter:
675 .  da - the distributed array that provides the mapping
676 
677    Output Parameter:
678 .  ltog - the block mapping
679 
680    Level: intermediate
681 
682    Notes:
683    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
684    MatSetLocalToGlobalMappingBlock().
685 
686 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
687 @*/
688 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
689 {
690   PetscErrorCode ierr;
691 
692   PetscFunctionBegin;
693   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
694   PetscValidPointer(ltog,2);
695   if (!dm->ltogmapb) {
696     PetscInt bs;
697     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
698     if (bs > 1) {
699       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
700       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
701     } else {
702       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
703       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
704     }
705   }
706   *ltog = dm->ltogmapb;
707   PetscFunctionReturn(0);
708 }
709 
710 #undef __FUNCT__
711 #define __FUNCT__ "DMGetBlockSize"
712 /*@
713    DMGetBlockSize - Gets the inherent block size associated with a DM
714 
715    Not Collective
716 
717    Input Parameter:
718 .  dm - the DM with block structure
719 
720    Output Parameter:
721 .  bs - the block size, 1 implies no exploitable block structure
722 
723    Level: intermediate
724 
725 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
726 @*/
727 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
728 {
729   PetscFunctionBegin;
730   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
731   PetscValidPointer(bs,2);
732   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
733   *bs = dm->bs;
734   PetscFunctionReturn(0);
735 }
736 
737 #undef __FUNCT__
738 #define __FUNCT__ "DMCreateInterpolation"
739 /*@
740     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
741 
742     Collective on DM
743 
744     Input Parameter:
745 +   dm1 - the DM object
746 -   dm2 - the second, finer DM object
747 
748     Output Parameter:
749 +  mat - the interpolation
750 -  vec - the scaling (optional)
751 
752     Level: developer
753 
754     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
755         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
756 
757         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
758         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
759 
760 
761 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
762 
763 @*/
764 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
765 {
766   PetscErrorCode ierr;
767 
768   PetscFunctionBegin;
769   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
770   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
771   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
772   PetscFunctionReturn(0);
773 }
774 
775 #undef __FUNCT__
776 #define __FUNCT__ "DMCreateInjection"
777 /*@
778     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
779 
780     Collective on DM
781 
782     Input Parameter:
783 +   dm1 - the DM object
784 -   dm2 - the second, finer DM object
785 
786     Output Parameter:
787 .   ctx - the injection
788 
789     Level: developer
790 
791    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
792         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
793 
794 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
795 
796 @*/
797 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
798 {
799   PetscErrorCode ierr;
800 
801   PetscFunctionBegin;
802   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
803   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
804   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
805   PetscFunctionReturn(0);
806 }
807 
808 #undef __FUNCT__
809 #define __FUNCT__ "DMCreateColoring"
810 /*@C
811     DMCreateColoring - Gets coloring for a DMDA or DMComposite
812 
813     Collective on DM
814 
815     Input Parameter:
816 +   dm - the DM object
817 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
818 -   matype - either MATAIJ or MATBAIJ
819 
820     Output Parameter:
821 .   coloring - the coloring
822 
823     Level: developer
824 
825 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
826 
827 @*/
828 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
829 {
830   PetscErrorCode ierr;
831 
832   PetscFunctionBegin;
833   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
834   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
835   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "DMCreateMatrix"
841 /*@C
842     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
843 
844     Collective on DM
845 
846     Input Parameter:
847 +   dm - the DM object
848 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
849             any type which inherits from one of these (such as MATAIJ)
850 
851     Output Parameter:
852 .   mat - the empty Jacobian
853 
854     Level: beginner
855 
856     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
857        do not need to do it yourself.
858 
859        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
860        the nonzero pattern call DMDASetMatPreallocateOnly()
861 
862        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
863        internally by PETSc.
864 
865        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
866        the indices for the global numbering for DMDAs which is complicated.
867 
868 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
869 
870 @*/
871 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
872 {
873   PetscErrorCode ierr;
874 
875   PetscFunctionBegin;
876   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
877 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
878   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
879 #endif
880   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
881   PetscValidPointer(mat,3);
882   if (dm->mattype) {
883     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
884   } else {
885     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
886   }
887   PetscFunctionReturn(0);
888 }
889 
890 #undef __FUNCT__
891 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
892 /*@
893   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
894     preallocated but the nonzero structure and zero values will not be set.
895 
896   Logically Collective on DMDA
897 
898   Input Parameter:
899 + dm - the DM
900 - only - PETSC_TRUE if only want preallocation
901 
902   Level: developer
903 .seealso DMCreateMatrix()
904 @*/
905 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
906 {
907   PetscFunctionBegin;
908   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
909   dm->prealloc_only = only;
910   PetscFunctionReturn(0);
911 }
912 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array (passed as void*)

  Notes:
  Arrays are recycled through two linked lists on the DM: 'workin' holds
  idle arrays and 'workout' holds checked-out ones. An idle link is reused
  (growing its buffer if too small) before a new one is allocated.

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Pop an idle link off the free list */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer only when the request exceeds what this link already holds */
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push onto the checked-out list; DMRestoreWorkArray() moves it back */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
958 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the pointer returned by DMGetWorkArray(); nulled on return

  Notes:
  It is an error to restore an array that was not checked out from this DM.

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Walk the checked-out list looking for the link that owns this buffer;
     p tracks the address of the incoming pointer so the link can be unspliced */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      /* Move the link from the checked-out list back to the idle list */
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0); /* not reached; SETERRQ returns above */
}
997 
998 #undef __FUNCT__
999 #define __FUNCT__ "DMSetNullSpaceConstructor"
1000 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1001 {
1002   PetscFunctionBegin;
1003   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1004   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1005   dm->nullspaceConstructors[field] = nullsp;
1006   PetscFunctionReturn(0);
1007 }
1008 
1009 #undef __FUNCT__
1010 #define __FUNCT__ "DMCreateFieldIS"
1011 /*@C
1012   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1013 
1014   Not collective
1015 
1016   Input Parameter:
1017 . dm - the DM object
1018 
1019   Output Parameters:
1020 + numFields  - The number of fields (or PETSC_NULL if not requested)
1021 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1022 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1023 
1024   Level: intermediate
1025 
1026   Notes:
1027   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1028   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1029   PetscFree().
1030 
1031 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1032 @*/
1033 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1034 {
1035   PetscSection   section, sectionGlobal;
1036   PetscErrorCode ierr;
1037 
1038   PetscFunctionBegin;
1039   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1040   if (numFields) {
1041     PetscValidPointer(numFields,2);
1042     *numFields = 0;
1043   }
1044   if (fieldNames) {
1045     PetscValidPointer(fieldNames,3);
1046     *fieldNames = PETSC_NULL;
1047   }
1048   if (fields) {
1049     PetscValidPointer(fields,4);
1050     *fields = PETSC_NULL;
1051   }
1052   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1053   if (section) {
1054     PetscInt *fieldSizes, **fieldIndices;
1055     PetscInt  nF, f, pStart, pEnd, p;
1056 
1057     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1058     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1059     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1060     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1061     for (f = 0; f < nF; ++f) {
1062       fieldSizes[f] = 0;
1063     }
1064     for (p = pStart; p < pEnd; ++p) {
1065       PetscInt gdof;
1066 
1067       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1068       if (gdof > 0) {
1069         for (f = 0; f < nF; ++f) {
1070           PetscInt fdof, fcdof;
1071 
1072           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1073           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1074           fieldSizes[f] += fdof-fcdof;
1075         }
1076       }
1077     }
1078     for (f = 0; f < nF; ++f) {
1079       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1080       fieldSizes[f] = 0;
1081     }
1082     for (p = pStart; p < pEnd; ++p) {
1083       PetscInt gdof, goff;
1084 
1085       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1086       if (gdof > 0) {
1087         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1088         for (f = 0; f < nF; ++f) {
1089           PetscInt fdof, fcdof, fc;
1090 
1091           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1092           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1093           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1094             fieldIndices[f][fieldSizes[f]] = goff++;
1095           }
1096         }
1097       }
1098     }
1099     if (numFields) {*numFields = nF;}
1100     if (fieldNames) {
1101       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1102       for (f = 0; f < nF; ++f) {
1103         const char *fieldName;
1104 
1105         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1106         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1107       }
1108     }
1109     if (fields) {
1110       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1111       for (f = 0; f < nF; ++f) {
1112         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1113       }
1114     }
1115     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1116   } else {
1117     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1118   }
1119   PetscFunctionReturn(0);
1120 }
1121 
1122 #undef __FUNCT__
1123 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1124 /*@C
1125   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1126 
1127   Not Collective
1128 
1129   Input Parameters:
1130 + dm   - the DM object
1131 - name - the name of the field decomposition
1132 
1133   Output Parameter:
1134 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1135 
1136   Level: advanced
1137 
1138 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1139 @*/
1140 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1141 {
1142   PetscErrorCode ierr;
1143 
1144   PetscFunctionBegin;
1145   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1146   PetscValidCharPointer(name,2);
1147   PetscValidPointer(ddm,3);
1148   *ddm = PETSC_NULL;
1149   if (dm->ops->createfielddecompositiondm) {
1150     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1151   }
1152   PetscFunctionReturn(0);
1153 }
1154 
1155 #undef __FUNCT__
1156 #define __FUNCT__ "DMCreateFieldDecomposition"
1157 /*@C
1158   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1159                           corresponding to different fields: each IS contains the global indices of the dofs of the
1160                           corresponding field. The optional list of DMs define the DM for each subproblem.
1161                           Generalizes DMCreateFieldIS().
1162 
1163   Not collective
1164 
1165   Input Parameter:
1166 . dm - the DM object
1167 
1168   Output Parameters:
1169 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1170 . namelist  - The name for each field (or PETSC_NULL if not requested)
1171 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1172 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1173 
1174   Level: intermediate
1175 
1176   Notes:
1177   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1178   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1179   and all of the arrays should be freed with PetscFree().
1180 
1181 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1182 @*/
1183 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1184 {
1185   PetscErrorCode ierr;
1186 
1187   PetscFunctionBegin;
1188   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1189   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1190   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1191   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1192   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1193   if (!dm->ops->createfielddecomposition) {
1194     PetscSection section;
1195     PetscInt     numFields, f;
1196 
1197     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1198     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1199     if (section && numFields && dm->ops->createsubdm) {
1200       *len = numFields;
1201       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1202       for (f = 0; f < numFields; ++f) {
1203         const char *fieldName;
1204 
1205         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1206         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1207         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1208       }
1209     } else {
1210       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1211       /* By default there are no DMs associated with subproblems. */
1212       if (dmlist) *dmlist = PETSC_NULL;
1213     }
1214   }
1215   else {
1216     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1217   }
1218   PetscFunctionReturn(0);
1219 }
1220 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1256 
1257 #undef __FUNCT__
1258 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1259 /*@C
1260   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1261 
1262   Not Collective
1263 
1264   Input Parameters:
1265 + dm   - the DM object
1266 - name - the name of the subdomain decomposition
1267 
1268   Output Parameter:
1269 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1270 
1271   Level: advanced
1272 
1273 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1274 @*/
1275 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1276 {
1277   PetscErrorCode ierr;
1278 
1279   PetscFunctionBegin;
1280   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1281   PetscValidCharPointer(name,2);
1282   PetscValidPointer(ddm,3);
1283   *ddm = PETSC_NULL;
1284   if (dm->ops->createdomaindecompositiondm) {
1285     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1286   }
1287   PetscFunctionReturn(0);
1288 }
1289 
1290 #undef __FUNCT__
1291 #define __FUNCT__ "DMCreateDomainDecomposition"
1292 /*@C
1293   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1294                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1295                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1296                           define a nonoverlapping covering, while outer subdomains can overlap.
1297                           The optional list of DMs define the DM for each subproblem.
1298 
1299   Not collective
1300 
1301   Input Parameter:
1302 . dm - the DM object
1303 
1304   Output Parameters:
1305 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1306 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1307 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1308 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1309 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1310 
1311   Level: intermediate
1312 
1313   Notes:
1314   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1315   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1316   and all of the arrays should be freed with PetscFree().
1317 
1318 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1319 @*/
1320 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1321 {
1322   PetscErrorCode ierr;
1323   DMSubDomainHookLink link;
1324   PetscInt            i,l;
1325 
1326   PetscFunctionBegin;
1327   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1328   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1329   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1330   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1331   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1332   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1333   if (dm->ops->createdomaindecomposition) {
1334     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1335     /* copy subdomain hooks and context over to the subdomain DMs */
1336     if (dmlist) {
1337       for (i = 0; i < l; i++) {
1338         for (link=dm->subdomainhook; link; link=link->next) {
1339           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1340         }
1341         (*dmlist)[i]->ctx = dm->ctx;
1342       }
1343     }
1344     if (len) *len = l;
1345   }
1346   PetscFunctionReturn(0);
1347 }
1348 
1349 
1350 #undef __FUNCT__
1351 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1352 /*@C
1353   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1354 
1355   Not collective
1356 
1357   Input Parameters:
1358 + dm - the DM object
1359 . n  - the number of subdomain scatters
1360 - subdms - the local subdomains
1361 
1362   Output Parameters:
1363 + n     - the number of scatters returned
1364 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1365 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1366 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1367 
1368   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1369   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1370   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1371   solution and residual data.
1372 
1373   Level: developer
1374 
1375 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1376 @*/
1377 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1378 {
1379   PetscErrorCode ierr;
1380 
1381   PetscFunctionBegin;
1382   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1383   PetscValidPointer(subdms,3);
1384   PetscValidPointer(iscat,4);
1385   PetscValidPointer(oscat,5);
1386   PetscValidPointer(gscat,6);
1387   if (dm->ops->createddscatters) {
1388     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1389   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1390   PetscFunctionReturn(0);
1391 }
1392 
1393 #undef __FUNCT__
1394 #define __FUNCT__ "DMRefine"
1395 /*@
1396   DMRefine - Refines a DM object
1397 
1398   Collective on DM
1399 
1400   Input Parameter:
1401 + dm   - the DM object
1402 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1403 
1404   Output Parameter:
1405 . dmf - the refined DM, or PETSC_NULL
1406 
1407   Note: If no refinement was done, the return value is PETSC_NULL
1408 
1409   Level: developer
1410 
1411 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1412 @*/
1413 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1414 {
1415   PetscErrorCode ierr;
1416   DMRefineHookLink link;
1417 
1418   PetscFunctionBegin;
1419   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1420   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1421   if (*dmf) {
1422     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1423     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1424     (*dmf)->ctx       = dm->ctx;
1425     (*dmf)->leveldown = dm->leveldown;
1426     (*dmf)->levelup   = dm->levelup + 1;
1427     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1428     for (link=dm->refinehook; link; link=link->next) {
1429       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1430     }
1431   }
1432   PetscFunctionReturn(0);
1433 }
1434 
1435 #undef __FUNCT__
1436 #define __FUNCT__ "DMRefineHookAdd"
1437 /*@
1438    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1439 
1440    Logically Collective
1441 
1442    Input Arguments:
1443 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1444 .  refinehook - function to run when setting up a coarser level
1445 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1446 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1447 
1448    Calling sequence of refinehook:
1449 $    refinehook(DM coarse,DM fine,void *ctx);
1450 
1451 +  coarse - coarse level DM
1452 .  fine - fine level DM to interpolate problem to
1453 -  ctx - optional user-defined function context
1454 
1455    Calling sequence for interphook:
1456 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1457 
1458 +  coarse - coarse level DM
1459 .  interp - matrix interpolating a coarse-level solution to the finer grid
1460 .  fine - fine level DM to update
1461 -  ctx - optional user-defined function context
1462 
1463    Level: advanced
1464 
1465    Notes:
1466    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1467 
1468    If this function is called multiple times, the hooks will be run in the order they are added.
1469 
1470 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1471 @*/
1472 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1473 {
1474   PetscErrorCode ierr;
1475   DMRefineHookLink link,*p;
1476 
1477   PetscFunctionBegin;
1478   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1479   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1480   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1481   link->refinehook = refinehook;
1482   link->interphook = interphook;
1483   link->ctx = ctx;
1484   link->next = PETSC_NULL;
1485   *p = link;
1486   PetscFunctionReturn(0);
1487 }
1488 
1489 #undef __FUNCT__
1490 #define __FUNCT__ "DMInterpolate"
1491 /*@
1492    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1493 
1494    Collective if any hooks are
1495 
1496    Input Arguments:
1497 +  coarse - coarser DM to use as a base
1498 .  restrct - interpolation matrix, apply using MatInterpolate()
1499 -  fine - finer DM to update
1500 
1501    Level: developer
1502 
1503 .seealso: DMRefineHookAdd(), MatInterpolate()
1504 @*/
1505 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1506 {
1507   PetscErrorCode ierr;
1508   DMRefineHookLink link;
1509 
1510   PetscFunctionBegin;
1511   for (link=fine->refinehook; link; link=link->next) {
1512     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1513   }
1514   PetscFunctionReturn(0);
1515 }
1516 
1517 #undef __FUNCT__
1518 #define __FUNCT__ "DMGetRefineLevel"
1519 /*@
1520     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1521 
1522     Not Collective
1523 
1524     Input Parameter:
1525 .   dm - the DM object
1526 
1527     Output Parameter:
1528 .   level - number of refinements
1529 
1530     Level: developer
1531 
1532 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1533 
1534 @*/
1535 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1536 {
1537   PetscFunctionBegin;
1538   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1539   *level = dm->levelup;
1540   PetscFunctionReturn(0);
1541 }
1542 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1600 
1601 #undef __FUNCT__
1602 #define __FUNCT__ "DMGlobalToLocalBegin"
1603 /*@
1604     DMGlobalToLocalBegin - Begins updating local vectors from global vector
1605 
1606     Neighbor-wise Collective on DM
1607 
1608     Input Parameters:
1609 +   dm - the DM object
1610 .   g - the global vector
1611 .   mode - INSERT_VALUES or ADD_VALUES
1612 -   l - the local vector
1613 
1614 
1615     Level: beginner
1616 
1617 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1618 
1619 @*/
1620 PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1621 {
1622   PetscSF        sf;
1623   PetscErrorCode ierr;
1624   DMGlobalToLocalHookLink link;
1625 
1626   PetscFunctionBegin;
1627   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1628   for (link=dm->gtolhook; link; link=link->next) {
1629     if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1630   }
1631   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1632   if (sf) {
1633     PetscScalar *lArray, *gArray;
1634 
1635     if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1636     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1637     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1638     ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1639     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1640     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1641   } else {
1642     ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1643   }
1644   PetscFunctionReturn(0);
1645 }
1646 
1647 #undef __FUNCT__
1648 #define __FUNCT__ "DMGlobalToLocalEnd"
1649 /*@
1650     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1651 
1652     Neighbor-wise Collective on DM
1653 
1654     Input Parameters:
1655 +   dm - the DM object
1656 .   g - the global vector
1657 .   mode - INSERT_VALUES or ADD_VALUES
1658 -   l - the local vector
1659 
1660 
1661     Level: beginner
1662 
1663 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1664 
1665 @*/
1666 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1667 {
1668   PetscSF        sf;
1669   PetscErrorCode ierr;
1670   PetscScalar    *lArray, *gArray;
1671   DMGlobalToLocalHookLink link;
1672 
1673   PetscFunctionBegin;
1674   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1675   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1676   if (sf) {
1677   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1678 
1679     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1680     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1681     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1682     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1683     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1684   } else {
1685     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1686   }
1687   for (link=dm->gtolhook; link; link=link->next) {
1688     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1689   }
1690   PetscFunctionReturn(0);
1691 }
1692 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the InsertMode onto the MPI reduction used by the star forest */
    switch(mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
  default:
    SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementation; *_ALL_VALUES modes are mapped to the plain modes */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1752 
1753 #undef __FUNCT__
1754 #define __FUNCT__ "DMLocalToGlobalEnd"
1755 /*@
1756     DMLocalToGlobalEnd - updates global vectors from local vectors
1757 
1758     Neighbor-wise Collective on DM
1759 
1760     Input Parameters:
1761 +   dm - the DM object
1762 .   l - the local vector
1763 .   mode - INSERT_VALUES or ADD_VALUES
1764 -   g - the global vector
1765 
1766 
1767     Level: beginner
1768 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1770 
1771 @*/
1772 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1773 {
1774   PetscSF        sf;
1775   PetscErrorCode ierr;
1776 
1777   PetscFunctionBegin;
1778   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1779   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1780   if (sf) {
1781     MPI_Op       op;
1782     PetscScalar *lArray, *gArray;
1783 
1784     switch(mode) {
1785     case INSERT_VALUES:
1786     case INSERT_ALL_VALUES:
1787 #if defined(PETSC_HAVE_MPI_REPLACE)
1788       op = MPI_REPLACE; break;
1789 #else
1790       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1791 #endif
1792     case ADD_VALUES:
1793     case ADD_ALL_VALUES:
1794       op = MPI_SUM; break;
1795     default:
1796       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1797     }
1798     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1799     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1800     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1801     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1802     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1803   } else {
1804     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1805   }
1806   PetscFunctionReturn(0);
1807 }
1808 
1809 #undef __FUNCT__
1810 #define __FUNCT__ "DMCoarsen"
1811 /*@
1812     DMCoarsen - Coarsens a DM object
1813 
1814     Collective on DM
1815 
1816     Input Parameter:
1817 +   dm - the DM object
1818 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1819 
1820     Output Parameter:
1821 .   dmc - the coarsened DM
1822 
1823     Level: developer
1824 
1825 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1826 
1827 @*/
1828 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1829 {
1830   PetscErrorCode ierr;
1831   DMCoarsenHookLink link;
1832 
1833   PetscFunctionBegin;
1834   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1835   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1836   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1837   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1838   (*dmc)->ctx       = dm->ctx;
1839   (*dmc)->levelup   = dm->levelup;
1840   (*dmc)->leveldown = dm->leveldown + 1;
1841   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1842   for (link=dm->coarsenhook; link; link=link->next) {
1843     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1844   }
1845   PetscFunctionReturn(0);
1846 }
1847 
1848 #undef __FUNCT__
1849 #define __FUNCT__ "DMCoarsenHookAdd"
1850 /*@
1851    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1852 
1853    Logically Collective
1854 
1855    Input Arguments:
1856 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1857 .  coarsenhook - function to run when setting up a coarser level
1858 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)
1860 
1861    Calling sequence of coarsenhook:
1862 $    coarsenhook(DM fine,DM coarse,void *ctx);
1863 
1864 +  fine - fine level DM
1865 .  coarse - coarse level DM to restrict problem to
1866 -  ctx - optional user-defined function context
1867 
1868    Calling sequence for restricthook:
1869 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1870 
1871 +  fine - fine level DM
1872 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1873 .  rscale - scaling vector for restriction
1874 .  inject - matrix restricting by injection
1875 .  coarse - coarse level DM to update
1876 -  ctx - optional user-defined function context
1877 
1878    Level: advanced
1879 
1880    Notes:
1881    This function is only needed if auxiliary data needs to be set up on coarse grids.
1882 
1883    If this function is called multiple times, the hooks will be run in the order they are added.
1884 
1885    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1886    extract the finest level information from its context (instead of from the SNES).
1887 
1888 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1889 @*/
1890 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1891 {
1892   PetscErrorCode ierr;
1893   DMCoarsenHookLink link,*p;
1894 
1895   PetscFunctionBegin;
1896   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1897   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1898   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1899   link->coarsenhook = coarsenhook;
1900   link->restricthook = restricthook;
1901   link->ctx = ctx;
1902   link->next = PETSC_NULL;
1903   *p = link;
1904   PetscFunctionReturn(0);
1905 }
1906 
1907 #undef __FUNCT__
1908 #define __FUNCT__ "DMRestrict"
1909 /*@
1910    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1911 
1912    Collective if any hooks are
1913 
1914    Input Arguments:
1915 +  fine - finer DM to use as a base
1916 .  restrct - restriction matrix, apply using MatRestrict()
1917 .  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1919 
1920    Level: developer
1921 
1922 .seealso: DMCoarsenHookAdd(), MatRestrict()
1923 @*/
1924 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1925 {
1926   PetscErrorCode ierr;
1927   DMCoarsenHookLink link;
1928 
1929   PetscFunctionBegin;
1930   for (link=fine->coarsenhook; link; link=link->next) {
1931     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1932   }
1933   PetscFunctionReturn(0);
1934 }
1935 
1936 #undef __FUNCT__
1937 #define __FUNCT__ "DMSubDomainHookAdd"
1938 /*@
1939    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1940 
1941    Logically Collective
1942 
1943    Input Arguments:
1944 +  global - global DM
1945 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)
1947 
1948    Calling sequence for restricthook:
1949 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1950 
1951 +  global - global DM
1952 .  out    - scatter to the outer (with ghost and overlap points) block vector
1953 .  in     - scatter to block vector values only owned locally
1954 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1955 -  ctx - optional user-defined function context
1956 
1957    Level: advanced
1958 
1959    Notes:
1960    This function is only needed if auxiliary data needs to be set up on coarse grids.
1961 
1962    If this function is called multiple times, the hooks will be run in the order they are added.
1963 
1964    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1965    extract the finest level information from its context (instead of from the SNES).
1966 
1967 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1968 @*/
1969 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1970 {
1971   PetscErrorCode      ierr;
1972   DMSubDomainHookLink link,*p;
1973 
1974   PetscFunctionBegin;
1975   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1976   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1977   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
1978   link->restricthook = restricthook;
1979   link->ddhook = ddhook;
1980   link->ctx = ctx;
1981   link->next = PETSC_NULL;
1982   *p = link;
1983   PetscFunctionReturn(0);
1984 }
1985 
1986 #undef __FUNCT__
1987 #define __FUNCT__ "DMSubDomainRestrict"
1988 /*@
1989    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1990 
1991    Collective if any hooks are
1992 
1993    Input Arguments:
1994 +  fine - finer DM to use as a base
1995 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
1996 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  coarse - coarser DM to update
1998 
1999    Level: developer
2000 
2001 .seealso: DMCoarsenHookAdd(), MatRestrict()
2002 @*/
2003 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2004 {
2005   PetscErrorCode ierr;
2006   DMSubDomainHookLink link;
2007 
2008   PetscFunctionBegin;
2009   for (link=global->subdomainhook; link; link=link->next) {
2010     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2011   }
2012   PetscFunctionReturn(0);
2013 }
2014 
2015 #undef __FUNCT__
2016 #define __FUNCT__ "DMGetCoarsenLevel"
2017 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2019 
2020     Not Collective
2021 
2022     Input Parameter:
2023 .   dm - the DM object
2024 
2025     Output Parameter:
2026 .   level - number of coarsenings
2027 
2028     Level: developer
2029 
2030 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2031 
2032 @*/
2033 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2034 {
2035   PetscFunctionBegin;
2036   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2037   *level = dm->leveldown;
2038   PetscFunctionReturn(0);
2039 }
2040 
2041 
2042 
2043 #undef __FUNCT__
2044 #define __FUNCT__ "DMRefineHierarchy"
2045 /*@C
2046     DMRefineHierarchy - Refines a DM object, all levels at once
2047 
2048     Collective on DM
2049 
2050     Input Parameter:
2051 +   dm - the DM object
2052 -   nlevels - the number of levels of refinement
2053 
2054     Output Parameter:
2055 .   dmf - the refined DM hierarchy
2056 
2057     Level: developer
2058 
2059 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2060 
2061 @*/
2062 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2063 {
2064   PetscErrorCode ierr;
2065 
2066   PetscFunctionBegin;
2067   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2068   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2069   if (nlevels == 0) PetscFunctionReturn(0);
2070   if (dm->ops->refinehierarchy) {
2071     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2072   } else if (dm->ops->refine) {
2073     PetscInt i;
2074 
2075     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2076     for (i=1; i<nlevels; i++) {
2077       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2078     }
2079   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2080   PetscFunctionReturn(0);
2081 }
2082 
2083 #undef __FUNCT__
2084 #define __FUNCT__ "DMCoarsenHierarchy"
2085 /*@C
2086     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2087 
2088     Collective on DM
2089 
2090     Input Parameter:
2091 +   dm - the DM object
2092 -   nlevels - the number of levels of coarsening
2093 
2094     Output Parameter:
2095 .   dmc - the coarsened DM hierarchy
2096 
2097     Level: developer
2098 
2099 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2100 
2101 @*/
2102 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2103 {
2104   PetscErrorCode ierr;
2105 
2106   PetscFunctionBegin;
2107   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2108   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2109   if (nlevels == 0) PetscFunctionReturn(0);
2110   PetscValidPointer(dmc,3);
2111   if (dm->ops->coarsenhierarchy) {
2112     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2113   } else if (dm->ops->coarsen) {
2114     PetscInt i;
2115 
2116     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2117     for (i=1; i<nlevels; i++) {
2118       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2119     }
2120   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2121   PetscFunctionReturn(0);
2122 }
2123 
2124 #undef __FUNCT__
2125 #define __FUNCT__ "DMCreateAggregates"
2126 /*@
2127    DMCreateAggregates - Gets the aggregates that map between
2128    grids associated with two DMs.
2129 
2130    Collective on DM
2131 
2132    Input Parameters:
2133 +  dmc - the coarse grid DM
2134 -  dmf - the fine grid DM
2135 
2136    Output Parameters:
2137 .  rest - the restriction matrix (transpose of the projection matrix)
2138 
2139    Level: intermediate
2140 
2141 .keywords: interpolation, restriction, multigrid
2142 
2143 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2144 @*/
2145 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2146 {
2147   PetscErrorCode ierr;
2148 
2149   PetscFunctionBegin;
2150   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2151   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2152   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2153   PetscFunctionReturn(0);
2154 }
2155 
2156 #undef __FUNCT__
2157 #define __FUNCT__ "DMSetApplicationContextDestroy"
2158 /*@C
2159     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2160 
2161     Not Collective
2162 
2163     Input Parameters:
2164 +   dm - the DM object
2165 -   destroy - the destroy function
2166 
2167     Level: intermediate
2168 
2169 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2170 
2171 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored callback; presumably invoked on dm->ctx when the DM is destroyed -- see DMDestroy() to confirm */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2179 
2180 #undef __FUNCT__
2181 #define __FUNCT__ "DMSetApplicationContext"
2182 /*@
2183     DMSetApplicationContext - Set a user context into a DM object
2184 
2185     Not Collective
2186 
2187     Input Parameters:
2188 +   dm - the DM object
2189 -   ctx - the user context
2190 
2191     Level: intermediate
2192 
2193 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2194 
2195 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Borrowed pointer: the caller retains ownership unless a destroy function is set via DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2203 
2204 #undef __FUNCT__
2205 #define __FUNCT__ "DMGetApplicationContext"
2206 /*@
2207     DMGetApplicationContext - Gets a user context from a DM object
2208 
2209     Not Collective
2210 
2211     Input Parameter:
2212 .   dm - the DM object
2213 
2214     Output Parameter:
2215 .   ctx - the user context
2216 
2217     Level: intermediate
2218 
2219 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2220 
2221 @*/
2222 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2223 {
2224   PetscFunctionBegin;
2225   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2226   *(void**)ctx = dm->ctx;
2227   PetscFunctionReturn(0);
2228 }
2229 
2230 #undef __FUNCT__
2231 #define __FUNCT__ "DMSetVariableBounds"
2232 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2234 
2235     Logically Collective on DM
2236 
2237     Input Parameter:
2238 +   dm - the DM object
2239 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2240 
2241     Level: intermediate
2242 
2243 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2244          DMSetJacobian()
2245 
2246 @*/
2247 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2248 {
2249   PetscFunctionBegin;
2250   dm->ops->computevariablebounds = f;
2251   PetscFunctionReturn(0);
2252 }
2253 
2254 #undef __FUNCT__
2255 #define __FUNCT__ "DMHasVariableBounds"
2256 /*@
2257     DMHasVariableBounds - does the DM object have a variable bounds function?
2258 
2259     Not Collective
2260 
2261     Input Parameter:
2262 .   dm - the DM object to destroy
2263 
2264     Output Parameter:
2265 .   flg - PETSC_TRUE if the variable bounds function exists
2266 
2267     Level: developer
2268 
2269 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2270 
2271 @*/
2272 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2273 {
2274   PetscFunctionBegin;
2275   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2276   PetscFunctionReturn(0);
2277 }
2278 
2279 #undef __FUNCT__
2280 #define __FUNCT__ "DMComputeVariableBounds"
2281 /*@C
2282     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2283 
2284     Logically Collective on DM
2285 
2286     Input Parameters:
+   dm - the DM object
2288 -   x  - current solution at which the bounds are computed
2289 
2290     Output parameters:
2291 +   xl - lower bound
2292 -   xu - upper bound
2293 
2294     Level: intermediate
2295 
2296 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2297 
2298 @*/
2299 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2300 {
2301   PetscErrorCode ierr;
2302   PetscFunctionBegin;
2303   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2304   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2305   if (dm->ops->computevariablebounds) {
2306     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2307   }
2308   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2309   PetscFunctionReturn(0);
2310 }
2311 
2312 #undef __FUNCT__
2313 #define __FUNCT__ "DMHasColoring"
2314 /*@
2315     DMHasColoring - does the DM object have a method of providing a coloring?
2316 
2317     Not Collective
2318 
2319     Input Parameter:
2320 .   dm - the DM object
2321 
2322     Output Parameter:
2323 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2324 
2325     Level: developer
2326 
2327 .seealso DMHasFunction(), DMCreateColoring()
2328 
2329 @*/
2330 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2331 {
2332   PetscFunctionBegin;
2333   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2334   PetscFunctionReturn(0);
2335 }
2336 
2337 #undef  __FUNCT__
2338 #define __FUNCT__ "DMSetVec"
2339 /*@C
2340     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2341 
2342     Collective on DM
2343 
2344     Input Parameter:
2345 +   dm - the DM object
2346 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2347 
2348     Level: developer
2349 
2350 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2351 
2352 @*/
2353 PetscErrorCode  DMSetVec(DM dm,Vec x)
2354 {
2355   PetscErrorCode ierr;
2356   PetscFunctionBegin;
2357   if (x) {
2358     if (!dm->x) {
2359       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2360     }
2361     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2362   }
2363   else if (dm->x) {
2364     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2365   }
2366   PetscFunctionReturn(0);
2367 }
2368 
/* Registry of DM constructors, populated by DMRegister()/DMRegisterAll() and consulted by DMSetType() */
PetscFunctionList DMList                       = PETSC_NULL;
/* Set once DMRegisterAll() has run; cleared by DMRegisterDestroy() so the list can be rebuilt */
PetscBool         DMRegisterAllCalled          = PETSC_FALSE;
2371 
2372 #undef __FUNCT__
2373 #define __FUNCT__ "DMSetType"
2374 /*@C
2375   DMSetType - Builds a DM, for a particular DM implementation.
2376 
2377   Collective on DM
2378 
2379   Input Parameters:
2380 + dm     - The DM object
2381 - method - The name of the DM type
2382 
2383   Options Database Key:
2384 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2385 
2386   Notes:
2387   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2388 
2389   Level: intermediate
2390 
2391 .keywords: DM, set, type
2392 .seealso: DMGetType(), DMCreate()
2393 @*/
2394 PetscErrorCode  DMSetType(DM dm, DMType method)
2395 {
2396   PetscErrorCode (*r)(DM);
2397   PetscBool      match;
2398   PetscErrorCode ierr;
2399 
2400   PetscFunctionBegin;
2401   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2402   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2403   if (match) PetscFunctionReturn(0);
2404 
2405   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2406   ierr = PetscFunctionListFind(((PetscObject)dm)->comm, DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2407   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2408 
2409   if (dm->ops->destroy) {
2410     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2411     dm->ops->destroy = PETSC_NULL;
2412   }
2413   ierr = (*r)(dm);CHKERRQ(ierr);
2414   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2415   PetscFunctionReturn(0);
2416 }
2417 
2418 #undef __FUNCT__
2419 #define __FUNCT__ "DMGetType"
2420 /*@C
2421   DMGetType - Gets the DM type name (as a string) from the DM.
2422 
2423   Not Collective
2424 
2425   Input Parameter:
2426 . dm  - The DM
2427 
2428   Output Parameter:
2429 . type - The DM type name
2430 
2431   Level: intermediate
2432 
2433 .keywords: DM, get, type, name
2434 .seealso: DMSetType(), DMCreate()
2435 @*/
2436 PetscErrorCode  DMGetType(DM dm, DMType *type)
2437 {
2438   PetscErrorCode ierr;
2439 
2440   PetscFunctionBegin;
2441   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2442   PetscValidCharPointer(type,2);
2443   if (!DMRegisterAllCalled) {
2444     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2445   }
2446   *type = ((PetscObject)dm)->type_name;
2447   PetscFunctionReturn(0);
2448 }
2449 
2450 #undef __FUNCT__
2451 #define __FUNCT__ "DMConvert"
2452 /*@C
2453   DMConvert - Converts a DM to another DM, either of the same or different type.
2454 
2455   Collective on DM
2456 
2457   Input Parameters:
2458 + dm - the DM
2459 - newtype - new DM type (use "same" for the same type)
2460 
2461   Output Parameter:
2462 . M - pointer to new DM
2463 
2464   Notes:
2465   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2466   the MPI communicator of the generated DM is always the same as the communicator
2467   of the input DM.
2468 
2469   Level: intermediate
2470 
2471 .seealso: DMCreate()
2472 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below; every call goes through the converter lookup */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Converter functions are composed on objects under the name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* NOTE(review): if no converter was found, B from step 2 is not destroyed before this error return -- possible leak on the error path */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    /* Run the selected converter under logging; it creates and returns the new DM in M */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2545 
2546 /*--------------------------------------------------------------------------------------------------------------------*/
2547 
2548 #undef __FUNCT__
2549 #define __FUNCT__ "DMRegister"
2550 /*@C
2551   DMRegister - See DMRegisterDynamic()
2552 
2553   Level: advanced
2554 @*/
2555 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2556 {
2557   char fullname[PETSC_MAX_PATH_LEN];
2558   PetscErrorCode ierr;
2559 
2560   PetscFunctionBegin;
2561   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2562   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2563   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2564   ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2565   PetscFunctionReturn(0);
2566 }
2567 
2568 
2569 /*--------------------------------------------------------------------------------------------------------------------*/
2570 #undef __FUNCT__
2571 #define __FUNCT__ "DMRegisterDestroy"
2572 /*@C
2573    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2574 
2575    Not Collective
2576 
2577    Level: advanced
2578 
2579 .keywords: DM, register, destroy
2580 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2581 @*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Free the registry of DM constructors */
  ierr = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow DMRegisterAll() to repopulate the list on the next DMSetType()/DMGetType() */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2591 
2592 #undef __FUNCT__
2593 #define __FUNCT__ "DMLoad"
2594 /*@C
2595   DMLoad - Loads a DM that has been stored in binary  with DMView().
2596 
2597   Collective on PetscViewer
2598 
2599   Input Parameters:
2600 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2601            some related function before a call to DMLoad().
2602 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2603            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2604 
2605    Level: intermediate
2606 
2607   Notes:
2608    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2609 
2610   Notes for advanced users:
2611   Most users should not need to know the details of the binary storage
2612   format, since DMLoad() and DMView() completely hide these details.
2613   But for anyone who's interested, the standard binary matrix storage
2614   format is
2615 .vb
2616      has not yet been determined
2617 .ve
2618 
2619 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2620 @*/
2621 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2622 {
2623   PetscErrorCode ierr;
2624   PetscBool      isbinary;
2625   PetscInt       classid;
2626   char           type[256];
2627 
2628   PetscFunctionBegin;
2629   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2630   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2631   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2632   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2633 
2634   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2635   if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
2636   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2637   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2638   if (newdm->ops->load) {
2639     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2640   }
2641   PetscFunctionReturn(0);
2642 }
2643 
2644 /******************************** FEM Support **********************************/
2645 
2646 #undef __FUNCT__
2647 #define __FUNCT__ "DMPrintCellVector"
2648 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2649 {
2650   PetscInt       f;
2651   PetscErrorCode ierr;
2652 
2653   PetscFunctionBegin;
2654   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2655   for (f = 0; f < len; ++f) {
2656     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2657   }
2658   PetscFunctionReturn(0);
2659 }
2660 
2661 #undef __FUNCT__
2662 #define __FUNCT__ "DMPrintCellMatrix"
2663 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2664 {
2665   PetscInt       f, g;
2666   PetscErrorCode ierr;
2667 
2668   PetscFunctionBegin;
2669   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2670   for (f = 0; f < rows; ++f) {
2671     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2672     for (g = 0; g < cols; ++g) {
2673       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2674     }
2675     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2676   }
2677   PetscFunctionReturn(0);
2678 }
2679 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection, or PETSC_NULL if no section has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2705 
2706 #undef __FUNCT__
2707 #define __FUNCT__ "DMSetDefaultSection"
2708 /*@
2709   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2710 
2711   Input Parameters:
2712 + dm - The DM
2713 - section - The PetscSection
2714 
2715   Level: intermediate
2716 
2717   Note: Any existing Section will be destroyed
2718 
2719 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2720 @*/
2721 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2722 {
2723   PetscInt       numFields;
2724   PetscInt       f;
2725   PetscErrorCode ierr;
2726 
2727   PetscFunctionBegin;
2728   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2729   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2730   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2731   dm->defaultSection = section;
2732   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2733   if (numFields) {
2734     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2735     for (f = 0; f < numFields; ++f) {
2736       const char *name;
2737 
2738       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2739       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2740     }
2741   }
2742   PetscFunctionReturn(0);
2743 }
2744 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Notes: This gets a borrowed reference, so the user should not destroy this PetscSection.
  The global section is built lazily on first call from the default (local)
  PetscSection and the point PetscSF; both must already be set on the DM or an
  error is raised.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also derive the PetscLayout describing the parallel distribution of dofs */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2779 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Notes: Any existing global section is destroyed. The DM takes over the
  caller's reference to section (no PetscObjectReference is taken); do not
  destroy it afterwards.

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2805 
2806 #undef __FUNCT__
2807 #define __FUNCT__ "DMGetDefaultSF"
2808 /*@
2809   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2810   it is created from the default PetscSection layouts in the DM.
2811 
2812   Input Parameter:
2813 . dm - The DM
2814 
2815   Output Parameter:
2816 . sf - The PetscSF
2817 
2818   Level: intermediate
2819 
2820   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2821 
2822 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2823 @*/
2824 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2825 {
2826   PetscInt       nroots;
2827   PetscErrorCode ierr;
2828 
2829   PetscFunctionBegin;
2830   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2831   PetscValidPointer(sf, 2);
2832   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2833   if (nroots < 0) {
2834     PetscSection section, gSection;
2835 
2836     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2837     if (section) {
2838       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2839       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2840     } else {
2841       *sf = PETSC_NULL;
2842       PetscFunctionReturn(0);
2843     }
2844   }
2845   *sf = dm->defaultSF;
2846   PetscFunctionReturn(0);
2847 }
2848 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Notes: Any previous SF is destroyed. The DM takes over the caller's reference
  to sf (no PetscObjectReference is taken, unlike DMSetPointSF()); do not
  destroy it afterwards.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2875 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Notes: The graph is installed into the DM's existing default SF. In the global
  section, a negative dof count -(gdof+1) marks a point owned by another process,
  and a negative offset -(goff+1) encodes the true global offset of such a point.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots are the locally owned unconstrained dofs; a PetscLayout over them
     gives the per-rank ownership ranges used to resolve remote offsets below */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. gdof < 0 encodes a remotely owned point as
     -(gdof+1) dofs; constrained dofs (gcdof) are excluded in either case */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local dof index) and root (rank, remote
     index) arrays, skipping constrained dofs */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Local leaf indices: the point's dofs minus its constrained ones */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset from -(goff+1) and
         binary-search the layout ranges for the owning rank */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* convert the insertion point returned on a miss into the owning rank */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root is on this rank */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2964 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or PETSC_NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
2990 
2991 #undef __FUNCT__
2992 #define __FUNCT__ "DMSetPointSF"
2993 /*@
2994   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2995 
2996   Input Parameters:
2997 + dm - The DM
2998 - sf - The PetscSF
2999 
3000   Level: intermediate
3001 
3002 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3003 @*/
3004 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3005 {
3006   PetscErrorCode ierr;
3007 
3008   PetscFunctionBegin;
3009   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3010   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3011   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3012   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3013   dm->sf = sf;
3014   PetscFunctionReturn(0);
3015 }
3016 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* Return the number of fields currently stored in the DM. */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3027 
3028 #undef __FUNCT__
3029 #define __FUNCT__ "DMSetNumFields"
3030 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3031 {
3032   PetscInt       f;
3033   PetscErrorCode ierr;
3034 
3035   PetscFunctionBegin;
3036   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3037   for (f = 0; f < dm->numFields; ++f) {
3038     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3039   }
3040   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3041   dm->numFields = numFields;
3042   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3043   for (f = 0; f < dm->numFields; ++f) {
3044     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3045   }
3046   PetscFunctionReturn(0);
3047 }
3048 
3049 #undef __FUNCT__
3050 #define __FUNCT__ "DMGetField"
3051 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3052 {
3053   PetscFunctionBegin;
3054   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3055   PetscValidPointer(field, 2);
3056   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3057   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3058   *field = dm->fields[f];
3059   PetscFunctionReturn(0);
3060 }
3061 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying so c == dm->coordinates is handled safely */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the local coordinates; they are rebuilt lazily from the new global vector */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3094 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying so c == dm->coordinatesLocal is handled safely */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the global coordinates; they are rebuilt lazily from the new local vector */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3129 
3130 #undef __FUNCT__
3131 #define __FUNCT__ "DMGetCoordinates"
3132 /*@
3133   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3134 
3135   Not Collective
3136 
3137   Input Parameter:
3138 . dm - the DM
3139 
3140   Output Parameter:
3141 . c - global coordinate vector
3142 
3143   Note:
3144   This is a borrowed reference, so the user should NOT destroy this vector
3145 
3146   Each process has only the local coordinates (does NOT have the ghost coordinates).
3147 
3148   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3149   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3150 
3151   Level: intermediate
3152 
3153 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3154 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3155 @*/
3156 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3157 {
3158   PetscErrorCode ierr;
3159 
3160   PetscFunctionBegin;
3161   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3162   PetscValidPointer(c,2);
3163   if (!dm->coordinates && dm->coordinatesLocal) {
3164     DM cdm = PETSC_NULL;
3165 
3166     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3167     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3168     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3169     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3170     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3171   }
3172   *c = dm->coordinates;
3173   PetscFunctionReturn(0);
3174 }
3175 
3176 #undef __FUNCT__
3177 #define __FUNCT__ "DMGetCoordinatesLocal"
3178 /*@
3179   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3180 
3181   Collective on DM
3182 
3183   Input Parameter:
3184 . dm - the DM
3185 
3186   Output Parameter:
3187 . c - coordinate vector
3188 
3189   Note:
3190   This is a borrowed reference, so the user should NOT destroy this vector
3191 
3192   Each process has the local and ghost coordinates
3193 
3194   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3195   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3196 
3197   Level: intermediate
3198 
3199 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3200 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3201 @*/
3202 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3203 {
3204   PetscErrorCode ierr;
3205 
3206   PetscFunctionBegin;
3207   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3208   PetscValidPointer(c,2);
3209   if (!dm->coordinatesLocal && dm->coordinates) {
3210     DM cdm = PETSC_NULL;
3211 
3212     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3213     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3214     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3215     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3216     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3217   }
3218   *c = dm->coordinatesLocal;
3219   PetscFunctionReturn(0);
3220 }
3221 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Level: intermediate

  Note: The coordinate DM is created lazily on first call via the DM type's
  createcoordinatedm implementation; an error is raised if the type does not
  provide one. This is a borrowed reference; do not destroy it.

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3254 
3255 #undef __FUNCT__
3256 #define __FUNCT__ "DMLocatePoints"
3257 /*@
3258   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3259 
3260   Not collective
3261 
3262   Input Parameters:
3263 + dm - The DM
3264 - v - The Vec of points
3265 
3266   Output Parameter:
3267 . cells - The local cell numbers for cells which contain the points
3268 
3269   Level: developer
3270 
3271 .keywords: point location, mesh
3272 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3273 @*/
3274 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3275 {
3276   PetscErrorCode ierr;
3277 
3278   PetscFunctionBegin;
3279   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3280   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3281   PetscValidPointer(cells,3);
3282   if (dm->ops->locatepoints) {
3283     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3284   } else {
3285     SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3286   }
3287   PetscFunctionReturn(0);
3288 }
3289