xref: /petsc/src/dm/interface/dm.c (revision 009bbdc485cd9ad46be9940d3549e2dde9cdc322)
#include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/

/* Definitions of the DM class id and profiling event handles used by this package */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  /* With static libraries the dependent packages must be initialized explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty function table; DMSetType() installs the type-specific operations */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): the 10 here matches the field limit checked in DMSetNullSpaceConstructor()
       below; confirm it agrees with the declared size of nullspaceConstructors[] in dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
90 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Replace any previously stored type string with a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
119 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the Vec as a composed object under the key "__PETSc_dm";
     see the matching VecSetDM()/MatSetDM() which use the same key */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Replace any previously stored type string with a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the Mat as a composed object under the key "__PETSc_dm",
     the same key used by MatSetDM() and the Vec variants */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
230 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, may be PETSC_NULL to detach the currently attached DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm is validated only when non-NULL; passing NULL clears the association */
  if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
256 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DMDA options in the database.

   Logically Collective on DMDA

   Input Parameters:
+  da - the DMDA context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DMDA, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegates to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMDestroy"
291 /*@
292     DMDestroy - Destroys a vector packer or DMDA.
293 
294     Collective on DM
295 
296     Input Parameter:
297 .   dm - the DM object to destroy
298 
299     Level: developer
300 
301 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
302 
303 @*/
304 PetscErrorCode  DMDestroy(DM *dm)
305 {
306   PetscInt       i, cnt = 0, f;
307   DMNamedVecLink nlink,nnext;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*dm) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
313 
314   /* count all the circular references of DM and its contained Vecs */
315   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
316     if ((*dm)->localin[i])  {cnt++;}
317     if ((*dm)->globalin[i]) {cnt++;}
318   }
319   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
320   if ((*dm)->x) {
321     DM obj;
322     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
323     if (obj == *dm) cnt++;
324   }
325   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
326   if ((*dm)->x) {
327     DM obj;
328     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
329     if (obj == *dm) cnt++;
330   }
331 
332   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
333   /*
334      Need this test because the dm references the vectors that
335      reference the dm, so destroying the dm calls destroy on the
336      vectors that cause another destroy on the dm
337   */
338   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
339   ((PetscObject) (*dm))->refct = 0;
340   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
341     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
342     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
343   }
344   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
345     nnext = nlink->next;
346     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
347     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
348     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
349     ierr = PetscFree(nlink);CHKERRQ(ierr);
350   }
351   (*dm)->namedglobal = PETSC_NULL;
352 
353   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
354     nnext = nlink->next;
355     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
356     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
357     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
358     ierr = PetscFree(nlink);CHKERRQ(ierr);
359   }
360   (*dm)->namedlocal = PETSC_NULL;
361 
362   /* Destroy the list of hooks */
363   {
364     DMCoarsenHookLink link,next;
365     for (link=(*dm)->coarsenhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->coarsenhook = PETSC_NULL;
370   }
371   {
372     DMRefineHookLink link,next;
373     for (link=(*dm)->refinehook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->refinehook = PETSC_NULL;
378   }
379   {
380     DMSubDomainHookLink link,next;
381     for (link=(*dm)->subdomainhook; link; link=next) {
382       next = link->next;
383       ierr = PetscFree(link);CHKERRQ(ierr);
384     }
385     (*dm)->subdomainhook = PETSC_NULL;
386   }
387   {
388     DMGlobalToLocalHookLink link,next;
389     for (link=(*dm)->gtolhook; link; link=next) {
390       next = link->next;
391       ierr = PetscFree(link);CHKERRQ(ierr);
392     }
393     (*dm)->gtolhook = PETSC_NULL;
394   }
395   /* Destroy the work arrays */
396   {
397     DMWorkLink link,next;
398     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
399     for (link=(*dm)->workin; link; link=next) {
400       next = link->next;
401       ierr = PetscFree(link->mem);CHKERRQ(ierr);
402       ierr = PetscFree(link);CHKERRQ(ierr);
403     }
404     (*dm)->workin = PETSC_NULL;
405   }
406 
407   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
408   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
410 
411   if ((*dm)->ctx && (*dm)->ctxdestroy) {
412     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
413   }
414   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
415   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
416   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
417   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
419   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
421 
422   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
423   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
424   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
425   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
427 
428   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
429   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
431 
432   for (f = 0; f < (*dm)->numFields; ++f) {
433     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
434   }
435   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
436   /* if memory was published with AMS then destroy it */
437   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
438 
439   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
440   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
441   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
#undef __FUNCT__
#define __FUNCT__ "DMSetUp"
/*@
    DMSetUp - sets up the data structures inside a DM object

    Collective on DM

    Input Parameter:
.   dm - the DM object to setup

    Level: developer

    Notes: calling DMSetUp() more than once is harmless; subsequent calls return immediately.

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetUp(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->setupcalled) PetscFunctionReturn(0);
  /* Type-specific setup is optional; a DM type may not provide one */
  if (dm->ops->setup) {
    ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
  }
  dm->setupcalled = PETSC_TRUE;
  PetscFunctionReturn(0);
}
473 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;   /* default matrix type if none set and none given */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
    ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
    }
    /* Current mattype (if any) is offered as the default shown to the user */
    ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
    if (flg) {
      ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
    }
    ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
    /* Let the type-specific implementation process its own options */
    if (dm->ops->setfromoptions) {
      ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
    }
    /* process any options handlers added with PetscObjectAddOptionsHandler() */
    ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; if PETSC_NULL the stdout ASCII viewer for the DM's communicator is used

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: write the DM file classid followed by a fixed 256-byte type name */
    PetscInt         classid = DM_FILE_CLASSID;
    char             type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Viewing is optional for a DM type */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
564 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector"
/*@
    DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object

    Collective on DM

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the global vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): ops->createglobalvector is dispatched without a NULL check;
     a DM whose type was never set will crash here rather than produce a useful error */
  ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
592 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector"
/*@
    DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   vec - the local vector

    Level: beginner

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): ops->createlocalvector is dispatched without a NULL check;
     a DM whose type was never set will crash here rather than produce a useful error */
  ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
620 
621 #undef __FUNCT__
622 #define __FUNCT__ "DMGetLocalToGlobalMapping"
623 /*@
624    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
625 
626    Collective on DM
627 
628    Input Parameter:
629 .  dm - the DM that provides the mapping
630 
631    Output Parameter:
632 .  ltog - the mapping
633 
634    Level: intermediate
635 
636    Notes:
637    This mapping can then be used by VecSetLocalToGlobalMapping() or
638    MatSetLocalToGlobalMapping().
639 
640 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
641 @*/
642 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
643 {
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
648   PetscValidPointer(ltog,2);
649   if (!dm->ltogmap) {
650     PetscSection section, sectionGlobal;
651 
652     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
653     if (section) {
654       PetscInt      *ltog;
655       PetscInt       pStart, pEnd, size, p, l;
656 
657       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
658       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
659       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
660       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
661       for (p = pStart, l = 0; p < pEnd; ++p) {
662         PetscInt dof, off, c;
663 
664         /* Should probably use constrained dofs */
665         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
666         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
667         for (c = 0; c < dof; ++c, ++l) {
668           ltog[l] = off+c;
669         }
670       }
671       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
672       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
673     } else {
674       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
675       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
676     }
677   }
678   *ltog = dm->ltogmap;
679   PetscFunctionReturn(0);
680 }
681 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  da - the distributed array that provides the mapping

   Output Parameter:
.  ltog - the block mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* Built lazily on first request and cached on the DM */
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* Block size 1: the blocked map is just the point map; share it with an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
724 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the block size has not been determined yet (DMCreate() initializes it to 1) */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
751 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation; dm1 is the coarse side */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
789 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation; dm1 is the coarse side */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
822 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMCreateMatrix"
856 /*@C
857     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
858 
859     Collective on DM
860 
861     Input Parameter:
862 +   dm - the DM object
863 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
864             any type which inherits from one of these (such as MATAIJ)
865 
866     Output Parameter:
867 .   mat - the empty Jacobian
868 
869     Level: beginner
870 
871     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
872        do not need to do it yourself.
873 
874        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
875        the nonzero pattern call DMDASetMatPreallocateOnly()
876 
877        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
878        internally by PETSc.
879 
880        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
881        the indices for the global numbering for DMDAs which is complicated.
882 
883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
884 
885 @*/
886 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
887 {
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
892 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
893   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
894 #endif
895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
896   PetscValidPointer(mat,3);
897   if (dm->mattype) {
898     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
899   } else {
900     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer
.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Flag is consulted by the type-specific DMCreateMatrix() implementations */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
927 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array

  Level: developer

.seealso DMDestroy(), DMCreate(), DMRestoreWorkArray()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Reuse a link from the free (checked-in) list if available, otherwise allocate one */
  if (dm->workin) {
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  /* Grow the link's buffer if the request exceeds its current capacity */
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list and hand the buffer to the caller */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
973 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size (unused here; kept to mirror DMGetWorkArray())
- dtype - data type (unused here; kept to mirror DMGetWorkArray())

  Input/Output Parameter:
. mem - address of the pointer returned by DMGetWorkArray(); set to PETSC_NULL on return

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link owning this buffer and move it back to the idle list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      /* Null the caller's pointer to prevent use-after-restore */
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Buffer was not obtained from DMGetWorkArray() (or was already restored) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "DMSetNullSpaceConstructor"
1015 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1019   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1020   dm->nullspaceConstructors[field] = nullsp;
1021   PetscFunctionReturn(0);
1022 }
1023 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or PETSC_NULL if not requested)
. fieldNames - The name for each field (or PETSC_NULL if not requested)
- fields     - The global indices for each field (or PETSC_NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; initialize the ones that were requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = PETSC_NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = PETSC_NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: derive the field ISes from the global PetscSection layout */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt  nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the locally-owned, unconstrained dofs in each field
       (points with gdof <= 0 are not owned by this process) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      /* Reset the counters; they become fill positions in the second pass */
      fieldSizes[f] = 0;
    }
    /* Second pass: record the global index of each unconstrained field dof,
       walking the global offsets in field order within each point */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) {*numFields = nF;}
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        /* Copy the name so the caller owns (and must PetscFree) each entry */
        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f] */
        ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else {
    /* No default section: delegate to the implementation, if it provides one */
    if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1139 /*@C
1140   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1141 
1142   Not Collective
1143 
1144   Input Parameters:
1145 + dm   - the DM object
1146 - name - the name of the field decomposition
1147 
1148   Output Parameter:
1149 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1150 
1151   Level: advanced
1152 
1153 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1154 @*/
1155 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1156 {
1157   PetscErrorCode ierr;
1158 
1159   PetscFunctionBegin;
1160   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1161   PetscValidCharPointer(name,2);
1162   PetscValidPointer(ddm,3);
1163   *ddm = PETSC_NULL;
1164   if (dm->ops->createfielddecompositiondm) {
1165     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1166   }
1167   PetscFunctionReturn(0);
1168 }
1169 
1170 #undef __FUNCT__
1171 #define __FUNCT__ "DMCreateFieldDecomposition"
1172 /*@C
1173   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1174                           corresponding to different fields: each IS contains the global indices of the dofs of the
1175                           corresponding field. The optional list of DMs define the DM for each subproblem.
1176                           Generalizes DMCreateFieldIS().
1177 
1178   Not collective
1179 
1180   Input Parameter:
1181 . dm - the DM object
1182 
1183   Output Parameters:
1184 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1185 . namelist  - The name for each field (or PETSC_NULL if not requested)
1186 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1187 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1188 
1189   Level: intermediate
1190 
1191   Notes:
1192   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1193   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1194   and all of the arrays should be freed with PetscFree().
1195 
1196 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1197 @*/
1198 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1199 {
1200   PetscErrorCode ierr;
1201 
1202   PetscFunctionBegin;
1203   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1204   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1205   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1206   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1207   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1208   if (!dm->ops->createfielddecomposition) {
1209     PetscSection section;
1210     PetscInt     numFields, f;
1211 
1212     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1213     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1214     if (section && numFields && dm->ops->createsubdm) {
1215       *len = numFields;
1216       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1217       for (f = 0; f < numFields; ++f) {
1218         const char *fieldName;
1219 
1220         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1221         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1222         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1223       }
1224     } else {
1225       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1226       /* By default there are no DMs associated with subproblems. */
1227       if (dmlist) *dmlist = PETSC_NULL;
1228     }
1229   }
1230   else {
1231     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1232   }
1233   PetscFunctionReturn(0);
1234 }
1235 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  /* No fallback: the implementation must provide this capability */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1271 
1272 #undef __FUNCT__
1273 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1274 /*@C
1275   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1276 
1277   Not Collective
1278 
1279   Input Parameters:
1280 + dm   - the DM object
1281 - name - the name of the subdomain decomposition
1282 
1283   Output Parameter:
1284 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1285 
1286   Level: advanced
1287 
1288 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1289 @*/
1290 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1291 {
1292   PetscErrorCode ierr;
1293 
1294   PetscFunctionBegin;
1295   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1296   PetscValidCharPointer(name,2);
1297   PetscValidPointer(ddm,3);
1298   *ddm = PETSC_NULL;
1299   if (dm->ops->createdomaindecompositiondm) {
1300     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1301   }
1302   PetscFunctionReturn(0);
1303 }
1304 
1305 #undef __FUNCT__
1306 #define __FUNCT__ "DMCreateDomainDecomposition"
1307 /*@C
1308   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1309                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1310                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1311                           define a nonoverlapping covering, while outer subdomains can overlap.
1312                           The optional list of DMs define the DM for each subproblem.
1313 
1314   Not collective
1315 
1316   Input Parameter:
1317 . dm - the DM object
1318 
1319   Output Parameters:
1320 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1321 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1322 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1323 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1324 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1325 
1326   Level: intermediate
1327 
1328   Notes:
1329   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1330   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1331   and all of the arrays should be freed with PetscFree().
1332 
1333 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1334 @*/
1335 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1336 {
1337   PetscErrorCode ierr;
1338   DMSubDomainHookLink link;
1339   PetscInt            i,l;
1340 
1341   PetscFunctionBegin;
1342   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1343   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1344   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1345   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1346   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1347   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1348   if (dm->ops->createdomaindecomposition) {
1349     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1350     /* copy subdomain hooks and context over to the subdomain DMs */
1351     if (dmlist) {
1352       for (i = 0; i < l; i++) {
1353         for (link=dm->subdomainhook; link; link=link->next) {
1354           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1355         }
1356         (*dmlist)[i]->ctx = dm->ctx;
1357       }
1358     }
1359     if (len) *len = l;
1360   }
1361   PetscFunctionReturn(0);
1362 }
1363 
1364 
1365 #undef __FUNCT__
1366 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1367 /*@C
1368   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1369 
1370   Not collective
1371 
1372   Input Parameters:
1373 + dm - the DM object
1374 . n  - the number of subdomain scatters
1375 - subdms - the local subdomains
1376 
1377   Output Parameters:
1378 + n     - the number of scatters returned
1379 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1380 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1381 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1382 
1383   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1384   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1385   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1386   solution and residual data.
1387 
1388   Level: developer
1389 
1390 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1391 @*/
1392 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1393 {
1394   PetscErrorCode ierr;
1395 
1396   PetscFunctionBegin;
1397   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1398   PetscValidPointer(subdms,3);
1399   PetscValidPointer(iscat,4);
1400   PetscValidPointer(oscat,5);
1401   PetscValidPointer(gscat,6);
1402   if (dm->ops->createddscatters) {
1403     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1404   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1405   PetscFunctionReturn(0);
1406 }
1407 
#undef __FUNCT__
#define __FUNCT__ "DMRefine"
/*@
  DMRefine - Refines a DM object

  Collective on DM

  Input Parameter:
+ dm   - the DM object
- comm - the communicator to contain the new DM object (or MPI_COMM_NULL)

  Output Parameter:
. dmf - the refined DM, or PETSC_NULL

  Note: If no refinement was done, the return value is PETSC_NULL

  Level: developer

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
@*/
PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* NOTE(review): dm->ops->refine is invoked without a NULL check, unlike e.g.
     DMCreateSubDM -- a type without refine support will crash here; confirm intent */
  ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
  if (*dmf) {
    /* Propagate settings from the coarse DM to the new fine DM */
    (*dmf)->ops->creatematrix = dm->ops->creatematrix;
    ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
    (*dmf)->ctx       = dm->ctx;
    (*dmf)->leveldown = dm->leveldown;
    (*dmf)->levelup   = dm->levelup + 1;
    ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
    /* Run user refine hooks (registered with DMRefineHookAdd) on the new DM */
    for (link=dm->refinehook; link; link=link->next) {
      if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
    }
  }
  PetscFunctionReturn(0);
}
1449 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - coarse DM on which to run the hooks when it is refined
.  refinehook - function to run when setting up a finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1503 
#undef __FUNCT__
#define __FUNCT__ "DMInterpolate"
/*@
   DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()

   Collective if any hooks are

   Input Arguments:
+  coarse - coarser DM to use as a base
.  interp - interpolation matrix, apply using MatInterpolate()
-  fine - finer DM to update

   Level: developer

.seealso: DMRefineHookAdd(), MatInterpolate()
@*/
PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
{
  PetscErrorCode ierr;
  DMRefineHookLink link;

  PetscFunctionBegin;
  /* Hooks live on the fine DM; each was registered via DMRefineHookAdd() */
  for (link=fine->refinehook; link; link=link->next) {
    if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1531 
1532 #undef __FUNCT__
1533 #define __FUNCT__ "DMGetRefineLevel"
1534 /*@
1535     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1536 
1537     Not Collective
1538 
1539     Input Parameter:
1540 .   dm - the DM object
1541 
1542     Output Parameter:
1543 .   level - number of refinements
1544 
1545     Level: developer
1546 
1547 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1548 
1549 @*/
1550 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1551 {
1552   PetscFunctionBegin;
1553   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1554   *level = dm->levelup;
1555   PetscFunctionReturn(0);
1556 }
1557 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1615 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (registered with DMGlobalToLocalHookAdd) first */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Fast path: broadcast global values to local storage through the default SF */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementation path: map the *_ALL_VALUES modes down to the basic modes */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1661 
1662 #undef __FUNCT__
1663 #define __FUNCT__ "DMGlobalToLocalEnd"
1664 /*@
1665     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1666 
1667     Neighbor-wise Collective on DM
1668 
1669     Input Parameters:
1670 +   dm - the DM object
1671 .   g - the global vector
1672 .   mode - INSERT_VALUES or ADD_VALUES
1673 -   l - the local vector
1674 
1675 
1676     Level: beginner
1677 
1678 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1679 
1680 @*/
1681 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1682 {
1683   PetscSF        sf;
1684   PetscErrorCode ierr;
1685   PetscScalar    *lArray, *gArray;
1686   DMGlobalToLocalHookLink link;
1687 
1688   PetscFunctionBegin;
1689   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1690   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1691   if (sf) {
1692   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1693 
1694     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1695     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1696     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1697     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1698     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1699   } else {
1700     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1701   }
1702   for (link=dm->gtolhook; link; link=link->next) {
1703     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1704   }
1705   PetscFunctionReturn(0);
1706 }
1707 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Fast path: reduce local values onto global storage through the default SF */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the InsertMode onto the MPI reduction used by PetscSFReduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementation path: map the *_ALL_VALUES modes down to the basic modes */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1767 
1768 #undef __FUNCT__
1769 #define __FUNCT__ "DMLocalToGlobalEnd"
1770 /*@
1771     DMLocalToGlobalEnd - updates global vectors from local vectors
1772 
1773     Neighbor-wise Collective on DM
1774 
1775     Input Parameters:
1776 +   dm - the DM object
1777 .   l - the local vector
1778 .   mode - INSERT_VALUES or ADD_VALUES
1779 -   g - the global vector
1780 
1781 
1782     Level: beginner
1783 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()
1785 
1786 @*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Completes the star-forest reduction started by DMLocalToGlobalBegin();
       the mode->op mapping must match the one used there. */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it INSERT cannot be expressed as a reduction op */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Fall back to the type-specific implementation; see DMLocalToGlobalBegin() for the mode collapsing */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1823 
1824 #undef __FUNCT__
1825 #define __FUNCT__ "DMCoarsen"
1826 /*@
1827     DMCoarsen - Coarsens a DM object
1828 
1829     Collective on DM
1830 
1831     Input Parameter:
1832 +   dm - the DM object
1833 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1834 
1835     Output Parameter:
1836 .   dmc - the coarsened DM
1837 
1838     Level: developer
1839 
1840 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1841 
1842 @*/
1843 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1844 {
1845   PetscErrorCode ierr;
1846   DMCoarsenHookLink link;
1847 
1848   PetscFunctionBegin;
1849   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1850   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1851   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1852   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1853   (*dmc)->ctx       = dm->ctx;
1854   (*dmc)->levelup   = dm->levelup;
1855   (*dmc)->leveldown = dm->leveldown + 1;
1856   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1857   for (link=dm->coarsenhook; link; link=link->next) {
1858     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1859   }
1860   PetscFunctionReturn(0);
1861 }
1862 
1863 #undef __FUNCT__
1864 #define __FUNCT__ "DMCoarsenHookAdd"
1865 /*@
1866    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1867 
1868    Logically Collective
1869 
1870    Input Arguments:
1871 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1872 .  coarsenhook - function to run when setting up a coarser level
1873 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1874 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1875 
1876    Calling sequence of coarsenhook:
1877 $    coarsenhook(DM fine,DM coarse,void *ctx);
1878 
1879 +  fine - fine level DM
1880 .  coarse - coarse level DM to restrict problem to
1881 -  ctx - optional user-defined function context
1882 
1883    Calling sequence for restricthook:
1884 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1885 
1886 +  fine - fine level DM
1887 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1888 .  rscale - scaling vector for restriction
1889 .  inject - matrix restricting by injection
1890 .  coarse - coarse level DM to update
1891 -  ctx - optional user-defined function context
1892 
1893    Level: advanced
1894 
1895    Notes:
1896    This function is only needed if auxiliary data needs to be set up on coarse grids.
1897 
1898    If this function is called multiple times, the hooks will be run in the order they are added.
1899 
1900    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1901    extract the finest level information from its context (instead of from the SNES).
1902 
1903 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1904 @*/
1905 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1906 {
1907   PetscErrorCode ierr;
1908   DMCoarsenHookLink link,*p;
1909 
1910   PetscFunctionBegin;
1911   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1912   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1913   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1914   link->coarsenhook = coarsenhook;
1915   link->restricthook = restricthook;
1916   link->ctx = ctx;
1917   link->next = PETSC_NULL;
1918   *p = link;
1919   PetscFunctionReturn(0);
1920 }
1921 
1922 #undef __FUNCT__
1923 #define __FUNCT__ "DMRestrict"
1924 /*@
1925    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1926 
1927    Collective if any hooks are
1928 
1929    Input Arguments:
1930 +  fine - finer DM to use as a base
1931 .  restrct - restriction matrix, apply using MatRestrict()
1932 .  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1934 
1935    Level: developer
1936 
1937 .seealso: DMCoarsenHookAdd(), MatRestrict()
1938 @*/
1939 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1940 {
1941   PetscErrorCode ierr;
1942   DMCoarsenHookLink link;
1943 
1944   PetscFunctionBegin;
1945   for (link=fine->coarsenhook; link; link=link->next) {
1946     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1947   }
1948   PetscFunctionReturn(0);
1949 }
1950 
1951 #undef __FUNCT__
1952 #define __FUNCT__ "DMSubDomainHookAdd"
1953 /*@
   DMSubDomainHookAdd - adds callbacks to be run when decomposing a problem into subdomain DMs

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run when a subdomain (decomposition) DM is created
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context to provide data for the hooks (may be PETSC_NULL)
1962 
1963    Calling sequence for restricthook:
1964 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1965 
1966 +  global - global DM
1967 .  out    - scatter to the outer (with ghost and overlap points) block vector
1968 .  in     - scatter to block vector values only owned locally
1969 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1970 -  ctx - optional user-defined function context
1971 
1972    Level: advanced
1973 
1974    Notes:
1975    This function is only needed if auxiliary data needs to be set up on coarse grids.
1976 
1977    If this function is called multiple times, the hooks will be run in the order they are added.
1978 
1979    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1980    extract the finest level information from its context (instead of from the SNES).
1981 
1982 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1983 @*/
1984 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1985 {
1986   PetscErrorCode      ierr;
1987   DMSubDomainHookLink link,*p;
1988 
1989   PetscFunctionBegin;
1990   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1991   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1992   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
1993   link->restricthook = restricthook;
1994   link->ddhook = ddhook;
1995   link->ctx = ctx;
1996   link->next = PETSC_NULL;
1997   *p = link;
1998   PetscFunctionReturn(0);
1999 }
2000 
2001 #undef __FUNCT__
2002 #define __FUNCT__ "DMSubDomainRestrict"
2003 /*@
2004    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2005 
2006    Collective if any hooks are
2007 
   Input Arguments:
+  global - global DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - subdomain DM to update
2013 
2014    Level: developer
2015 
2016 .seealso: DMCoarsenHookAdd(), MatRestrict()
2017 @*/
2018 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2019 {
2020   PetscErrorCode ierr;
2021   DMSubDomainHookLink link;
2022 
2023   PetscFunctionBegin;
2024   for (link=global->subdomainhook; link; link=link->next) {
2025     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2026   }
2027   PetscFunctionReturn(0);
2028 }
2029 
2030 #undef __FUNCT__
2031 #define __FUNCT__ "DMGetCoarsenLevel"
2032 /*@
2033     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2034 
2035     Not Collective
2036 
2037     Input Parameter:
2038 .   dm - the DM object
2039 
2040     Output Parameter:
2041 .   level - number of coarsenings
2042 
2043     Level: developer
2044 
2045 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2046 
2047 @*/
2048 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2049 {
2050   PetscFunctionBegin;
2051   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2052   *level = dm->leveldown;
2053   PetscFunctionReturn(0);
2054 }
2055 
2056 
2057 
2058 #undef __FUNCT__
2059 #define __FUNCT__ "DMRefineHierarchy"
2060 /*@C
2061     DMRefineHierarchy - Refines a DM object, all levels at once
2062 
2063     Collective on DM
2064 
2065     Input Parameter:
2066 +   dm - the DM object
2067 -   nlevels - the number of levels of refinement
2068 
2069     Output Parameter:
2070 .   dmf - the refined DM hierarchy
2071 
2072     Level: developer
2073 
2074 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2075 
2076 @*/
2077 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2078 {
2079   PetscErrorCode ierr;
2080 
2081   PetscFunctionBegin;
2082   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2083   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2084   if (nlevels == 0) PetscFunctionReturn(0);
2085   if (dm->ops->refinehierarchy) {
2086     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2087   } else if (dm->ops->refine) {
2088     PetscInt i;
2089 
2090     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2091     for (i=1; i<nlevels; i++) {
2092       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2093     }
2094   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2095   PetscFunctionReturn(0);
2096 }
2097 
2098 #undef __FUNCT__
2099 #define __FUNCT__ "DMCoarsenHierarchy"
2100 /*@C
2101     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2102 
2103     Collective on DM
2104 
2105     Input Parameter:
2106 +   dm - the DM object
2107 -   nlevels - the number of levels of coarsening
2108 
2109     Output Parameter:
2110 .   dmc - the coarsened DM hierarchy
2111 
2112     Level: developer
2113 
2114 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2115 
2116 @*/
2117 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2118 {
2119   PetscErrorCode ierr;
2120 
2121   PetscFunctionBegin;
2122   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2123   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2124   if (nlevels == 0) PetscFunctionReturn(0);
2125   PetscValidPointer(dmc,3);
2126   if (dm->ops->coarsenhierarchy) {
2127     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2128   } else if (dm->ops->coarsen) {
2129     PetscInt i;
2130 
2131     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2132     for (i=1; i<nlevels; i++) {
2133       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2134     }
2135   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2136   PetscFunctionReturn(0);
2137 }
2138 
2139 #undef __FUNCT__
2140 #define __FUNCT__ "DMCreateAggregates"
2141 /*@
2142    DMCreateAggregates - Gets the aggregates that map between
2143    grids associated with two DMs.
2144 
2145    Collective on DM
2146 
2147    Input Parameters:
2148 +  dmc - the coarse grid DM
2149 -  dmf - the fine grid DM
2150 
2151    Output Parameters:
2152 .  rest - the restriction matrix (transpose of the projection matrix)
2153 
2154    Level: intermediate
2155 
2156 .keywords: interpolation, restriction, multigrid
2157 
2158 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2159 @*/
2160 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2161 {
2162   PetscErrorCode ierr;
2163 
2164   PetscFunctionBegin;
2165   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2166   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2167   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2168   PetscFunctionReturn(0);
2169 }
2170 
2171 #undef __FUNCT__
2172 #define __FUNCT__ "DMSetApplicationContextDestroy"
2173 /*@C
2174     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2175 
2176     Not Collective
2177 
2178     Input Parameters:
2179 +   dm - the DM object
2180 -   destroy - the destroy function
2181 
2182     Level: intermediate
2183 
2184 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2185 
2186 @*/
/* Records the user callback that DMDestroy() will invoke on dm->ctx; the DM does not
   call it at set time, it only stores the function pointer. */
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2194 
2195 #undef __FUNCT__
2196 #define __FUNCT__ "DMSetApplicationContext"
2197 /*@
2198     DMSetApplicationContext - Set a user context into a DM object
2199 
2200     Not Collective
2201 
2202     Input Parameters:
2203 +   dm - the DM object
2204 -   ctx - the user context
2205 
2206     Level: intermediate
2207 
2208 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2209 
2210 @*/
/* Stores the user context pointer on the DM; ownership stays with the caller unless a
   destroy routine is registered via DMSetApplicationContextDestroy(). */
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2218 
2219 #undef __FUNCT__
2220 #define __FUNCT__ "DMGetApplicationContext"
2221 /*@
2222     DMGetApplicationContext - Gets a user context from a DM object
2223 
2224     Not Collective
2225 
2226     Input Parameter:
2227 .   dm - the DM object
2228 
2229     Output Parameter:
2230 .   ctx - the user context
2231 
2232     Level: intermediate
2233 
2234 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2235 
2236 @*/
2237 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2238 {
2239   PetscFunctionBegin;
2240   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2241   *(void**)ctx = dm->ctx;
2242   PetscFunctionReturn(0);
2243 }
2244 
2245 #undef __FUNCT__
2246 #define __FUNCT__ "DMSetVariableBounds"
2247 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2249 
2250     Logically Collective on DM
2251 
2252     Input Parameter:
2253 +   dm - the DM object
2254 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2255 
2256     Level: intermediate
2257 
2258 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2259          DMSetJacobian()
2260 
2261 @*/
2262 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2263 {
2264   PetscFunctionBegin;
2265   dm->ops->computevariablebounds = f;
2266   PetscFunctionReturn(0);
2267 }
2268 
2269 #undef __FUNCT__
2270 #define __FUNCT__ "DMHasVariableBounds"
2271 /*@
2272     DMHasVariableBounds - does the DM object have a variable bounds function?
2273 
2274     Not Collective
2275 
2276     Input Parameter:
.   dm - the DM object
2278 
2279     Output Parameter:
2280 .   flg - PETSC_TRUE if the variable bounds function exists
2281 
2282     Level: developer
2283 
2284 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2285 
2286 @*/
2287 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2288 {
2289   PetscFunctionBegin;
2290   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2291   PetscFunctionReturn(0);
2292 }
2293 
2294 #undef __FUNCT__
2295 #define __FUNCT__ "DMComputeVariableBounds"
2296 /*@C
2297     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2298 
2299     Logically Collective on DM
2300 
2301     Input Parameters:
+   dm - the DM object
2303 -   x  - current solution at which the bounds are computed
2304 
2305     Output parameters:
2306 +   xl - lower bound
2307 -   xu - upper bound
2308 
2309     Level: intermediate
2310 
2311 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2312 
2313 @*/
2314 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2315 {
2316   PetscErrorCode ierr;
2317 
2318   PetscFunctionBegin;
2319   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2320   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2321   if (dm->ops->computevariablebounds) {
2322     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2323   }
2324   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2325   PetscFunctionReturn(0);
2326 }
2327 
2328 #undef __FUNCT__
2329 #define __FUNCT__ "DMHasColoring"
2330 /*@
2331     DMHasColoring - does the DM object have a method of providing a coloring?
2332 
2333     Not Collective
2334 
2335     Input Parameter:
2336 .   dm - the DM object
2337 
2338     Output Parameter:
2339 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2340 
2341     Level: developer
2342 
2343 .seealso DMHasFunction(), DMCreateColoring()
2344 
2345 @*/
2346 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2347 {
2348   PetscFunctionBegin;
2349   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2350   PetscFunctionReturn(0);
2351 }
2352 
2353 #undef  __FUNCT__
2354 #define __FUNCT__ "DMSetVec"
2355 /*@C
2356     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2357 
2358     Collective on DM
2359 
2360     Input Parameter:
2361 +   dm - the DM object
2362 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2363 
2364     Level: developer
2365 
2366 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2367 
2368 @*/
2369 PetscErrorCode  DMSetVec(DM dm,Vec x)
2370 {
2371   PetscErrorCode ierr;
2372 
2373   PetscFunctionBegin;
2374   if (x) {
2375     if (!dm->x) {
2376       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2377     }
2378     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2379   }
2380   else if (dm->x) {
2381     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2382   }
2383   PetscFunctionReturn(0);
2384 }
2385 
PetscFunctionList DMList                       = PETSC_NULL;  /* list of registered DM types, populated by DMRegister() */
PetscBool         DMRegisterAllCalled          = PETSC_FALSE; /* guards one-time registration of the built-in DM types */
2388 
2389 #undef __FUNCT__
2390 #define __FUNCT__ "DMSetType"
2391 /*@C
2392   DMSetType - Builds a DM, for a particular DM implementation.
2393 
2394   Collective on DM
2395 
2396   Input Parameters:
2397 + dm     - The DM object
2398 - method - The name of the DM type
2399 
2400   Options Database Key:
2401 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2402 
2403   Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMCOMPOSITE, or DMSLICED).
2405 
2406   Level: intermediate
2407 
2408 .keywords: DM, set, type
2409 .seealso: DMGetType(), DMCreate()
2410 @*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* Nothing to do if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Ensure the built-in types are registered, then look up the constructor for this type name */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(((PetscObject)dm)->comm, DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any data belonging to the previous type before rebuilding as the new one */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2434 
2435 #undef __FUNCT__
2436 #define __FUNCT__ "DMGetType"
2437 /*@C
2438   DMGetType - Gets the DM type name (as a string) from the DM.
2439 
2440   Not Collective
2441 
2442   Input Parameter:
2443 . dm  - The DM
2444 
2445   Output Parameter:
2446 . type - The DM type name
2447 
2448   Level: intermediate
2449 
2450 .keywords: DM, get, type, name
2451 .seealso: DMSetType(), DMCreate()
2452 @*/
2453 PetscErrorCode  DMGetType(DM dm, DMType *type)
2454 {
2455   PetscErrorCode ierr;
2456 
2457   PetscFunctionBegin;
2458   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2459   PetscValidCharPointer(type,2);
2460   if (!DMRegisterAllCalled) {
2461     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2462   }
2463   *type = ((PetscObject)dm)->type_name;
2464   PetscFunctionReturn(0);
2465 }
2466 
2467 #undef __FUNCT__
2468 #define __FUNCT__ "DMConvert"
2469 /*@C
2470   DMConvert - Converts a DM to another DM, either of the same or different type.
2471 
2472   Collective on DM
2473 
2474   Input Parameters:
2475 + dm - the DM
2476 - newtype - new DM type (use "same" for the same type)
2477 
2478   Output Parameter:
2479 . M - pointer to new DM
2480 
2481   Notes:
2482   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2483   the MPI communicator of the generated DM is always the same as the communicator
2484   of the input DM.
2485 
2486   Level: intermediate
2487 
2488 .seealso: DMCreate()
2489 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* The composed-function name has the form DMConvert_<fromtype>_<totype>_C */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created only to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No fallback converter exists yet, so failing both lookups is an error */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  /* The new DM's data differs from any previously cached state, so bump its state counter */
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2562 
2563 /*--------------------------------------------------------------------------------------------------------------------*/
2564 
2565 #undef __FUNCT__
2566 #define __FUNCT__ "DMRegister"
2567 /*@C
2568   DMRegister - See DMRegisterDynamic()
2569 
2570   Level: advanced
2571 @*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Register the creation routine under sname as "path:name" so it can be
     resolved (possibly via dynamic loading) when the type is first requested */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2584 
2585 
2586 /*--------------------------------------------------------------------------------------------------------------------*/
2587 #undef __FUNCT__
2588 #define __FUNCT__ "DMRegisterDestroy"
2589 /*@C
2590    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2591 
2592    Not Collective
2593 
2594    Level: advanced
2595 
2596 .keywords: DM, register, destroy
2597 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2598 @*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
  /* Reset the guard so DMRegisterAll() can repopulate the list later if needed */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2608 
2609 #undef __FUNCT__
2610 #define __FUNCT__ "DMLoad"
2611 /*@C
2612   DMLoad - Loads a DM that has been stored in binary  with DMView().
2613 
2614   Collective on PetscViewer
2615 
2616   Input Parameters:
2617 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2618            some related function before a call to DMLoad().
2619 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2620            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2621 
2622    Level: intermediate
2623 
2624   Notes:
2625    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2626 
2627   Notes for advanced users:
2628   Most users should not need to know the details of the binary storage
2629   format, since DMLoad() and DMView() completely hide these details.
2630   But for anyone who's interested, the standard binary matrix storage
2631   format is
2632 .vb
2633      has not yet been determined
2634 .ve
2635 
2636 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2637 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];  /* DM type name as stored in the file */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The on-disk record starts with the DM class id followed by the type name;
     the stored type overrides whatever type was set on newdm before this call */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the rest of the record to the type-specific load, if any */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2660 
2661 /******************************** FEM Support **********************************/
2662 
2663 #undef __FUNCT__
2664 #define __FUNCT__ "DMPrintCellVector"
2665 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2666 {
2667   PetscInt       f;
2668   PetscErrorCode ierr;
2669 
2670   PetscFunctionBegin;
2671   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2672   for (f = 0; f < len; ++f) {
2673     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2674   }
2675   PetscFunctionReturn(0);
2676 }
2677 
2678 #undef __FUNCT__
2679 #define __FUNCT__ "DMPrintCellMatrix"
2680 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2681 {
2682   PetscInt       f, g;
2683   PetscErrorCode ierr;
2684 
2685   PetscFunctionBegin;
2686   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2687   for (f = 0; f < rows; ++f) {
2688     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2689     for (g = 0; g < cols; ++g) {
2690       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2691     }
2692     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2693   }
2694   PetscFunctionReturn(0);
2695 }
2696 
2697 #undef __FUNCT__
2698 #define __FUNCT__ "DMGetDefaultSection"
2699 /*@
2700   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2701 
2702   Input Parameter:
2703 . dm - The DM
2704 
2705   Output Parameter:
2706 . section - The PetscSection
2707 
2708   Level: intermediate
2709 
2710   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2711 
2712 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2713 @*/
2714 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2715 {
2716   PetscFunctionBegin;
2717   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2718   PetscValidPointer(section, 2);
2719   *section = dm->defaultSection;
2720   PetscFunctionReturn(0);
2721 }
2722 
2723 #undef __FUNCT__
2724 #define __FUNCT__ "DMSetDefaultSection"
2725 /*@
2726   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2727 
2728   Input Parameters:
2729 + dm - The DM
2730 - section - The PetscSection
2731 
2732   Level: intermediate
2733 
2734   Note: Any existing Section will be destroyed
2735 
2736 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2737 @*/
2738 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2739 {
2740   PetscInt       numFields;
2741   PetscInt       f;
2742   PetscErrorCode ierr;
2743 
2744   PetscFunctionBegin;
2745   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2746   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2747   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2748   dm->defaultSection = section;
2749   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2750   if (numFields) {
2751     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2752     for (f = 0; f < numFields; ++f) {
2753       const char *name;
2754 
2755       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2756       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2757     }
2758   }
2759   PetscFunctionReturn(0);
2760 }
2761 
2762 #undef __FUNCT__
2763 #define __FUNCT__ "DMGetDefaultGlobalSection"
2764 /*@
2765   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2766 
2767   Collective on DM
2768 
2769   Input Parameter:
2770 . dm - The DM
2771 
2772   Output Parameter:
2773 . section - The PetscSection
2774 
2775   Level: intermediate
2776 
2777   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2778 
2779 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2780 @*/
2781 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2782 {
2783   PetscErrorCode ierr;
2784 
2785   PetscFunctionBegin;
2786   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2787   PetscValidPointer(section, 2);
2788   if (!dm->defaultGlobalSection) {
2789     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2790     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2791     ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2792   }
2793   *section = dm->defaultGlobalSection;
2794   PetscFunctionReturn(0);
2795 }
2796 
2797 #undef __FUNCT__
2798 #define __FUNCT__ "DMSetDefaultGlobalSection"
2799 /*@
2800   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2801 
2802   Input Parameters:
2803 + dm - The DM
2804 - section - The PetscSection
2805 
2806   Level: intermediate
2807 
2808   Note: Any existing Section will be destroyed
2809 
2810 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2811 @*/
2812 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2813 {
2814   PetscErrorCode ierr;
2815 
2816   PetscFunctionBegin;
2817   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2818   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2819   dm->defaultGlobalSection = section;
2820   PetscFunctionReturn(0);
2821 }
2822 
2823 #undef __FUNCT__
2824 #define __FUNCT__ "DMGetDefaultSF"
2825 /*@
2826   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2827   it is created from the default PetscSection layouts in the DM.
2828 
2829   Input Parameter:
2830 . dm - The DM
2831 
2832   Output Parameter:
2833 . sf - The PetscSF
2834 
2835   Level: intermediate
2836 
2837   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2838 
2839 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2840 @*/
2841 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2842 {
2843   PetscInt       nroots;
2844   PetscErrorCode ierr;
2845 
2846   PetscFunctionBegin;
2847   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2848   PetscValidPointer(sf, 2);
2849   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2850   if (nroots < 0) {
2851     PetscSection section, gSection;
2852 
2853     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2854     if (section) {
2855       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2856       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2857     } else {
2858       *sf = PETSC_NULL;
2859       PetscFunctionReturn(0);
2860     }
2861   }
2862   *sf = dm->defaultSF;
2863   PetscFunctionReturn(0);
2864 }
2865 
2866 #undef __FUNCT__
2867 #define __FUNCT__ "DMSetDefaultSF"
2868 /*@
2869   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2870 
2871   Input Parameters:
2872 + dm - The DM
2873 - sf - The PetscSF
2874 
2875   Level: intermediate
2876 
2877   Note: Any previous SF is destroyed
2878 
2879 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2880 @*/
2881 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
2882 {
2883   PetscErrorCode ierr;
2884 
2885   PetscFunctionBegin;
2886   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2887   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2888   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2889   dm->defaultSF = sf;
2890   PetscFunctionReturn(0);
2891 }
2892 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* nroots = number of locally-owned unconstrained dofs (the SF root space) */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout of owned dofs so a global offset can be mapped back to its
     owning rank through the ranges array */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* Pass 1: count leaves. A negative gdof encodes a remotely-owned point as
     -(dof+1); either way the unconstrained dof count is accumulated */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Pass 2: fill the leaf (local offsets) and root (owner rank/index) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    /* gsize = number of global (unconstrained) dofs on this point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs, skipping constrained
       indices listed in cind (assumed sorted ascending) */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: decode the true global offset from -(goff+1)
         and locate the owning rank by bisection over the layout ranges */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insert location)-1 when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: roots live on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2981 
2982 #undef __FUNCT__
2983 #define __FUNCT__ "DMGetPointSF"
2984 /*@
2985   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2986 
2987   Input Parameter:
2988 . dm - The DM
2989 
2990   Output Parameter:
2991 . sf - The PetscSF
2992 
2993   Level: intermediate
2994 
2995   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2996 
2997 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2998 @*/
2999 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3000 {
3001   PetscFunctionBegin;
3002   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3003   PetscValidPointer(sf, 2);
3004   *sf = dm->sf;
3005   PetscFunctionReturn(0);
3006 }
3007 
3008 #undef __FUNCT__
3009 #define __FUNCT__ "DMSetPointSF"
3010 /*@
3011   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3012 
3013   Input Parameters:
3014 + dm - The DM
3015 - sf - The PetscSF
3016 
3017   Level: intermediate
3018 
3019 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3020 @*/
3021 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3022 {
3023   PetscErrorCode ierr;
3024 
3025   PetscFunctionBegin;
3026   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3027   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3028   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3029   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3030   dm->sf = sf;
3031   PetscFunctionReturn(0);
3032 }
3033 
3034 #undef __FUNCT__
3035 #define __FUNCT__ "DMGetNumFields"
3036 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3037 {
3038   PetscFunctionBegin;
3039   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3040   PetscValidPointer(numFields, 2);
3041   *numFields = dm->numFields;
3042   PetscFunctionReturn(0);
3043 }
3044 
3045 #undef __FUNCT__
3046 #define __FUNCT__ "DMSetNumFields"
3047 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3048 {
3049   PetscInt       f;
3050   PetscErrorCode ierr;
3051 
3052   PetscFunctionBegin;
3053   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3054   for (f = 0; f < dm->numFields; ++f) {
3055     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3056   }
3057   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3058   dm->numFields = numFields;
3059   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3060   for (f = 0; f < dm->numFields; ++f) {
3061     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3062   }
3063   PetscFunctionReturn(0);
3064 }
3065 
3066 #undef __FUNCT__
3067 #define __FUNCT__ "DMGetField"
3068 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3069 {
3070   PetscFunctionBegin;
3071   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3072   PetscValidPointer(field, 2);
3073   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3074   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3075   *field = dm->fields[f];
3076   PetscFunctionReturn(0);
3077 }
3078 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do not include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference c before destroying the old vector, so self-assignment is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* The cached local coordinates are now stale; DMGetCoordinatesLocal() rebuilds them */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3111 
3112 #undef __FUNCT__
3113 #define __FUNCT__ "DMSetCoordinatesLocal"
3114 /*@
3115   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3116 
3117   Collective on DM
3118 
3119    Input Parameters:
3120 +  dm - the DM
3121 -  c - coordinate vector
3122 
3123   Note:
3124   The coordinates of ghost points can be set using DMSetCoordinates()
3125   followed by DMGetCoordinatesLocal(). This is intended to enable the
3126   setting of ghost coordinates outside of the domain.
3127 
3128   Level: intermediate
3129 
3130 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3131 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3132 @*/
3133 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3134 {
3135   PetscErrorCode ierr;
3136 
3137   PetscFunctionBegin;
3138   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3139   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3140   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3141   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3142   dm->coordinatesLocal = c;
3143   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3144   PetscFunctionReturn(0);
3145 }
3146 
3147 #undef __FUNCT__
3148 #define __FUNCT__ "DMGetCoordinates"
3149 /*@
3150   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3151 
3152   Not Collective
3153 
3154   Input Parameter:
3155 . dm - the DM
3156 
3157   Output Parameter:
3158 . c - global coordinate vector
3159 
3160   Note:
3161   This is a borrowed reference, so the user should NOT destroy this vector
3162 
3163   Each process has only the local coordinates (does NOT have the ghost coordinates).
3164 
3165   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3166   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3167 
3168   Level: intermediate
3169 
3170 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3171 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3172 @*/
3173 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3174 {
3175   PetscErrorCode ierr;
3176 
3177   PetscFunctionBegin;
3178   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3179   PetscValidPointer(c,2);
3180   if (!dm->coordinates && dm->coordinatesLocal) {
3181     DM cdm = PETSC_NULL;
3182 
3183     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3184     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3185     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3186     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3187     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3188   }
3189   *c = dm->coordinates;
3190   PetscFunctionReturn(0);
3191 }
3192 
3193 #undef __FUNCT__
3194 #define __FUNCT__ "DMGetCoordinatesLocal"
3195 /*@
3196   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3197 
3198   Collective on DM
3199 
3200   Input Parameter:
3201 . dm - the DM
3202 
3203   Output Parameter:
3204 . c - coordinate vector
3205 
3206   Note:
3207   This is a borrowed reference, so the user should NOT destroy this vector
3208 
3209   Each process has the local and ghost coordinates
3210 
3211   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3212   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3213 
3214   Level: intermediate
3215 
3216 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3217 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3218 @*/
3219 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3220 {
3221   PetscErrorCode ierr;
3222 
3223   PetscFunctionBegin;
3224   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3225   PetscValidPointer(c,2);
3226   if (!dm->coordinatesLocal && dm->coordinates) {
3227     DM cdm = PETSC_NULL;
3228 
3229     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3230     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3231     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3232     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3233     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3234   }
3235   *c = dm->coordinatesLocal;
3236   PetscFunctionReturn(0);
3237 }
3238 
3239 #undef __FUNCT__
3240 #define __FUNCT__ "DMGetCoordinateDM"
3241 /*@
3242   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3243 
3244   Collective on DM
3245 
3246   Input Parameter:
3247 . dm - the DM
3248 
3249   Output Parameter:
3250 . cdm - coordinate DM
3251 
3252   Level: intermediate
3253 
3254 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3255 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3256 @*/
3257 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3258 {
3259   PetscErrorCode ierr;
3260 
3261   PetscFunctionBegin;
3262   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3263   PetscValidPointer(cdm,2);
3264   if (!dm->coordinateDM) {
3265     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3266     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3267   }
3268   *cdm = dm->coordinateDM;
3269   PetscFunctionReturn(0);
3270 }
3271 
3272 #undef __FUNCT__
3273 #define __FUNCT__ "DMLocatePoints"
3274 /*@
3275   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3276 
3277   Not collective
3278 
3279   Input Parameters:
3280 + dm - The DM
3281 - v - The Vec of points
3282 
3283   Output Parameter:
3284 . cells - The local cell numbers for cells which contain the points
3285 
3286   Level: developer
3287 
3288 .keywords: point location, mesh
3289 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3290 @*/
3291 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3292 {
3293   PetscErrorCode ierr;
3294 
3295   PetscFunctionBegin;
3296   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3297   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3298   PetscValidPointer(cells,3);
3299   if (dm->ops->locatepoints) {
3300     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3301   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3302   PetscFunctionReturn(0);
3303 }
3304