xref: /petsc/src/dm/interface/dm.c (revision 5f0e2cadda4c4faebce9006066b03227c57d68ea)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never  call DMSetType()  it will generate an
38    error when you try to use the vector.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
52 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
53 {
54   DM             v;
55   PetscErrorCode ierr;
56 
57   PetscFunctionBegin;
58   PetscValidPointer(dm,2);
59   *dm = PETSC_NULL;
60 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
61   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
62   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
63   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
64 #endif
65 
66   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
67   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
68 
69 
70   v->ltogmap      = PETSC_NULL;
71   v->ltogmapb     = PETSC_NULL;
72   v->bs           = 1;
73   v->coloringtype = IS_COLORING_GLOBAL;
74   ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
75   ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
76   v->defaultSection       = PETSC_NULL;
77   v->defaultGlobalSection = PETSC_NULL;
78   {
79     PetscInt i;
80     for (i = 0; i < 10; ++i) {
81       v->nullspaceConstructors[i] = PETSC_NULL;
82     }
83   }
84   v->numFields = 0;
85   v->fields    = PETSC_NULL;
86 
87   *dm = v;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMSetVecType"
93 /*@C
94        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
95 
96    Logically Collective on DMDA
97 
98    Input Parameter:
99 +  da - initial distributed array
100 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
101 
102    Options Database:
103 .   -dm_vec_type ctype
104 
105    Level: intermediate
106 
107 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
108 @*/
109 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   PetscValidHeaderSpecific(da,DM_CLASSID,1);
115   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
116   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "VecGetDM"
122 /*@
123   VecGetDM - Gets the DM defining the data layout of the vector
124 
125   Not collective
126 
127   Input Parameter:
128 . v - The Vec
129 
130   Output Parameter:
131 . dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecGetDM(Vec v, DM *dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidPointer(dm,2);
144   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #undef __FUNCT__
175 #define __FUNCT__ "DMSetMatType"
176 /*@C
177        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
178 
179    Logically Collective on DM
180 
181    Input Parameter:
182 +  dm - the DM context
183 .  ctype - the matrix type
184 
185    Options Database:
186 .   -dm_mat_type ctype
187 
188    Level: intermediate
189 
190 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
191 @*/
192 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
193 {
194   PetscErrorCode ierr;
195 
196   PetscFunctionBegin;
197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
198   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
199   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "MatGetDM"
205 /*@
206   MatGetDM - Gets the DM defining the data layout of the matrix
207 
208   Not collective
209 
210   Input Parameter:
211 . A - The Mat
212 
213   Output Parameter:
214 . dm - The DM
215 
216   Level: intermediate
217 
218 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
219 @*/
220 PetscErrorCode MatGetDM(Mat A, DM *dm)
221 {
222   PetscErrorCode ierr;
223 
224   PetscFunctionBegin;
225   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
226   PetscValidPointer(dm,2);
227   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatSetDM"
233 /*@
234   MatSetDM - Sets the DM defining the data layout of the matrix
235 
236   Not collective
237 
238   Input Parameters:
239 + A - The Mat
240 - dm - The DM
241 
242   Level: intermediate
243 
244 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
245 @*/
246 PetscErrorCode MatSetDM(Mat A, DM dm)
247 {
248   PetscErrorCode ierr;
249 
250   PetscFunctionBegin;
251   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
252   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
253   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMSetOptionsPrefix"
259 /*@C
260    DMSetOptionsPrefix - Sets the prefix used for searching for all
261    DMDA options in the database.
262 
263    Logically Collective on DMDA
264 
265    Input Parameter:
266 +  da - the DMDA context
267 -  prefix - the prefix to prepend to all option names
268 
269    Notes:
270    A hyphen (-) must NOT be given at the beginning of the prefix name.
271    The first character of all runtime options is AUTOMATICALLY the hyphen.
272 
273    Level: advanced
274 
275 .keywords: DMDA, set, options, prefix, database
276 
277 .seealso: DMSetFromOptions()
278 @*/
279 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
280 {
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
285   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
286   PetscFunctionReturn(0);
287 }
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMDestroy"
291 /*@
292     DMDestroy - Destroys a vector packer or DMDA.
293 
294     Collective on DM
295 
296     Input Parameter:
297 .   dm - the DM object to destroy
298 
299     Level: developer
300 
301 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
302 
303 @*/
304 PetscErrorCode  DMDestroy(DM *dm)
305 {
306   PetscInt       i, cnt = 0, f;
307   DMNamedVecLink nlink,nnext;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*dm) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
313 
314   /* count all the circular references of DM and its contained Vecs */
315   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
316     if ((*dm)->localin[i])  {cnt++;}
317     if ((*dm)->globalin[i]) {cnt++;}
318   }
319   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
320   if ((*dm)->x) {
321     DM obj;
322     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
323     if (obj == *dm) cnt++;
324   }
325   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
326   if ((*dm)->x) {
327     DM obj;
328     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
329     if (obj == *dm) cnt++;
330   }
331 
332   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
333   /*
334      Need this test because the dm references the vectors that
335      reference the dm, so destroying the dm calls destroy on the
336      vectors that cause another destroy on the dm
337   */
338   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
339   ((PetscObject) (*dm))->refct = 0;
340   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
341     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
342     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
343   }
344   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
345     nnext = nlink->next;
346     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
347     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
348     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
349     ierr = PetscFree(nlink);CHKERRQ(ierr);
350   }
351   (*dm)->namedglobal = PETSC_NULL;
352 
353   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
354     nnext = nlink->next;
355     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
356     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
357     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
358     ierr = PetscFree(nlink);CHKERRQ(ierr);
359   }
360   (*dm)->namedlocal = PETSC_NULL;
361 
362   /* Destroy the list of hooks */
363   {
364     DMCoarsenHookLink link,next;
365     for (link=(*dm)->coarsenhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->coarsenhook = PETSC_NULL;
370   }
371   {
372     DMRefineHookLink link,next;
373     for (link=(*dm)->refinehook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->refinehook = PETSC_NULL;
378   }
379   {
380     DMSubDomainHookLink link,next;
381     for (link=(*dm)->subdomainhook; link; link=next) {
382       next = link->next;
383       ierr = PetscFree(link);CHKERRQ(ierr);
384     }
385     (*dm)->subdomainhook = PETSC_NULL;
386   }
387   {
388     DMGlobalToLocalHookLink link,next;
389     for (link=(*dm)->gtolhook; link; link=next) {
390       next = link->next;
391       ierr = PetscFree(link);CHKERRQ(ierr);
392     }
393     (*dm)->gtolhook = PETSC_NULL;
394   }
395   /* Destroy the work arrays */
396   {
397     DMWorkLink link,next;
398     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
399     for (link=(*dm)->workin; link; link=next) {
400       next = link->next;
401       ierr = PetscFree(link->mem);CHKERRQ(ierr);
402       ierr = PetscFree(link);CHKERRQ(ierr);
403     }
404     (*dm)->workin = PETSC_NULL;
405   }
406 
407   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
408   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
410 
411   if ((*dm)->ctx && (*dm)->ctxdestroy) {
412     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
413   }
414   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
415   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
416   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
417   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
419   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
421 
422   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
423   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
424   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
425   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
427 
428   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
429   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
431 
432   for (f = 0; f < (*dm)->numFields; ++f) {
433     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
434   }
435   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
436   /* if memory was published with AMS then destroy it */
437   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
438 
439   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
440   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
441   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "DMSetUp"
447 /*@
448     DMSetUp - sets up the data structures inside a DM object
449 
450     Collective on DM
451 
452     Input Parameter:
453 .   dm - the DM object to setup
454 
455     Level: developer
456 
457 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
458 
459 @*/
460 PetscErrorCode  DMSetUp(DM dm)
461 {
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
466   if (dm->setupcalled) PetscFunctionReturn(0);
467   if (dm->ops->setup) {
468     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
469   }
470   dm->setupcalled = PETSC_TRUE;
471   PetscFunctionReturn(0);
472 }
473 
474 #undef __FUNCT__
475 #define __FUNCT__ "DMSetFromOptions"
476 /*@
477     DMSetFromOptions - sets parameters in a DM from the options database
478 
479     Collective on DM
480 
481     Input Parameter:
482 .   dm - the DM object to set options for
483 
484     Options Database:
485 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
486 .   -dm_vec_type <type>  type of vector to create inside DM
487 .   -dm_mat_type <type>  type of matrix to create inside DM
488 -   -dm_coloring_type <global or ghosted>
489 
490     Level: developer
491 
492 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
493 
494 @*/
495 PetscErrorCode  DMSetFromOptions(DM dm)
496 {
497   char           typeName[256] = MATAIJ;
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
503   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
504     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
505     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
506     if (flg) {
507       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
508     }
509     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
510     if (flg) {
511       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
512     }
513     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
514     if (dm->ops->setfromoptions) {
515       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
516     }
517     /* process any options handlers added with PetscObjectAddOptionsHandler() */
518     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
519   ierr = PetscOptionsEnd();CHKERRQ(ierr);
520   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMView"
526 /*@C
527     DMView - Views a vector packer or DMDA.
528 
529     Collective on DM
530 
531     Input Parameter:
532 +   dm - the DM object to view
533 -   v - the viewer
534 
535     Level: developer
536 
537 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
538 
539 @*/
540 PetscErrorCode  DMView(DM dm,PetscViewer v)
541 {
542   PetscErrorCode ierr;
543   PetscBool      isbinary;
544 
545   PetscFunctionBegin;
546   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
547   if (!v) {
548     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
549   }
550   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
551   if (isbinary) {
552     PetscInt         classid = DM_FILE_CLASSID;
553     char             type[256];
554 
555     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
556     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
557     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
558   }
559   if (dm->ops->view) {
560     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
561   }
562   PetscFunctionReturn(0);
563 }
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "DMCreateGlobalVector"
567 /*@
568     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
569 
570     Collective on DM
571 
572     Input Parameter:
573 .   dm - the DM object
574 
575     Output Parameter:
576 .   vec - the global vector
577 
578     Level: beginner
579 
580 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
581 
582 @*/
583 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
584 {
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
589   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "DMCreateLocalVector"
595 /*@
596     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
597 
598     Not Collective
599 
600     Input Parameter:
601 .   dm - the DM object
602 
603     Output Parameter:
604 .   vec - the local vector
605 
606     Level: beginner
607 
608 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
609 
610 @*/
611 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
612 {
613   PetscErrorCode ierr;
614 
615   PetscFunctionBegin;
616   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
617   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
618   PetscFunctionReturn(0);
619 }
620 
621 #undef __FUNCT__
622 #define __FUNCT__ "DMGetLocalToGlobalMapping"
623 /*@
624    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
625 
626    Collective on DM
627 
628    Input Parameter:
629 .  dm - the DM that provides the mapping
630 
631    Output Parameter:
632 .  ltog - the mapping
633 
634    Level: intermediate
635 
636    Notes:
637    This mapping can then be used by VecSetLocalToGlobalMapping() or
638    MatSetLocalToGlobalMapping().
639 
640 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
641 @*/
642 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
643 {
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
648   PetscValidPointer(ltog,2);
649   if (!dm->ltogmap) {
650     PetscSection section, sectionGlobal;
651 
652     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
653     if (section) {
654       PetscInt      *ltog;
655       PetscInt       pStart, pEnd, size, p, l;
656 
657       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
658       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
659       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
660       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
661       for (p = pStart, l = 0; p < pEnd; ++p) {
662         PetscInt dof, off, c;
663 
664         /* Should probably use constrained dofs */
665         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
666         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
667         for (c = 0; c < dof; ++c, ++l) {
668           ltog[l] = off+c;
669         }
670       }
671       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
672       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
673     } else {
674       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
675       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
676     }
677   }
678   *ltog = dm->ltogmap;
679   PetscFunctionReturn(0);
680 }
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
684 /*@
685    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
686 
687    Collective on DM
688 
689    Input Parameter:
690 .  da - the distributed array that provides the mapping
691 
692    Output Parameter:
693 .  ltog - the block mapping
694 
695    Level: intermediate
696 
697    Notes:
698    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
699    MatSetLocalToGlobalMappingBlock().
700 
701 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
702 @*/
703 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
704 {
705   PetscErrorCode ierr;
706 
707   PetscFunctionBegin;
708   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
709   PetscValidPointer(ltog,2);
710   if (!dm->ltogmapb) {
711     PetscInt bs;
712     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
713     if (bs > 1) {
714       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
715       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
716     } else {
717       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
718       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
719     }
720   }
721   *ltog = dm->ltogmapb;
722   PetscFunctionReturn(0);
723 }
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "DMGetBlockSize"
727 /*@
728    DMGetBlockSize - Gets the inherent block size associated with a DM
729 
730    Not Collective
731 
732    Input Parameter:
733 .  dm - the DM with block structure
734 
735    Output Parameter:
736 .  bs - the block size, 1 implies no exploitable block structure
737 
738    Level: intermediate
739 
740 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
741 @*/
742 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
743 {
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
746   PetscValidPointer(bs,2);
747   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
748   *bs = dm->bs;
749   PetscFunctionReturn(0);
750 }
751 
752 #undef __FUNCT__
753 #define __FUNCT__ "DMCreateInterpolation"
754 /*@
755     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
756 
757     Collective on DM
758 
759     Input Parameter:
760 +   dm1 - the DM object
761 -   dm2 - the second, finer DM object
762 
763     Output Parameter:
764 +  mat - the interpolation
765 -  vec - the scaling (optional)
766 
767     Level: developer
768 
769     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
770         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
771 
772         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
773         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
774 
775 
776 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
777 
778 @*/
779 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
780 {
781   PetscErrorCode ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
785   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
786   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "DMCreateInjection"
792 /*@
793     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
794 
795     Collective on DM
796 
797     Input Parameter:
798 +   dm1 - the DM object
799 -   dm2 - the second, finer DM object
800 
801     Output Parameter:
802 .   ctx - the injection
803 
804     Level: developer
805 
806    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
807         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
808 
809 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
810 
811 @*/
812 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
813 {
814   PetscErrorCode ierr;
815 
816   PetscFunctionBegin;
817   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
818   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
819   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "DMCreateColoring"
825 /*@C
826     DMCreateColoring - Gets coloring for a DMDA or DMComposite
827 
828     Collective on DM
829 
830     Input Parameter:
831 +   dm - the DM object
832 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
833 -   matype - either MATAIJ or MATBAIJ
834 
835     Output Parameter:
836 .   coloring - the coloring
837 
838     Level: developer
839 
840 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
841 
842 @*/
843 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
844 {
845   PetscErrorCode ierr;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
849   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
850   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
851   PetscFunctionReturn(0);
852 }
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMCreateMatrix"
856 /*@C
857     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
858 
859     Collective on DM
860 
861     Input Parameter:
862 +   dm - the DM object
863 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
864             any type which inherits from one of these (such as MATAIJ)
865 
866     Output Parameter:
867 .   mat - the empty Jacobian
868 
869     Level: beginner
870 
871     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
872        do not need to do it yourself.
873 
874        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
875        the nonzero pattern call DMDASetMatPreallocateOnly()
876 
877        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
878        internally by PETSc.
879 
880        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
881        the indices for the global numbering for DMDAs which is complicated.
882 
883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
884 
885 @*/
886 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
887 {
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
892 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
893   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
894 #endif
895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
896   PetscValidPointer(mat,3);
897   if (dm->mattype) {
898     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
899   } else {
900     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
907 /*@
908   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
909     preallocated but the nonzero structure and zero values will not be set.
910 
911   Logically Collective on DMDA
912 
913   Input Parameter:
914 + dm - the DM
915 - only - PETSC_TRUE if only want preallocation
916 
917   Level: developer
918 .seealso DMCreateMatrix()
919 @*/
920 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
921 {
922   PetscFunctionBegin;
923   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
924   dm->prealloc_only = only;
925   PetscFunctionReturn(0);
926 }
927 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the pointer that receives the work array, e.g. DMGetWorkArray(dm,n,PETSC_SCALAR,&array)

  Level: developer

.seealso DMRestoreWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink link;
  size_t size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse a buffer from the free ("in") list when one is available */
    link = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    /* No free buffer: create a fresh zero-initialized link (bytes==0, mem==NULL) */
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  if (size*count > link->bytes) {
    /* Grow to the requested size; old contents need not be preserved */
    ierr = PetscFree(link->mem);CHKERRQ(ierr);
    ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out ("out") list until it is restored */
  link->next = dm->workout;
  dm->workout = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
973 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of the work array pointer; set to PETSC_NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Locate the link holding this buffer on the checked-out list and splice
     it back onto the free list, keeping the buffer for later reuse */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p = link->next;
      link->next = dm->workin;
      dm->workin = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "DMSetNullSpaceConstructor"
1015 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1019   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1020   dm->nullspaceConstructors[field] = nullsp;
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 #undef __FUNCT__
1025 #define __FUNCT__ "DMCreateFieldIS"
1026 /*@C
1027   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1028 
1029   Not collective
1030 
1031   Input Parameter:
1032 . dm - the DM object
1033 
1034   Output Parameters:
1035 + numFields  - The number of fields (or PETSC_NULL if not requested)
1036 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1037 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1038 
1039   Level: intermediate
1040 
1041   Notes:
1042   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1043   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1044   PetscFree().
1045 
1046 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1047 @*/
1048 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1049 {
1050   PetscSection   section, sectionGlobal;
1051   PetscErrorCode ierr;
1052 
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1055   if (numFields) {
1056     PetscValidPointer(numFields,2);
1057     *numFields = 0;
1058   }
1059   if (fieldNames) {
1060     PetscValidPointer(fieldNames,3);
1061     *fieldNames = PETSC_NULL;
1062   }
1063   if (fields) {
1064     PetscValidPointer(fields,4);
1065     *fields = PETSC_NULL;
1066   }
1067   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1068   if (section) {
1069     PetscInt *fieldSizes, **fieldIndices;
1070     PetscInt  nF, f, pStart, pEnd, p;
1071 
1072     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1073     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1074     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1075     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1076     for (f = 0; f < nF; ++f) {
1077       fieldSizes[f] = 0;
1078     }
1079     for (p = pStart; p < pEnd; ++p) {
1080       PetscInt gdof;
1081 
1082       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1083       if (gdof > 0) {
1084         for (f = 0; f < nF; ++f) {
1085           PetscInt fdof, fcdof;
1086 
1087           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1088           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1089           fieldSizes[f] += fdof-fcdof;
1090         }
1091       }
1092     }
1093     for (f = 0; f < nF; ++f) {
1094       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1095       fieldSizes[f] = 0;
1096     }
1097     for (p = pStart; p < pEnd; ++p) {
1098       PetscInt gdof, goff;
1099 
1100       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1101       if (gdof > 0) {
1102         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1103         for (f = 0; f < nF; ++f) {
1104           PetscInt fdof, fcdof, fc;
1105 
1106           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1107           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1108           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1109             fieldIndices[f][fieldSizes[f]] = goff++;
1110           }
1111         }
1112       }
1113     }
1114     if (numFields) {*numFields = nF;}
1115     if (fieldNames) {
1116       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1117       for (f = 0; f < nF; ++f) {
1118         const char *fieldName;
1119 
1120         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1121         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1122       }
1123     }
1124     if (fields) {
1125       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1126       for (f = 0; f < nF; ++f) {
1127         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1128       }
1129     }
1130     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1131   } else {
1132     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1133   }
1134   PetscFunctionReturn(0);
1135 }
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1139 /*@C
1140   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1141 
1142   Not Collective
1143 
1144   Input Parameters:
1145 + dm   - the DM object
1146 - name - the name of the field decomposition
1147 
1148   Output Parameter:
1149 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1150 
1151   Level: advanced
1152 
1153 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1154 @*/
1155 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1156 {
1157   PetscErrorCode ierr;
1158 
1159   PetscFunctionBegin;
1160   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1161   PetscValidCharPointer(name,2);
1162   PetscValidPointer(ddm,3);
1163   *ddm = PETSC_NULL;
1164   if (dm->ops->createfielddecompositiondm) {
1165     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1166   }
1167   PetscFunctionReturn(0);
1168 }
1169 
1170 #undef __FUNCT__
1171 #define __FUNCT__ "DMCreateFieldDecomposition"
1172 /*@C
1173   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1174                           corresponding to different fields: each IS contains the global indices of the dofs of the
1175                           corresponding field. The optional list of DMs define the DM for each subproblem.
1176                           Generalizes DMCreateFieldIS().
1177 
1178   Not collective
1179 
1180   Input Parameter:
1181 . dm - the DM object
1182 
1183   Output Parameters:
1184 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1185 . namelist  - The name for each field (or PETSC_NULL if not requested)
1186 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1187 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1188 
1189   Level: intermediate
1190 
1191   Notes:
1192   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1193   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1194   and all of the arrays should be freed with PetscFree().
1195 
1196 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1197 @*/
1198 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1199 {
1200   PetscErrorCode ierr;
1201 
1202   PetscFunctionBegin;
1203   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1204   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1205   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1206   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1207   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1208   /*
1209    Is it a good idea to apply the following check across all impls?
1210    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1211    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1212    */
1213   if (!dm->setupcalled) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1214   if (!dm->ops->createfielddecomposition) {
1215     PetscSection section;
1216     PetscInt     numFields, f;
1217 
1218     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1219     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1220     if (section && numFields && dm->ops->createsubdm) {
1221       *len = numFields;
1222       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1223       for (f = 0; f < numFields; ++f) {
1224         const char *fieldName;
1225 
1226         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1227         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1228         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1229       }
1230     } else {
1231       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1232       /* By default there are no DMs associated with subproblems. */
1233       if (dmlist) *dmlist = PETSC_NULL;
1234     }
1235   }
1236   else {
1237     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1238   }
1239   PetscFunctionReturn(0);
1240 }
1241 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - the field numbers of the selected fields

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  /* Purely a dispatcher: the DM implementation must provide createsubdm */
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1277 
1278 #undef __FUNCT__
1279 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1280 /*@C
1281   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1282 
1283   Not Collective
1284 
1285   Input Parameters:
1286 + dm   - the DM object
1287 - name - the name of the subdomain decomposition
1288 
1289   Output Parameter:
1290 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1291 
1292   Level: advanced
1293 
1294 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1295 @*/
1296 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1297 {
1298   PetscErrorCode ierr;
1299 
1300   PetscFunctionBegin;
1301   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1302   PetscValidCharPointer(name,2);
1303   PetscValidPointer(ddm,3);
1304   *ddm = PETSC_NULL;
1305   if (dm->ops->createdomaindecompositiondm) {
1306     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1307   }
1308   PetscFunctionReturn(0);
1309 }
1310 
1311 #undef __FUNCT__
1312 #define __FUNCT__ "DMCreateDomainDecomposition"
1313 /*@C
1314   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1315                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1316                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1317                           define a nonoverlapping covering, while outer subdomains can overlap.
1318                           The optional list of DMs define the DM for each subproblem.
1319 
1320   Not collective
1321 
1322   Input Parameter:
1323 . dm - the DM object
1324 
1325   Output Parameters:
1326 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1327 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1328 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1329 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1330 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1331 
1332   Level: intermediate
1333 
1334   Notes:
1335   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1336   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1337   and all of the arrays should be freed with PetscFree().
1338 
1339 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1340 @*/
1341 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1342 {
1343   PetscErrorCode ierr;
1344   DMSubDomainHookLink link;
1345   PetscInt            i,l;
1346 
1347   PetscFunctionBegin;
1348   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1349   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1350   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1351   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1352   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1353   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1354   /*
1355    Is it a good idea to apply the following check across all impls?
1356    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1357    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1358    */
1359   if (!dm->setupcalled) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1360   if (dm->ops->createdomaindecomposition) {
1361     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1362     /* copy subdomain hooks and context over to the subdomain DMs */
1363     if (dmlist) {
1364       for (i = 0; i < l; i++) {
1365         for (link=dm->subdomainhook; link; link=link->next) {
1366           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1367         }
1368         (*dmlist)[i]->ctx = dm->ctx;
1369       }
1370     }
1371     if (len) *len = l;
1372   }
1373   PetscFunctionReturn(0);
1374 }
1375 
1376 
1377 #undef __FUNCT__
1378 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1379 /*@C
1380   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1381 
1382   Not collective
1383 
1384   Input Parameters:
1385 + dm - the DM object
1386 . n  - the number of subdomain scatters
1387 - subdms - the local subdomains
1388 
1389   Output Parameters:
1390 + n     - the number of scatters returned
1391 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1392 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1393 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1394 
1395   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1396   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1397   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1398   solution and residual data.
1399 
1400   Level: developer
1401 
1402 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1403 @*/
1404 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1405 {
1406   PetscErrorCode ierr;
1407 
1408   PetscFunctionBegin;
1409   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1410   PetscValidPointer(subdms,3);
1411   PetscValidPointer(iscat,4);
1412   PetscValidPointer(oscat,5);
1413   PetscValidPointer(gscat,6);
1414   if (dm->ops->createddscatters) {
1415     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1416   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1417   PetscFunctionReturn(0);
1418 }
1419 
1420 #undef __FUNCT__
1421 #define __FUNCT__ "DMRefine"
1422 /*@
1423   DMRefine - Refines a DM object
1424 
1425   Collective on DM
1426 
1427   Input Parameter:
1428 + dm   - the DM object
1429 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1430 
1431   Output Parameter:
1432 . dmf - the refined DM, or PETSC_NULL
1433 
1434   Note: If no refinement was done, the return value is PETSC_NULL
1435 
1436   Level: developer
1437 
1438 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1439 @*/
1440 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1441 {
1442   PetscErrorCode ierr;
1443   DMRefineHookLink link;
1444 
1445   PetscFunctionBegin;
1446   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1447   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1448   if (*dmf) {
1449     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1450     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1451     (*dmf)->ctx       = dm->ctx;
1452     (*dmf)->leveldown = dm->leveldown;
1453     (*dmf)->levelup   = dm->levelup + 1;
1454     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1455     for (link=dm->refinehook; link; link=link->next) {
1456       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1457     }
1458   }
1459   PetscFunctionReturn(0);
1460 }
1461 
1462 #undef __FUNCT__
1463 #define __FUNCT__ "DMRefineHookAdd"
1464 /*@
1465    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1466 
1467    Logically Collective
1468 
1469    Input Arguments:
1470 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1471 .  refinehook - function to run when setting up a coarser level
1472 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1473 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1474 
1475    Calling sequence of refinehook:
1476 $    refinehook(DM coarse,DM fine,void *ctx);
1477 
1478 +  coarse - coarse level DM
1479 .  fine - fine level DM to interpolate problem to
1480 -  ctx - optional user-defined function context
1481 
1482    Calling sequence for interphook:
1483 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1484 
1485 +  coarse - coarse level DM
1486 .  interp - matrix interpolating a coarse-level solution to the finer grid
1487 .  fine - fine level DM to update
1488 -  ctx - optional user-defined function context
1489 
1490    Level: advanced
1491 
1492    Notes:
1493    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1494 
1495    If this function is called multiple times, the hooks will be run in the order they are added.
1496 
1497 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1498 @*/
1499 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1500 {
1501   PetscErrorCode ierr;
1502   DMRefineHookLink link,*p;
1503 
1504   PetscFunctionBegin;
1505   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1506   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1507   ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1508   link->refinehook = refinehook;
1509   link->interphook = interphook;
1510   link->ctx = ctx;
1511   link->next = PETSC_NULL;
1512   *p = link;
1513   PetscFunctionReturn(0);
1514 }
1515 
1516 #undef __FUNCT__
1517 #define __FUNCT__ "DMInterpolate"
1518 /*@
1519    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1520 
1521    Collective if any hooks are
1522 
1523    Input Arguments:
1524 +  coarse - coarser DM to use as a base
1525 .  restrct - interpolation matrix, apply using MatInterpolate()
1526 -  fine - finer DM to update
1527 
1528    Level: developer
1529 
1530 .seealso: DMRefineHookAdd(), MatInterpolate()
1531 @*/
1532 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1533 {
1534   PetscErrorCode ierr;
1535   DMRefineHookLink link;
1536 
1537   PetscFunctionBegin;
1538   for (link=fine->refinehook; link; link=link->next) {
1539     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1540   }
1541   PetscFunctionReturn(0);
1542 }
1543 
1544 #undef __FUNCT__
1545 #define __FUNCT__ "DMGetRefineLevel"
1546 /*@
1547     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1548 
1549     Not Collective
1550 
1551     Input Parameter:
1552 .   dm - the DM object
1553 
1554     Output Parameter:
1555 .   level - number of refinements
1556 
1557     Level: developer
1558 
1559 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1560 
1561 @*/
1562 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1563 {
1564   PetscFunctionBegin;
1565   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1566   *level = dm->levelup;
1567   PetscFunctionReturn(0);
1568 }
1569 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1627 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* User hooks registered with DMGlobalToLocalHookAdd() run first */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only broadcasts (insert); additive accumulation is not supported here */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations understand only INSERT_VALUES/ADD_VALUES; map the *_ALL_* modes down */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1673 
1674 #undef __FUNCT__
1675 #define __FUNCT__ "DMGlobalToLocalEnd"
1676 /*@
1677     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1678 
1679     Neighbor-wise Collective on DM
1680 
1681     Input Parameters:
1682 +   dm - the DM object
1683 .   g - the global vector
1684 .   mode - INSERT_VALUES or ADD_VALUES
1685 -   l - the local vector
1686 
1687 
1688     Level: beginner
1689 
1690 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1691 
1692 @*/
1693 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1694 {
1695   PetscSF        sf;
1696   PetscErrorCode ierr;
1697   PetscScalar    *lArray, *gArray;
1698   DMGlobalToLocalHookLink link;
1699 
1700   PetscFunctionBegin;
1701   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1702   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1703   if (sf) {
1704   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1705 
1706     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1707     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1708     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1709     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1710     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1711   } else {
1712     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1713   }
1714   for (link=dm->gtolhook; link; link=link->next) {
1715     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1716   }
1717   PetscFunctionReturn(0);
1718 }
1719 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the MPI reduction used by PetscSFReduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it insertion cannot be expressed as a reduction */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Implementations understand only INSERT_VALUES/ADD_VALUES; map the *_ALL_* modes down */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1779 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector

    Notes: Must be preceded by a matching call to DMLocalToGlobalBegin() with the same mode.

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* Finish the star-forest reduction started in DMLocalToGlobalBegin().
       The MPI_Op must be recomputed identically to the Begin call. */
    MPI_Op       op;
    PetscScalar *lArray, *gArray;

    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE requires an MPI-2 implementation. */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Delegate to the type-specific routine, collapsing *_ALL_VALUES modes. */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1835 
1836 #undef __FUNCT__
1837 #define __FUNCT__ "DMCoarsen"
1838 /*@
1839     DMCoarsen - Coarsens a DM object
1840 
1841     Collective on DM
1842 
1843     Input Parameter:
1844 +   dm - the DM object
1845 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1846 
1847     Output Parameter:
1848 .   dmc - the coarsened DM
1849 
1850     Level: developer
1851 
1852 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1853 
1854 @*/
1855 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1856 {
1857   PetscErrorCode ierr;
1858   DMCoarsenHookLink link;
1859 
1860   PetscFunctionBegin;
1861   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1862   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1863   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1864   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1865   (*dmc)->ctx       = dm->ctx;
1866   (*dmc)->levelup   = dm->levelup;
1867   (*dmc)->leveldown = dm->leveldown + 1;
1868   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1869   for (link=dm->coarsenhook; link; link=link->next) {
1870     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1871   }
1872   PetscFunctionReturn(0);
1873 }
1874 
1875 #undef __FUNCT__
1876 #define __FUNCT__ "DMCoarsenHookAdd"
1877 /*@
1878    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1879 
1880    Logically Collective
1881 
1882    Input Arguments:
1883 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1884 .  coarsenhook - function to run when setting up a coarser level
1885 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1886 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1887 
1888    Calling sequence of coarsenhook:
1889 $    coarsenhook(DM fine,DM coarse,void *ctx);
1890 
1891 +  fine - fine level DM
1892 .  coarse - coarse level DM to restrict problem to
1893 -  ctx - optional user-defined function context
1894 
1895    Calling sequence for restricthook:
1896 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1897 
1898 +  fine - fine level DM
1899 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1900 .  rscale - scaling vector for restriction
1901 .  inject - matrix restricting by injection
1902 .  coarse - coarse level DM to update
1903 -  ctx - optional user-defined function context
1904 
1905    Level: advanced
1906 
1907    Notes:
1908    This function is only needed if auxiliary data needs to be set up on coarse grids.
1909 
1910    If this function is called multiple times, the hooks will be run in the order they are added.
1911 
1912    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1913    extract the finest level information from its context (instead of from the SNES).
1914 
1915 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1916 @*/
1917 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1918 {
1919   PetscErrorCode ierr;
1920   DMCoarsenHookLink link,*p;
1921 
1922   PetscFunctionBegin;
1923   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1924   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1925   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1926   link->coarsenhook = coarsenhook;
1927   link->restricthook = restricthook;
1928   link->ctx = ctx;
1929   link->next = PETSC_NULL;
1930   *p = link;
1931   PetscFunctionReturn(0);
1932 }
1933 
1934 #undef __FUNCT__
1935 #define __FUNCT__ "DMRestrict"
1936 /*@
1937    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1938 
1939    Collective if any hooks are
1940 
1941    Input Arguments:
1942 +  fine - finer DM to use as a base
1943 .  restrct - restriction matrix, apply using MatRestrict()
1944 .  inject - injection matrix, also use MatRestrict()
1945 -  coarse - coarer DM to update
1946 
1947    Level: developer
1948 
1949 .seealso: DMCoarsenHookAdd(), MatRestrict()
1950 @*/
1951 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1952 {
1953   PetscErrorCode ierr;
1954   DMCoarsenHookLink link;
1955 
1956   PetscFunctionBegin;
1957   for (link=fine->coarsenhook; link; link=link->next) {
1958     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1959   }
1960   PetscFunctionReturn(0);
1961 }
1962 
1963 #undef __FUNCT__
1964 #define __FUNCT__ "DMSubDomainHookAdd"
1965 /*@
1966    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1967 
1968    Logically Collective
1969 
1970    Input Arguments:
1971 +  global - global DM
1972 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1973 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1974 
1975    Calling sequence for restricthook:
1976 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1977 
1978 +  global - global DM
1979 .  out    - scatter to the outer (with ghost and overlap points) block vector
1980 .  in     - scatter to block vector values only owned locally
1981 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1982 -  ctx - optional user-defined function context
1983 
1984    Level: advanced
1985 
1986    Notes:
1987    This function is only needed if auxiliary data needs to be set up on coarse grids.
1988 
1989    If this function is called multiple times, the hooks will be run in the order they are added.
1990 
1991    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1992    extract the finest level information from its context (instead of from the SNES).
1993 
1994 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1995 @*/
1996 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1997 {
1998   PetscErrorCode      ierr;
1999   DMSubDomainHookLink link,*p;
2000 
2001   PetscFunctionBegin;
2002   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2003   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2004   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2005   link->restricthook = restricthook;
2006   link->ddhook = ddhook;
2007   link->ctx = ctx;
2008   link->next = PETSC_NULL;
2009   *p = link;
2010   PetscFunctionReturn(0);
2011 }
2012 
2013 #undef __FUNCT__
2014 #define __FUNCT__ "DMSubDomainRestrict"
2015 /*@
2016    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2017 
2018    Collective if any hooks are
2019 
2020    Input Arguments:
2021 +  fine - finer DM to use as a base
2022 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2023 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2024 -  coarse - coarer DM to update
2025 
2026    Level: developer
2027 
2028 .seealso: DMCoarsenHookAdd(), MatRestrict()
2029 @*/
2030 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2031 {
2032   PetscErrorCode ierr;
2033   DMSubDomainHookLink link;
2034 
2035   PetscFunctionBegin;
2036   for (link=global->subdomainhook; link; link=link->next) {
2037     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2038   }
2039   PetscFunctionReturn(0);
2040 }
2041 
2042 #undef __FUNCT__
2043 #define __FUNCT__ "DMGetCoarsenLevel"
2044 /*@
2045     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2046 
2047     Not Collective
2048 
2049     Input Parameter:
2050 .   dm - the DM object
2051 
2052     Output Parameter:
2053 .   level - number of coarsenings
2054 
2055     Level: developer
2056 
2057 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2058 
2059 @*/
2060 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2061 {
2062   PetscFunctionBegin;
2063   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2064   *level = dm->leveldown;
2065   PetscFunctionReturn(0);
2066 }
2067 
2068 
2069 
2070 #undef __FUNCT__
2071 #define __FUNCT__ "DMRefineHierarchy"
2072 /*@C
2073     DMRefineHierarchy - Refines a DM object, all levels at once
2074 
2075     Collective on DM
2076 
2077     Input Parameter:
2078 +   dm - the DM object
2079 -   nlevels - the number of levels of refinement
2080 
2081     Output Parameter:
2082 .   dmf - the refined DM hierarchy
2083 
2084     Level: developer
2085 
2086 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2087 
2088 @*/
2089 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2090 {
2091   PetscErrorCode ierr;
2092 
2093   PetscFunctionBegin;
2094   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2095   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2096   if (nlevels == 0) PetscFunctionReturn(0);
2097   if (dm->ops->refinehierarchy) {
2098     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2099   } else if (dm->ops->refine) {
2100     PetscInt i;
2101 
2102     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2103     for (i=1; i<nlevels; i++) {
2104       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2105     }
2106   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2107   PetscFunctionReturn(0);
2108 }
2109 
2110 #undef __FUNCT__
2111 #define __FUNCT__ "DMCoarsenHierarchy"
2112 /*@C
2113     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2114 
2115     Collective on DM
2116 
2117     Input Parameter:
2118 +   dm - the DM object
2119 -   nlevels - the number of levels of coarsening
2120 
2121     Output Parameter:
2122 .   dmc - the coarsened DM hierarchy
2123 
2124     Level: developer
2125 
2126 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2127 
2128 @*/
2129 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2130 {
2131   PetscErrorCode ierr;
2132 
2133   PetscFunctionBegin;
2134   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2135   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2136   if (nlevels == 0) PetscFunctionReturn(0);
2137   PetscValidPointer(dmc,3);
2138   if (dm->ops->coarsenhierarchy) {
2139     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2140   } else if (dm->ops->coarsen) {
2141     PetscInt i;
2142 
2143     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2144     for (i=1; i<nlevels; i++) {
2145       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2146     }
2147   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2148   PetscFunctionReturn(0);
2149 }
2150 
2151 #undef __FUNCT__
2152 #define __FUNCT__ "DMCreateAggregates"
2153 /*@
2154    DMCreateAggregates - Gets the aggregates that map between
2155    grids associated with two DMs.
2156 
2157    Collective on DM
2158 
2159    Input Parameters:
2160 +  dmc - the coarse grid DM
2161 -  dmf - the fine grid DM
2162 
2163    Output Parameters:
2164 .  rest - the restriction matrix (transpose of the projection matrix)
2165 
2166    Level: intermediate
2167 
2168 .keywords: interpolation, restriction, multigrid
2169 
2170 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2171 @*/
2172 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2173 {
2174   PetscErrorCode ierr;
2175 
2176   PetscFunctionBegin;
2177   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2178   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2179   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2180   PetscFunctionReturn(0);
2181 }
2182 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function (may be PETSC_NULL to clear a previously set function)

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked when the DM is destroyed (see DMDestroy()). */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2206 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Notes: The DM does not copy the context; the caller must keep it alive
           (or register a destroy function with DMSetApplicationContextDestroy()).

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2230 
2231 #undef __FUNCT__
2232 #define __FUNCT__ "DMGetApplicationContext"
2233 /*@
2234     DMGetApplicationContext - Gets a user context from a DM object
2235 
2236     Not Collective
2237 
2238     Input Parameter:
2239 .   dm - the DM object
2240 
2241     Output Parameter:
2242 .   ctx - the user context
2243 
2244     Level: intermediate
2245 
2246 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2247 
2248 @*/
2249 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2250 {
2251   PetscFunctionBegin;
2252   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2253   *(void**)ctx = dm->ctx;
2254   PetscFunctionReturn(0);
2255 }
2256 
2257 #undef __FUNCT__
2258 #define __FUNCT__ "DMSetVariableBounds"
2259 /*@C
2260     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2261 
2262     Logically Collective on DM
2263 
2264     Input Parameter:
2265 +   dm - the DM object
2266 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2267 
2268     Level: intermediate
2269 
2270 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2271          DMSetJacobian()
2272 
2273 @*/
2274 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2275 {
2276   PetscFunctionBegin;
2277   dm->ops->computevariablebounds = f;
2278   PetscFunctionReturn(0);
2279 }
2280 
2281 #undef __FUNCT__
2282 #define __FUNCT__ "DMHasVariableBounds"
2283 /*@
2284     DMHasVariableBounds - does the DM object have a variable bounds function?
2285 
2286     Not Collective
2287 
2288     Input Parameter:
2289 .   dm - the DM object to destroy
2290 
2291     Output Parameter:
2292 .   flg - PETSC_TRUE if the variable bounds function exists
2293 
2294     Level: developer
2295 
2296 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2297 
2298 @*/
2299 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2300 {
2301   PetscFunctionBegin;
2302   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2303   PetscFunctionReturn(0);
2304 }
2305 
2306 #undef __FUNCT__
2307 #define __FUNCT__ "DMComputeVariableBounds"
2308 /*@C
2309     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2310 
2311     Logically Collective on DM
2312 
2313     Input Parameters:
2314 +   dm - the DM object to destroy
2315 -   x  - current solution at which the bounds are computed
2316 
2317     Output parameters:
2318 +   xl - lower bound
2319 -   xu - upper bound
2320 
2321     Level: intermediate
2322 
2323 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2324 
2325 @*/
2326 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2327 {
2328   PetscErrorCode ierr;
2329 
2330   PetscFunctionBegin;
2331   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2332   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2333   if (dm->ops->computevariablebounds) {
2334     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2335   }
2336   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2337   PetscFunctionReturn(0);
2338 }
2339 
2340 #undef __FUNCT__
2341 #define __FUNCT__ "DMHasColoring"
2342 /*@
2343     DMHasColoring - does the DM object have a method of providing a coloring?
2344 
2345     Not Collective
2346 
2347     Input Parameter:
2348 .   dm - the DM object
2349 
2350     Output Parameter:
2351 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2352 
2353     Level: developer
2354 
2355 .seealso DMHasFunction(), DMCreateColoring()
2356 
2357 @*/
2358 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2359 {
2360   PetscFunctionBegin;
2361   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2362   PetscFunctionReturn(0);
2363 }
2364 
2365 #undef  __FUNCT__
2366 #define __FUNCT__ "DMSetVec"
2367 /*@C
2368     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2369 
2370     Collective on DM
2371 
2372     Input Parameter:
2373 +   dm - the DM object
2374 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2375 
2376     Level: developer
2377 
2378 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2379 
2380 @*/
2381 PetscErrorCode  DMSetVec(DM dm,Vec x)
2382 {
2383   PetscErrorCode ierr;
2384 
2385   PetscFunctionBegin;
2386   if (x) {
2387     if (!dm->x) {
2388       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2389     }
2390     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2391   }
2392   else if (dm->x) {
2393     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2394   }
2395   PetscFunctionReturn(0);
2396 }
2397 
/* Registry of available DM implementations (populated by DMRegisterAll()),
   and a flag recording whether that registration has run yet. */
PetscFunctionList DMList                       = PETSC_NULL;
PetscBool         DMRegisterAllCalled          = PETSC_FALSE;
2400 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DMDA, DMCOMPOSITE, or DMSLICED).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type. */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the requested type's constructor in the registry. */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(((PetscObject)dm)->comm, DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any existing implementation-specific data before rebuilding
     as the new type; order matters here. */
  if (dm->ops->destroy) {
    ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = PETSC_NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2446 
2447 #undef __FUNCT__
2448 #define __FUNCT__ "DMGetType"
2449 /*@C
2450   DMGetType - Gets the DM type name (as a string) from the DM.
2451 
2452   Not Collective
2453 
2454   Input Parameter:
2455 . dm  - The DM
2456 
2457   Output Parameter:
2458 . type - The DM type name
2459 
2460   Level: intermediate
2461 
2462 .keywords: DM, get, type, name
2463 .seealso: DMSetType(), DMCreate()
2464 @*/
2465 PetscErrorCode  DMGetType(DM dm, DMType *type)
2466 {
2467   PetscErrorCode ierr;
2468 
2469   PetscFunctionBegin;
2470   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2471   PetscValidCharPointer(type,2);
2472   if (!DMRegisterAllCalled) {
2473     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2474   }
2475   *type = ((PetscObject)dm)->type_name;
2476   PetscFunctionReturn(0);
2477 }
2478 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below --
     presumably an early-return shortcut for same-type conversion was intended;
     confirm before relying on this routine for "same" conversions. */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* The lookup key has the form "DMConvert_<fromtype>_<totype>_C". */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A throwaway DM of the target type is created only to query its composed functions. */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    /* Disabled: DM has no convertfrom/convert ops table entries yet. */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No fallback converter exists, so reaching here is an error. */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

    foundconv:
    /* Shared success path for all lookup strategies above. */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2574 
2575 /*--------------------------------------------------------------------------------------------------------------------*/
2576 
#undef __FUNCT__
#define __FUNCT__ "DMRegister"
/*@C
  DMRegister - See DMRegisterDynamic()

  Registers a DM constructor under the given type name, recording its
  location as "path:name" for dynamic loading.

  Level: advanced
@*/
PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
{
  char fullname[PETSC_MAX_PATH_LEN];
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Build the "path:name" locator string.
     NOTE(review): assumes path + name fit in PETSC_MAX_PATH_LEN -- confirm
     callers only pass bounded paths. */
  ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
  ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
  ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2596 
2597 
2598 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
  /* Allow DMRegisterAll() to repopulate the registry on next use. */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2620 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];   /* type name read from the file header */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer is handled here, despite the HDF5 mention above. */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header: a classid marking a DM record, followed by the type name. */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Let the implementation read its own data, if it supports loading. */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2672 
2673 /******************************** FEM Support **********************************/
2674 
2675 #undef __FUNCT__
2676 #define __FUNCT__ "DMPrintCellVector"
2677 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2678 {
2679   PetscInt       f;
2680   PetscErrorCode ierr;
2681 
2682   PetscFunctionBegin;
2683   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2684   for (f = 0; f < len; ++f) {
2685     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2686   }
2687   PetscFunctionReturn(0);
2688 }
2689 
2690 #undef __FUNCT__
2691 #define __FUNCT__ "DMPrintCellMatrix"
2692 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2693 {
2694   PetscInt       f, g;
2695   PetscErrorCode ierr;
2696 
2697   PetscFunctionBegin;
2698   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2699   for (f = 0; f < rows; ++f) {
2700     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2701     for (g = 0; g < cols; ++g) {
2702       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2703     }
2704     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2705   }
2706   PetscFunctionReturn(0);
2707 }
2708 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection, or PETSC_NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2734 
2735 #undef __FUNCT__
2736 #define __FUNCT__ "DMSetDefaultSection"
2737 /*@
2738   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2739 
2740   Input Parameters:
2741 + dm - The DM
2742 - section - The PetscSection
2743 
2744   Level: intermediate
2745 
2746   Note: Any existing Section will be destroyed
2747 
2748 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2749 @*/
2750 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2751 {
2752   PetscInt       numFields;
2753   PetscInt       f;
2754   PetscErrorCode ierr;
2755 
2756   PetscFunctionBegin;
2757   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2758   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2759   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2760   dm->defaultSection = section;
2761   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2762   if (numFields) {
2763     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2764     for (f = 0; f < numFields; ++f) {
2765       const char *name;
2766 
2767       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2768       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2769     }
2770   }
2771   PetscFunctionReturn(0);
2772 }
2773 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Notes:
  This gets a borrowed reference, so the user should not destroy this PetscSection.

  The global section is created lazily from the default (local) section and the point
  PetscSF the first time it is requested; both must have been set beforehand.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazy creation: derive the global section from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also refresh the DM's value layout to match the new global section */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2808 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Notes:
  Any existing Section will be destroyed.  No new reference is taken on section:
  the DM assumes ownership of the caller's reference, so the caller should not
  destroy it afterwards.

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2834 
2835 #undef __FUNCT__
2836 #define __FUNCT__ "DMGetDefaultSF"
2837 /*@
2838   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2839   it is created from the default PetscSection layouts in the DM.
2840 
2841   Input Parameter:
2842 . dm - The DM
2843 
2844   Output Parameter:
2845 . sf - The PetscSF
2846 
2847   Level: intermediate
2848 
2849   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2850 
2851 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2852 @*/
2853 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2854 {
2855   PetscInt       nroots;
2856   PetscErrorCode ierr;
2857 
2858   PetscFunctionBegin;
2859   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2860   PetscValidPointer(sf, 2);
2861   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2862   if (nroots < 0) {
2863     PetscSection section, gSection;
2864 
2865     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2866     if (section) {
2867       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2868       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2869     } else {
2870       *sf = PETSC_NULL;
2871       PetscFunctionReturn(0);
2872     }
2873   }
2874   *sf = dm->defaultSF;
2875   PetscFunctionReturn(0);
2876 }
2877 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Notes:
  Any previous SF is destroyed.  No new reference is taken on sf: the DM assumes
  ownership of the caller's reference, so the caller should not destroy it.

  NOTE(review): this differs from DMSetPointSF(), which does take a reference --
  confirm the asymmetric ownership convention is intentional before changing either.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2904 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Notes:
  The graph is installed into dm->defaultSF (roots are owned global dofs, leaves are
  local dofs).  In the global section, a negative dof count is interpreted as the
  encoding -(gdof+1) for a point owned by another process -- the same convention used
  by PetscSectionCreateGlobalSection(); a zero gdof marks a censored (absent) point.

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;   /* per-rank ownership ranges of global dofs */
  PetscInt       *local;    /* leaf array: local dof offsets */
  PetscSFNode    *remote;   /* root array: (rank, index) of the owning global dof */
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a layout over the owned (root) dofs so remote offsets can be mapped to ranks */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count unconstrained dofs over all points to size the leaf arrays */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    /* gdof < 0 encodes -(gdof+1) dofs owned remotely */
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill in the leaf (local offsets) and root (owner rank/index) arrays */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the local offsets of the unconstrained dofs, skipping constrained indices */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode -(goff+1) and locate the owner rank in the layout */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);  /* PetscFindInt returns -(insertion point)-1 when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: roots live on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2993 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or PETSC_NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3019 
3020 #undef __FUNCT__
3021 #define __FUNCT__ "DMSetPointSF"
3022 /*@
3023   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3024 
3025   Input Parameters:
3026 + dm - The DM
3027 - sf - The PetscSF
3028 
3029   Level: intermediate
3030 
3031 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3032 @*/
3033 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3034 {
3035   PetscErrorCode ierr;
3036 
3037   PetscFunctionBegin;
3038   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3039   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3040   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3041   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3042   dm->sf = sf;
3043   PetscFunctionReturn(0);
3044 }
3045 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - Return the number of fields stored on the DM (set with DMSetNumFields()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3056 
3057 #undef __FUNCT__
3058 #define __FUNCT__ "DMSetNumFields"
3059 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3060 {
3061   PetscInt       f;
3062   PetscErrorCode ierr;
3063 
3064   PetscFunctionBegin;
3065   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3066   for (f = 0; f < dm->numFields; ++f) {
3067     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3068   }
3069   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3070   dm->numFields = numFields;
3071   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3072   for (f = 0; f < dm->numFields; ++f) {
3073     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3074   }
3075   PetscFunctionReturn(0);
3076 }
3077 
#undef __FUNCT__
#define __FUNCT__ "DMGetField"
/* DMGetField - Return a borrowed reference to field f of the DM.
   Errors if DMSetNumFields() has not been called, or if f is out of [0, numFields). */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3090 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do not include those for ghost points, which are in the local vector.
  Any cached local coordinate vector is invalidated by this call and will be rebuilt
  on the next DMGetCoordinatesLocal().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying so that c == dm->coordinates is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; they no longer match */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3123 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Any cached global coordinate vector is invalidated by this call and will be
  rebuilt on the next DMGetCoordinates().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying so that c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  dm->coordinatesLocal = c;
  /* Invalidate the cached global coordinates; they no longer match */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3158 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  This call is collective because, when only local coordinates have been set, the
  global vector is assembled here via DMLocalToGlobalBegin()/End().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local one when only the latter exists */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3204 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  This call is collective because, when only global coordinates have been set, the
  local vector is assembled here via DMGlobalToLocalBegin()/End().

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local vector from the global one when only the latter exists */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3250 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM

  Note: This is a borrowed reference; the coordinate DM is created lazily by the
  implementation the first time it is requested, and errors if the DM type does
  not support coordinate DMs.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3283 
3284 #undef __FUNCT__
3285 #define __FUNCT__ "DMLocatePoints"
3286 /*@
3287   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3288 
3289   Not collective
3290 
3291   Input Parameters:
3292 + dm - The DM
3293 - v - The Vec of points
3294 
3295   Output Parameter:
3296 . cells - The local cell numbers for cells which contain the points
3297 
3298   Level: developer
3299 
3300 .keywords: point location, mesh
3301 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3302 @*/
3303 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3304 {
3305   PetscErrorCode ierr;
3306 
3307   PetscFunctionBegin;
3308   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3309   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3310   PetscValidPointer(cells,3);
3311   if (dm->ops->locatepoints) {
3312     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3313   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3314   PetscFunctionReturn(0);
3315 }
3316