xref: /petsc/src/dm/interface/dm.c (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never  call DMSetType()  it will generate an
38    error when you try to use the vector.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
52 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
53 {
54   DM             v;
55   PetscErrorCode ierr;
56 
57   PetscFunctionBegin;
58   PetscValidPointer(dm,2);
59   *dm = PETSC_NULL;
60 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
61   ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
62   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
63   ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
64 #endif
65 
66   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
67   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
68 
69 
70   v->ltogmap              = PETSC_NULL;
71   v->ltogmapb             = PETSC_NULL;
72   v->bs                   = 1;
73   v->coloringtype         = IS_COLORING_GLOBAL;
74   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
75   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
76   v->defaultSection       = PETSC_NULL;
77   v->defaultGlobalSection = PETSC_NULL;
78   {
79     PetscInt i;
80     for (i = 0; i < 10; ++i) {
81       v->nullspaceConstructors[i] = PETSC_NULL;
82     }
83   }
84   v->numFields = 0;
85   v->fields    = PETSC_NULL;
86 
87   *dm = v;
88   PetscFunctionReturn(0);
89 }
90 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DMDA

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Drop any previously stored type name; the DM keeps its own copy of the string */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
119 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM (PETSC_NULL if no DM has been composed with the vector)

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the vector as a composed object under the same key
     used by VecSetDM() */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Drop any previously stored type name; the DM keeps its own copy of the string */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
202 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM (PETSC_NULL if no DM has been composed with the matrix)

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is stored on the matrix as a composed object under the same key
     used by MatSetDM() */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
230 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM; may be PETSC_NULL (presumably detaches any composed DM -- per
       PetscObjectCompose() semantics; confirm)

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  /* dm is optional here, unlike in VecSetDM() */
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
256 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Delegate to the generic PetscObject prefix mechanism */
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMDestroy"
291 /*@
292     DMDestroy - Destroys a vector packer or DMDA.
293 
294     Collective on DM
295 
296     Input Parameter:
297 .   dm - the DM object to destroy
298 
299     Level: developer
300 
301 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
302 
303 @*/
304 PetscErrorCode  DMDestroy(DM *dm)
305 {
306   PetscInt       i, cnt = 0, f;
307   DMNamedVecLink nlink,nnext;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*dm) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
313 
314   /* count all the circular references of DM and its contained Vecs */
315   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
316     if ((*dm)->localin[i])  cnt++;
317     if ((*dm)->globalin[i]) cnt++;
318   }
319   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
320   if ((*dm)->x) {
321     DM obj;
322     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
323     if (obj == *dm) cnt++;
324   }
325   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
326   if ((*dm)->x) {
327     DM obj;
328     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
329     if (obj == *dm) cnt++;
330   }
331 
332   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
333   /*
334      Need this test because the dm references the vectors that
335      reference the dm, so destroying the dm calls destroy on the
336      vectors that cause another destroy on the dm
337   */
338   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
339   ((PetscObject) (*dm))->refct = 0;
340   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
341     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
342     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
343   }
344   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
345     nnext = nlink->next;
346     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
347     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
348     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
349     ierr = PetscFree(nlink);CHKERRQ(ierr);
350   }
351   (*dm)->namedglobal = PETSC_NULL;
352 
353   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
354     nnext = nlink->next;
355     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
356     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
357     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
358     ierr = PetscFree(nlink);CHKERRQ(ierr);
359   }
360   (*dm)->namedlocal = PETSC_NULL;
361 
362   /* Destroy the list of hooks */
363   {
364     DMCoarsenHookLink link,next;
365     for (link=(*dm)->coarsenhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->coarsenhook = PETSC_NULL;
370   }
371   {
372     DMRefineHookLink link,next;
373     for (link=(*dm)->refinehook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->refinehook = PETSC_NULL;
378   }
379   {
380     DMSubDomainHookLink link,next;
381     for (link=(*dm)->subdomainhook; link; link=next) {
382       next = link->next;
383       ierr = PetscFree(link);CHKERRQ(ierr);
384     }
385     (*dm)->subdomainhook = PETSC_NULL;
386   }
387   {
388     DMGlobalToLocalHookLink link,next;
389     for (link=(*dm)->gtolhook; link; link=next) {
390       next = link->next;
391       ierr = PetscFree(link);CHKERRQ(ierr);
392     }
393     (*dm)->gtolhook = PETSC_NULL;
394   }
395   /* Destroy the work arrays */
396   {
397     DMWorkLink link,next;
398     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
399     for (link=(*dm)->workin; link; link=next) {
400       next = link->next;
401       ierr = PetscFree(link->mem);CHKERRQ(ierr);
402       ierr = PetscFree(link);CHKERRQ(ierr);
403     }
404     (*dm)->workin = PETSC_NULL;
405   }
406 
407   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
408   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
410 
411   if ((*dm)->ctx && (*dm)->ctxdestroy) {
412     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
413   }
414   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
415   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
416   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
417   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
419   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
421 
422   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
423   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
424   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
425   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
427 
428   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
429   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
431 
432   for (f = 0; f < (*dm)->numFields; ++f) {
433     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
434   }
435   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
436   /* if memory was published with AMS then destroy it */
437   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
438 
439   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
440   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
441   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "DMSetUp"
447 /*@
448     DMSetUp - sets up the data structures inside a DM object
449 
450     Collective on DM
451 
452     Input Parameter:
453 .   dm - the DM object to setup
454 
455     Level: developer
456 
457 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
458 
459 @*/
460 PetscErrorCode  DMSetUp(DM dm)
461 {
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
466   if (dm->setupcalled) PetscFunctionReturn(0);
467   if (dm->ops->setup) {
468     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
469   }
470   dm->setupcalled = PETSC_TRUE;
471   PetscFunctionReturn(0);
472 }
473 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;  /* shared in/out buffer for the type-list options below */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  /* If dm->mattype is unset, typeName (MATAIJ or whatever -dm_vec_type wrote
     into it above) serves as the displayed default */
  ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
  /* Give the type-specific implementation a chance to process its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -dm_view after all options have been applied */
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer; PETSC_NULL means the default ASCII viewer on the DM's communicator

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary format: a class id followed by a fixed-width (256 char) type name,
       so DMLoad-style readers can dispatch on the type */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* The type-specific view writes the actual contents */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "DMCreateGlobalVector"
567 /*@
568     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
569 
570     Collective on DM
571 
572     Input Parameter:
573 .   dm - the DM object
574 
575     Output Parameter:
576 .   vec - the global vector
577 
578     Level: beginner
579 
580 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
581 
582 @*/
583 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
584 {
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
589   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "DMCreateLocalVector"
595 /*@
596     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
597 
598     Not Collective
599 
600     Input Parameter:
601 .   dm - the DM object
602 
603     Output Parameter:
604 .   vec - the local vector
605 
606     Level: beginner
607 
608 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
609 
610 @*/
611 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
612 {
613   PetscErrorCode ierr;
614 
615   PetscFunctionBegin;
616   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
617   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
618   PetscFunctionReturn(0);
619 }
620 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping; owned by the DM (lazily created and cached on first call)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    /* If a default section is attached, build the mapping from it; otherwise
       fall back to the type-specific implementation */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE(review): the local 'ltog' below shadows the output parameter;
         the output is only assigned from dm->ltogmap at the very end */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        /* dof counts come from the local section, offsets from the global one.
           NOTE(review): global-section offsets of unowned points are commonly
           encoded as negative values; no special handling here -- confirm */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the IS takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
681 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping; owned by the DM (lazily created and cached on first call)

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the block mapping is identical to the point mapping, so
         share it and take an extra reference for the separate cache slot */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
724 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the block size has not been determined yet (e.g. type not set up) */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
751 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation"
/*@
    DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameters:
+  mat - the interpolation
-  vec - the scaling (optional)

    Level: developer

    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.

        For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
        EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.


.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()

@*/
PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch on the coarse DM's implementation */
  ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
789 
#undef __FUNCT__
#define __FUNCT__ "DMCreateInjection"
/*@
    DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects

    Collective on DM

    Input Parameters:
+   dm1 - the DM object
-   dm2 - the second, finer DM object

    Output Parameter:
.   ctx - the injection

    Level: developer

   Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
        DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()

@*/
PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
  /* Dispatch on the coarse DM's implementation (the op slot is named getinjection) */
  ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
822 
#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring"
/*@C
    DMCreateColoring - Gets coloring for a DMDA or DMComposite

    Collective on DM

    Input Parameters:
+   dm - the DM object
.   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
-   mtype - either MATAIJ or MATBAIJ

    Output Parameter:
.   coloring - the coloring

    Level: developer

.seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()

@*/
PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Not all DM implementations provide a coloring */
  if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
  ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMCreateMatrix"
856 /*@C
857     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
858 
859     Collective on DM
860 
861     Input Parameter:
862 +   dm - the DM object
863 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
864             any type which inherits from one of these (such as MATAIJ)
865 
866     Output Parameter:
867 .   mat - the empty Jacobian
868 
869     Level: beginner
870 
871     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
872        do not need to do it yourself.
873 
874        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
875        the nonzero pattern call DMDASetMatPreallocateOnly()
876 
877        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
878        internally by PETSc.
879 
880        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
881        the indices for the global numbering for DMDAs which is complicated.
882 
883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
884 
885 @*/
886 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
887 {
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
892 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
893   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
894 #endif
895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
896   PetscValidPointer(mat,3);
897   if (dm->mattype) {
898     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
899   } else {
900     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer
.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Consulted by the type-specific creatematrix implementations */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
927 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minium size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array (pass the address of your pointer, cast to void*)

  Level: developer

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Pop a link from the free list (workin), or allocate a fresh one */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the link's buffer if the request exceeds its current capacity;
     buffers are reused across checkouts so they only ever grow */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out list (workout) */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
973 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

  Notes:
  count and dtype are not used here; the array is identified by its address. They are kept
  in the interface for symmetry with DMGetWorkArray().

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the checked-out link whose buffer matches, unlink it, and put it back on the
     available (workin) list; the caller's pointer is nulled to prevent reuse */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "DMSetNullSpaceConstructor"
1015 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1019   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1020   dm->nullspaceConstructors[field] = nullsp;
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 #undef __FUNCT__
1025 #define __FUNCT__ "DMCreateFieldIS"
1026 /*@C
1027   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1028 
1029   Not collective
1030 
1031   Input Parameter:
1032 . dm - the DM object
1033 
1034   Output Parameters:
1035 + numFields  - The number of fields (or PETSC_NULL if not requested)
1036 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1037 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1038 
1039   Level: intermediate
1040 
1041   Notes:
1042   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1043   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1044   PetscFree().
1045 
1046 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1047 @*/
1048 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1049 {
1050   PetscSection   section, sectionGlobal;
1051   PetscErrorCode ierr;
1052 
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1055   if (numFields) {
1056     PetscValidPointer(numFields,2);
1057     *numFields = 0;
1058   }
1059   if (fieldNames) {
1060     PetscValidPointer(fieldNames,3);
1061     *fieldNames = PETSC_NULL;
1062   }
1063   if (fields) {
1064     PetscValidPointer(fields,4);
1065     *fields = PETSC_NULL;
1066   }
1067   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1068   if (section) {
1069     PetscInt *fieldSizes, **fieldIndices;
1070     PetscInt nF, f, pStart, pEnd, p;
1071 
1072     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1073     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1074     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
1075     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1076     for (f = 0; f < nF; ++f) {
1077       fieldSizes[f] = 0;
1078     }
1079     for (p = pStart; p < pEnd; ++p) {
1080       PetscInt gdof;
1081 
1082       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1083       if (gdof > 0) {
1084         for (f = 0; f < nF; ++f) {
1085           PetscInt fdof, fcdof;
1086 
1087           ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1088           ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1089           fieldSizes[f] += fdof-fcdof;
1090         }
1091       }
1092     }
1093     for (f = 0; f < nF; ++f) {
1094       ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1095       fieldSizes[f] = 0;
1096     }
1097     for (p = pStart; p < pEnd; ++p) {
1098       PetscInt gdof, goff;
1099 
1100       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1101       if (gdof > 0) {
1102         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1103         for (f = 0; f < nF; ++f) {
1104           PetscInt fdof, fcdof, fc;
1105 
1106           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1107           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1108           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1109             fieldIndices[f][fieldSizes[f]] = goff++;
1110           }
1111         }
1112       }
1113     }
1114     if (numFields) *numFields = nF;
1115     if (fieldNames) {
1116       ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
1117       for (f = 0; f < nF; ++f) {
1118         const char *fieldName;
1119 
1120         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1121         ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
1122       }
1123     }
1124     if (fields) {
1125       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1126       for (f = 0; f < nF; ++f) {
1127         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1128       }
1129     }
1130     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1131   } else if (dm->ops->createfieldis) {
1132     ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
1133   }
1134   PetscFunctionReturn(0);
1135 }
1136 
1137 #undef __FUNCT__
1138 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1139 /*@C
1140   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1141 
1142   Not Collective
1143 
1144   Input Parameters:
1145 + dm   - the DM object
1146 - name - the name of the field decomposition
1147 
1148   Output Parameter:
1149 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1150 
1151   Level: advanced
1152 
1153 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1154 @*/
1155 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char *name, DM *ddm)
1156 {
1157   PetscErrorCode ierr;
1158 
1159   PetscFunctionBegin;
1160   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1161   PetscValidCharPointer(name,2);
1162   PetscValidPointer(ddm,3);
1163   *ddm = PETSC_NULL;
1164   if (dm->ops->createfielddecompositiondm) {
1165     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1166   }
1167   PetscFunctionReturn(0);
1168 }
1169 
1170 #undef __FUNCT__
1171 #define __FUNCT__ "DMCreateFieldDecomposition"
1172 /*@C
1173   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1174                           corresponding to different fields: each IS contains the global indices of the dofs of the
1175                           corresponding field. The optional list of DMs define the DM for each subproblem.
1176                           Generalizes DMCreateFieldIS().
1177 
1178   Not collective
1179 
1180   Input Parameter:
1181 . dm - the DM object
1182 
1183   Output Parameters:
1184 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1185 . namelist  - The name for each field (or PETSC_NULL if not requested)
1186 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1187 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1188 
1189   Level: intermediate
1190 
1191   Notes:
1192   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1193   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1194   and all of the arrays should be freed with PetscFree().
1195 
1196 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1197 @*/
1198 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1199 {
1200   PetscErrorCode ierr;
1201 
1202   PetscFunctionBegin;
1203   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1204   if (len) {
1205     PetscValidPointer(len,2);
1206     *len = 0;
1207   }
1208   if (namelist) {
1209     PetscValidPointer(namelist,3);
1210     *namelist = 0;
1211   }
1212   if (islist) {
1213     PetscValidPointer(islist,4);
1214     *islist = 0;
1215   }
1216   if (dmlist) {
1217     PetscValidPointer(dmlist,5);
1218     *dmlist = 0;
1219   }
1220   /*
1221    Is it a good idea to apply the following check across all impls?
1222    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1223    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1224    */
1225   if (!dm->setupcalled) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1226   if (!dm->ops->createfielddecomposition) {
1227     PetscSection section;
1228     PetscInt     numFields, f;
1229 
1230     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1231     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1232     if (section && numFields && dm->ops->createsubdm) {
1233       *len = numFields;
1234       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1235       for (f = 0; f < numFields; ++f) {
1236         const char *fieldName;
1237 
1238         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1239         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1240         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1241       }
1242     } else {
1243       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1244       /* By default there are no DMs associated with subproblems. */
1245       if (dmlist) *dmlist = PETSC_NULL;
1246     }
1247   } else {
1248     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1249   }
1250   PetscFunctionReturn(0);
1251 }
1252 
1253 #undef __FUNCT__
1254 #define __FUNCT__ "DMCreateSubDM"
1255 /*@C
1256   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1257                   The fields are defined by DMCreateFieldIS().
1258 
1259   Not collective
1260 
1261   Input Parameters:
1262 + dm - the DM object
1263 . numFields - number of fields in this subproblem
1264 - len       - The number of subproblems in the decomposition (or PETSC_NULL if not requested)
1265 
1266   Output Parameters:
1267 . is - The global indices for the subproblem
1268 - dm - The DM for the subproblem
1269 
1270   Level: intermediate
1271 
1272 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1273 @*/
1274 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1275 {
1276   PetscErrorCode ierr;
1277 
1278   PetscFunctionBegin;
1279   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1280   PetscValidPointer(fields,3);
1281   if (is) PetscValidPointer(is,4);
1282   if (subdm) PetscValidPointer(subdm,5);
1283   if (dm->ops->createsubdm) {
1284     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1285   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1286   PetscFunctionReturn(0);
1287 }
1288 
1289 #undef __FUNCT__
1290 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1291 /*@C
1292   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1293 
1294   Not Collective
1295 
1296   Input Parameters:
1297 + dm   - the DM object
1298 - name - the name of the subdomain decomposition
1299 
1300   Output Parameter:
1301 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1302 
1303   Level: advanced
1304 
1305 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1306 @*/
1307 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char *name, DM *ddm)
1308 {
1309   PetscErrorCode ierr;
1310 
1311   PetscFunctionBegin;
1312   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1313   PetscValidCharPointer(name,2);
1314   PetscValidPointer(ddm,3);
1315   *ddm = PETSC_NULL;
1316   if (dm->ops->createdomaindecompositiondm) {
1317     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1318   }
1319   PetscFunctionReturn(0);
1320 }
1321 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
. namelist    - The name for each subdomain (or PETSC_NULL if not requested)
. innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; zero the ones that were requested */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    /* l receives the subdomain count from the implementation even when the caller did
       not request len, because the hook loop below needs it */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* Subdomain DMs share the parent's user context */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1386 
1387 
1388 #undef __FUNCT__
1389 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1390 /*@C
1391   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1392 
1393   Not collective
1394 
1395   Input Parameters:
1396 + dm - the DM object
1397 . n  - the number of subdomain scatters
1398 - subdms - the local subdomains
1399 
1400   Output Parameters:
1401 + n     - the number of scatters returned
1402 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1403 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1404 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1405 
1406   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1407   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1408   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1409   solution and residual data.
1410 
1411   Level: developer
1412 
1413 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1414 @*/
1415 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1416 {
1417   PetscErrorCode ierr;
1418 
1419   PetscFunctionBegin;
1420   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1421   PetscValidPointer(subdms,3);
1422   PetscValidPointer(iscat,4);
1423   PetscValidPointer(oscat,5);
1424   PetscValidPointer(gscat,6);
1425   if (dm->ops->createddscatters) {
1426     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1427   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1428   PetscFunctionReturn(0);
1429 }
1430 
1431 #undef __FUNCT__
1432 #define __FUNCT__ "DMRefine"
1433 /*@
1434   DMRefine - Refines a DM object
1435 
1436   Collective on DM
1437 
1438   Input Parameter:
1439 + dm   - the DM object
1440 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1441 
1442   Output Parameter:
1443 . dmf - the refined DM, or PETSC_NULL
1444 
1445   Note: If no refinement was done, the return value is PETSC_NULL
1446 
1447   Level: developer
1448 
1449 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1450 @*/
1451 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1452 {
1453   PetscErrorCode   ierr;
1454   DMRefineHookLink link;
1455 
1456   PetscFunctionBegin;
1457   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1458   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1459   if (*dmf) {
1460     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1461 
1462     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1463 
1464     (*dmf)->ctx       = dm->ctx;
1465     (*dmf)->leveldown = dm->leveldown;
1466     (*dmf)->levelup   = dm->levelup + 1;
1467 
1468     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1469     for (link=dm->refinehook; link; link=link->next) {
1470       if (link->refinehook) {
1471         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1472       }
1473     }
1474   }
1475   PetscFunctionReturn(0);
1476 }
1477 
1478 #undef __FUNCT__
1479 #define __FUNCT__ "DMRefineHookAdd"
1480 /*@
1481    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1482 
1483    Logically Collective
1484 
1485    Input Arguments:
1486 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1487 .  refinehook - function to run when setting up a coarser level
1488 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1489 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1490 
1491    Calling sequence of refinehook:
1492 $    refinehook(DM coarse,DM fine,void *ctx);
1493 
1494 +  coarse - coarse level DM
1495 .  fine - fine level DM to interpolate problem to
1496 -  ctx - optional user-defined function context
1497 
1498    Calling sequence for interphook:
1499 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1500 
1501 +  coarse - coarse level DM
1502 .  interp - matrix interpolating a coarse-level solution to the finer grid
1503 .  fine - fine level DM to update
1504 -  ctx - optional user-defined function context
1505 
1506    Level: advanced
1507 
1508    Notes:
1509    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1510 
1511    If this function is called multiple times, the hooks will be run in the order they are added.
1512 
1513 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1514 @*/
1515 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1516 {
1517   PetscErrorCode   ierr;
1518   DMRefineHookLink link,*p;
1519 
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1522   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1523   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1524   link->refinehook = refinehook;
1525   link->interphook = interphook;
1526   link->ctx        = ctx;
1527   link->next       = PETSC_NULL;
1528   *p               = link;
1529   PetscFunctionReturn(0);
1530 }
1531 
1532 #undef __FUNCT__
1533 #define __FUNCT__ "DMInterpolate"
1534 /*@
1535    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1536 
1537    Collective if any hooks are
1538 
1539    Input Arguments:
1540 +  coarse - coarser DM to use as a base
1541 .  restrct - interpolation matrix, apply using MatInterpolate()
1542 -  fine - finer DM to update
1543 
1544    Level: developer
1545 
1546 .seealso: DMRefineHookAdd(), MatInterpolate()
1547 @*/
1548 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1549 {
1550   PetscErrorCode   ierr;
1551   DMRefineHookLink link;
1552 
1553   PetscFunctionBegin;
1554   for (link=fine->refinehook; link; link=link->next) {
1555     if (link->interphook) {
1556       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1557     }
1558   }
1559   PetscFunctionReturn(0);
1560 }
1561 
#undef __FUNCT__
#define __FUNCT__ "DMGetRefineLevel"
/*@
    DMGetRefineLevel - Gets the number of refinements that have generated this DM.

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   level - number of refinements

    Level: developer

.seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()

@*/
PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* levelup is incremented each time DMRefine() produces a finer DM */
  *level = dm->levelup;
  PetscFunctionReturn(0);
}
1587 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - insertion mode
.  l - local vector
-  ctx - optional user-defined function context

   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - insertion mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = PETSC_NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1645 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before the communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The PetscSF broadcast path only supports insertion, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No default SF: delegate to the type implementation, mapping the *_ALL_VALUES
       modes down to their plain counterparts */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1693 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The PetscSF broadcast path only supports insertion, not accumulation */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No default SF: delegate to the type implementation, mapping the *_ALL_VALUES
       modes down to their plain counterparts */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user hooks registered with DMGlobalToLocalHookAdd() after the communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1739 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode to the MPI reduction used by the SF reduce */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* MPI_REPLACE is an MPI-2 feature; without it insertion cannot be expressed */
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No default SF: delegate to the type implementation, mapping the *_ALL_VALUES
       modes down to their plain counterparts */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1799 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must match the mode/op mapping used in DMLocalToGlobalBegin() */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No default SF: finish the communication started by the type-specific implementation */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1855 
1856 #undef __FUNCT__
1857 #define __FUNCT__ "DMCoarsen"
1858 /*@
1859     DMCoarsen - Coarsens a DM object
1860 
1861     Collective on DM
1862 
1863     Input Parameter:
1864 +   dm - the DM object
1865 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1866 
1867     Output Parameter:
1868 .   dmc - the coarsened DM
1869 
1870     Level: developer
1871 
1872 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1873 
1874 @*/
1875 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1876 {
1877   PetscErrorCode    ierr;
1878   DMCoarsenHookLink link;
1879 
1880   PetscFunctionBegin;
1881   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1882   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1883   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1884   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1885   (*dmc)->ctx               = dm->ctx;
1886   (*dmc)->levelup           = dm->levelup;
1887   (*dmc)->leveldown         = dm->leveldown + 1;
1888   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1889   for (link=dm->coarsenhook; link; link=link->next) {
1890     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1891   }
1892   PetscFunctionReturn(0);
1893 }
1894 
1895 #undef __FUNCT__
1896 #define __FUNCT__ "DMCoarsenHookAdd"
1897 /*@
1898    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1899 
1900    Logically Collective
1901 
1902    Input Arguments:
1903 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1904 .  coarsenhook - function to run when setting up a coarser level
1905 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1906 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1907 
1908    Calling sequence of coarsenhook:
1909 $    coarsenhook(DM fine,DM coarse,void *ctx);
1910 
1911 +  fine - fine level DM
1912 .  coarse - coarse level DM to restrict problem to
1913 -  ctx - optional user-defined function context
1914 
1915    Calling sequence for restricthook:
1916 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1917 
1918 +  fine - fine level DM
1919 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1920 .  rscale - scaling vector for restriction
1921 .  inject - matrix restricting by injection
1922 .  coarse - coarse level DM to update
1923 -  ctx - optional user-defined function context
1924 
1925    Level: advanced
1926 
1927    Notes:
1928    This function is only needed if auxiliary data needs to be set up on coarse grids.
1929 
1930    If this function is called multiple times, the hooks will be run in the order they are added.
1931 
1932    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1933    extract the finest level information from its context (instead of from the SNES).
1934 
1935 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1936 @*/
1937 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1938 {
1939   PetscErrorCode    ierr;
1940   DMCoarsenHookLink link,*p;
1941 
1942   PetscFunctionBegin;
1943   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1944   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1945   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1946   link->coarsenhook  = coarsenhook;
1947   link->restricthook = restricthook;
1948   link->ctx          = ctx;
1949   link->next         = PETSC_NULL;
1950   *p                 = link;
1951   PetscFunctionReturn(0);
1952 }
1953 
1954 #undef __FUNCT__
1955 #define __FUNCT__ "DMRestrict"
1956 /*@
1957    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1958 
1959    Collective if any hooks are
1960 
1961    Input Arguments:
1962 +  fine - finer DM to use as a base
1963 .  restrct - restriction matrix, apply using MatRestrict()
1964 .  inject - injection matrix, also use MatRestrict()
1965 -  coarse - coarer DM to update
1966 
1967    Level: developer
1968 
1969 .seealso: DMCoarsenHookAdd(), MatRestrict()
1970 @*/
1971 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1972 {
1973   PetscErrorCode    ierr;
1974   DMCoarsenHookLink link;
1975 
1976   PetscFunctionBegin;
1977   for (link=fine->coarsenhook; link; link=link->next) {
1978     if (link->restricthook) {
1979       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
1980     }
1981   }
1982   PetscFunctionReturn(0);
1983 }
1984 
1985 #undef __FUNCT__
1986 #define __FUNCT__ "DMSubDomainHookAdd"
1987 /*@
1988    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1989 
1990    Logically Collective
1991 
1992    Input Arguments:
1993 +  global - global DM
1994 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1995 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1996 
1997    Calling sequence for restricthook:
1998 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1999 
2000 +  global - global DM
2001 .  out    - scatter to the outer (with ghost and overlap points) block vector
2002 .  in     - scatter to block vector values only owned locally
2003 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
2004 -  ctx - optional user-defined function context
2005 
2006    Level: advanced
2007 
2008    Notes:
2009    This function is only needed if auxiliary data needs to be set up on coarse grids.
2010 
2011    If this function is called multiple times, the hooks will be run in the order they are added.
2012 
2013    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2014    extract the finest level information from its context (instead of from the SNES).
2015 
2016 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2017 @*/
2018 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2019 {
2020   PetscErrorCode      ierr;
2021   DMSubDomainHookLink link,*p;
2022 
2023   PetscFunctionBegin;
2024   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2025   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2026   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2027   link->restricthook = restricthook;
2028   link->ddhook       = ddhook;
2029   link->ctx          = ctx;
2030   link->next         = PETSC_NULL;
2031   *p                 = link;
2032   PetscFunctionReturn(0);
2033 }
2034 
2035 #undef __FUNCT__
2036 #define __FUNCT__ "DMSubDomainRestrict"
2037 /*@
2038    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2039 
2040    Collective if any hooks are
2041 
2042    Input Arguments:
2043 +  fine - finer DM to use as a base
2044 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2045 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2046 -  coarse - coarer DM to update
2047 
2048    Level: developer
2049 
2050 .seealso: DMCoarsenHookAdd(), MatRestrict()
2051 @*/
2052 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2053 {
2054   PetscErrorCode      ierr;
2055   DMSubDomainHookLink link;
2056 
2057   PetscFunctionBegin;
2058   for (link=global->subdomainhook; link; link=link->next) {
2059     if (link->restricthook) {
2060       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2061     }
2062   }
2063   PetscFunctionReturn(0);
2064 }
2065 
2066 #undef __FUNCT__
2067 #define __FUNCT__ "DMGetCoarsenLevel"
2068 /*@
2069     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2070 
2071     Not Collective
2072 
2073     Input Parameter:
2074 .   dm - the DM object
2075 
2076     Output Parameter:
2077 .   level - number of coarsenings
2078 
2079     Level: developer
2080 
2081 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2082 
2083 @*/
2084 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2085 {
2086   PetscFunctionBegin;
2087   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2088   *level = dm->leveldown;
2089   PetscFunctionReturn(0);
2090 }
2091 
2092 
2093 
2094 #undef __FUNCT__
2095 #define __FUNCT__ "DMRefineHierarchy"
2096 /*@C
2097     DMRefineHierarchy - Refines a DM object, all levels at once
2098 
2099     Collective on DM
2100 
2101     Input Parameter:
2102 +   dm - the DM object
2103 -   nlevels - the number of levels of refinement
2104 
2105     Output Parameter:
2106 .   dmf - the refined DM hierarchy
2107 
2108     Level: developer
2109 
2110 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2111 
2112 @*/
2113 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2114 {
2115   PetscErrorCode ierr;
2116 
2117   PetscFunctionBegin;
2118   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2119   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2120   if (nlevels == 0) PetscFunctionReturn(0);
2121   if (dm->ops->refinehierarchy) {
2122     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2123   } else if (dm->ops->refine) {
2124     PetscInt i;
2125 
2126     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2127     for (i=1; i<nlevels; i++) {
2128       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2129     }
2130   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2131   PetscFunctionReturn(0);
2132 }
2133 
2134 #undef __FUNCT__
2135 #define __FUNCT__ "DMCoarsenHierarchy"
2136 /*@C
2137     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2138 
2139     Collective on DM
2140 
2141     Input Parameter:
2142 +   dm - the DM object
2143 -   nlevels - the number of levels of coarsening
2144 
2145     Output Parameter:
2146 .   dmc - the coarsened DM hierarchy
2147 
2148     Level: developer
2149 
2150 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2151 
2152 @*/
2153 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2154 {
2155   PetscErrorCode ierr;
2156 
2157   PetscFunctionBegin;
2158   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2159   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2160   if (nlevels == 0) PetscFunctionReturn(0);
2161   PetscValidPointer(dmc,3);
2162   if (dm->ops->coarsenhierarchy) {
2163     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2164   } else if (dm->ops->coarsen) {
2165     PetscInt i;
2166 
2167     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2168     for (i=1; i<nlevels; i++) {
2169       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2170     }
2171   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2172   PetscFunctionReturn(0);
2173 }
2174 
2175 #undef __FUNCT__
2176 #define __FUNCT__ "DMCreateAggregates"
2177 /*@
2178    DMCreateAggregates - Gets the aggregates that map between
2179    grids associated with two DMs.
2180 
2181    Collective on DM
2182 
2183    Input Parameters:
2184 +  dmc - the coarse grid DM
2185 -  dmf - the fine grid DM
2186 
2187    Output Parameters:
2188 .  rest - the restriction matrix (transpose of the projection matrix)
2189 
2190    Level: intermediate
2191 
2192 .keywords: interpolation, restriction, multigrid
2193 
2194 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2195 @*/
2196 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2197 {
2198   PetscErrorCode ierr;
2199 
2200   PetscFunctionBegin;
2201   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2202   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2203   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2204   PetscFunctionReturn(0);
2205 }
2206 
2207 #undef __FUNCT__
2208 #define __FUNCT__ "DMSetApplicationContextDestroy"
2209 /*@C
2210     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2211 
2212     Not Collective
2213 
2214     Input Parameters:
2215 +   dm - the DM object
2216 -   destroy - the destroy function
2217 
2218     Level: intermediate
2219 
2220 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2221 
2222 @*/
2223 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2224 {
2225   PetscFunctionBegin;
2226   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2227   dm->ctxdestroy = destroy;
2228   PetscFunctionReturn(0);
2229 }
2230 
2231 #undef __FUNCT__
2232 #define __FUNCT__ "DMSetApplicationContext"
2233 /*@
2234     DMSetApplicationContext - Set a user context into a DM object
2235 
2236     Not Collective
2237 
2238     Input Parameters:
2239 +   dm - the DM object
2240 -   ctx - the user context
2241 
2242     Level: intermediate
2243 
2244 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2245 
2246 @*/
2247 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2248 {
2249   PetscFunctionBegin;
2250   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2251   dm->ctx = ctx;
2252   PetscFunctionReturn(0);
2253 }
2254 
2255 #undef __FUNCT__
2256 #define __FUNCT__ "DMGetApplicationContext"
2257 /*@
2258     DMGetApplicationContext - Gets a user context from a DM object
2259 
2260     Not Collective
2261 
2262     Input Parameter:
2263 .   dm - the DM object
2264 
2265     Output Parameter:
2266 .   ctx - the user context
2267 
2268     Level: intermediate
2269 
2270 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2271 
2272 @*/
2273 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2274 {
2275   PetscFunctionBegin;
2276   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2277   *(void**)ctx = dm->ctx;
2278   PetscFunctionReturn(0);
2279 }
2280 
2281 #undef __FUNCT__
2282 #define __FUNCT__ "DMSetVariableBounds"
2283 /*@C
2284     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2285 
2286     Logically Collective on DM
2287 
2288     Input Parameter:
2289 +   dm - the DM object
2290 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2291 
2292     Level: intermediate
2293 
2294 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2295          DMSetJacobian()
2296 
2297 @*/
2298 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2299 {
2300   PetscFunctionBegin;
2301   dm->ops->computevariablebounds = f;
2302   PetscFunctionReturn(0);
2303 }
2304 
2305 #undef __FUNCT__
2306 #define __FUNCT__ "DMHasVariableBounds"
2307 /*@
2308     DMHasVariableBounds - does the DM object have a variable bounds function?
2309 
2310     Not Collective
2311 
2312     Input Parameter:
2313 .   dm - the DM object to destroy
2314 
2315     Output Parameter:
2316 .   flg - PETSC_TRUE if the variable bounds function exists
2317 
2318     Level: developer
2319 
2320 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2321 
2322 @*/
2323 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2324 {
2325   PetscFunctionBegin;
2326   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2327   PetscFunctionReturn(0);
2328 }
2329 
2330 #undef __FUNCT__
2331 #define __FUNCT__ "DMComputeVariableBounds"
2332 /*@C
2333     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2334 
2335     Logically Collective on DM
2336 
2337     Input Parameters:
2338 +   dm - the DM object to destroy
2339 -   x  - current solution at which the bounds are computed
2340 
2341     Output parameters:
2342 +   xl - lower bound
2343 -   xu - upper bound
2344 
2345     Level: intermediate
2346 
2347 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2348 
2349 @*/
2350 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2351 {
2352   PetscErrorCode ierr;
2353 
2354   PetscFunctionBegin;
2355   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2356   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2357   if (dm->ops->computevariablebounds) {
2358     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2359   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2360   PetscFunctionReturn(0);
2361 }
2362 
2363 #undef __FUNCT__
2364 #define __FUNCT__ "DMHasColoring"
2365 /*@
2366     DMHasColoring - does the DM object have a method of providing a coloring?
2367 
2368     Not Collective
2369 
2370     Input Parameter:
2371 .   dm - the DM object
2372 
2373     Output Parameter:
2374 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2375 
2376     Level: developer
2377 
2378 .seealso DMHasFunction(), DMCreateColoring()
2379 
2380 @*/
2381 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2382 {
2383   PetscFunctionBegin;
2384   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2385   PetscFunctionReturn(0);
2386 }
2387 
2388 #undef  __FUNCT__
2389 #define __FUNCT__ "DMSetVec"
2390 /*@C
2391     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2392 
2393     Collective on DM
2394 
2395     Input Parameter:
2396 +   dm - the DM object
2397 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2398 
2399     Level: developer
2400 
2401 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2402 
2403 @*/
2404 PetscErrorCode  DMSetVec(DM dm,Vec x)
2405 {
2406   PetscErrorCode ierr;
2407 
2408   PetscFunctionBegin;
2409   if (x) {
2410     if (!dm->x) {
2411       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2412     }
2413     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2414   } else if (dm->x) {
2415     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2416   }
2417   PetscFunctionReturn(0);
2418 }
2419 
/* Registry of DM implementations, populated by DMRegister()/DMRegisterAll() and consulted by DMSetType() */
PetscFunctionList DMList              = PETSC_NULL;
/* Set once DMRegisterAll() has run, so registration of the built-in types happens only one time */
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2422 
2423 #undef __FUNCT__
2424 #define __FUNCT__ "DMSetType"
2425 /*@C
2426   DMSetType - Builds a DM, for a particular DM implementation.
2427 
2428   Collective on DM
2429 
2430   Input Parameters:
2431 + dm     - The DM object
2432 - method - The name of the DM type
2433 
2434   Options Database Key:
2435 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2436 
2437   Notes:
2438   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2439 
2440   Level: intermediate
2441 
2442 .keywords: DM, set, type
2443 .seealso: DMGetType(), DMCreate()
2444 @*/
2445 PetscErrorCode  DMSetType(DM dm, DMType method)
2446 {
2447   PetscErrorCode (*r)(DM);
2448   PetscBool      match;
2449   PetscErrorCode ierr;
2450 
2451   PetscFunctionBegin;
2452   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2453   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2454   if (match) PetscFunctionReturn(0);
2455 
2456   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2457   ierr = PetscFunctionListFind(((PetscObject)dm)->comm, DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2458   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2459 
2460   if (dm->ops->destroy) {
2461     ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2462     dm->ops->destroy = PETSC_NULL;
2463   }
2464   ierr = (*r)(dm);CHKERRQ(ierr);
2465   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2466   PetscFunctionReturn(0);
2467 }
2468 
2469 #undef __FUNCT__
2470 #define __FUNCT__ "DMGetType"
2471 /*@C
2472   DMGetType - Gets the DM type name (as a string) from the DM.
2473 
2474   Not Collective
2475 
2476   Input Parameter:
2477 . dm  - The DM
2478 
2479   Output Parameter:
2480 . type - The DM type name
2481 
2482   Level: intermediate
2483 
2484 .keywords: DM, get, type, name
2485 .seealso: DMSetType(), DMCreate()
2486 @*/
2487 PetscErrorCode  DMGetType(DM dm, DMType *type)
2488 {
2489   PetscErrorCode ierr;
2490 
2491   PetscFunctionBegin;
2492   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2493   PetscValidCharPointer(type,2);
2494   if (!DMRegisterAllCalled) {
2495     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2496   }
2497   *type = ((PetscObject)dm)->type_name;
2498   PetscFunctionReturn(0);
2499 }
2500 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed here but never consulted below -- confirm whether a same-type fast path was intended */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = PETSC_NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class
          (composed function named "DMConvert_<fromtype>_<totype>_C") */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class.
           A throwaway DM of the target type is created just to query its composed functions. */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    /* Run the selected converter, timed under the DM_Convert event */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2596 
2597 /*--------------------------------------------------------------------------------------------------------------------*/
2598 
2599 #undef __FUNCT__
2600 #define __FUNCT__ "DMRegister"
2601 /*@C
2602   DMRegister - See DMRegisterDynamic()
2603 
2604   Level: advanced
2605 @*/
2606 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2607 {
2608   char           fullname[PETSC_MAX_PATH_LEN];
2609   PetscErrorCode ierr;
2610 
2611   PetscFunctionBegin;
2612   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2613   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2614   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2615   ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void))function);CHKERRQ(ierr);
2616   PetscFunctionReturn(0);
2617 }
2618 
2619 
2620 /*--------------------------------------------------------------------------------------------------------------------*/
2621 #undef __FUNCT__
2622 #define __FUNCT__ "DMRegisterDestroy"
2623 /*@C
2624    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2625 
2626    Not Collective
2627 
2628    Level: advanced
2629 
2630 .keywords: DM, register, destroy
2631 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2632 @*/
2633 PetscErrorCode  DMRegisterDestroy(void)
2634 {
2635   PetscErrorCode ierr;
2636 
2637   PetscFunctionBegin;
2638   ierr                = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
2639   DMRegisterAllCalled = PETSC_FALSE;
2640   PetscFunctionReturn(0);
2641 }
2642 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary  with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer is handled here, despite the HDF5 mention in the manpage above -- NOTE(review): confirm */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File layout: class id, then the type name (256 chars), then type-specific data read by ops->load */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2694 
2695 /******************************** FEM Support **********************************/
2696 
2697 #undef __FUNCT__
2698 #define __FUNCT__ "DMPrintCellVector"
2699 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2700 {
2701   PetscInt       f;
2702   PetscErrorCode ierr;
2703 
2704   PetscFunctionBegin;
2705   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2706   for (f = 0; f < len; ++f) {
2707     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2708   }
2709   PetscFunctionReturn(0);
2710 }
2711 
2712 #undef __FUNCT__
2713 #define __FUNCT__ "DMPrintCellMatrix"
2714 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2715 {
2716   PetscInt       f, g;
2717   PetscErrorCode ierr;
2718 
2719   PetscFunctionBegin;
2720   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2721   for (f = 0; f < rows; ++f) {
2722     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2723     for (g = 0; g < cols; ++g) {
2724       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2725     }
2726     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2727   }
2728   PetscFunctionReturn(0);
2729 }
2730 
2731 #undef __FUNCT__
2732 #define __FUNCT__ "DMGetDefaultSection"
2733 /*@
2734   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2735 
2736   Input Parameter:
2737 . dm - The DM
2738 
2739   Output Parameter:
2740 . section - The PetscSection
2741 
2742   Level: intermediate
2743 
2744   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2745 
2746 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2747 @*/
2748 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
2749 {
2750   PetscFunctionBegin;
2751   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2752   PetscValidPointer(section, 2);
2753   *section = dm->defaultSection;
2754   PetscFunctionReturn(0);
2755 }
2756 
2757 #undef __FUNCT__
2758 #define __FUNCT__ "DMSetDefaultSection"
2759 /*@
2760   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2761 
2762   Input Parameters:
2763 + dm - The DM
2764 - section - The PetscSection
2765 
2766   Level: intermediate
2767 
2768   Note: Any existing Section will be destroyed
2769 
2770 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2771 @*/
2772 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2773 {
2774   PetscInt       numFields;
2775   PetscInt       f;
2776   PetscErrorCode ierr;
2777 
2778   PetscFunctionBegin;
2779   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2780   ierr               = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2781   ierr               = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2782   dm->defaultSection = section;
2783   ierr               = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2784   if (numFields) {
2785     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2786     for (f = 0; f < numFields; ++f) {
2787       const char *name;
2788 
2789       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2790       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2791     }
2792   }
2793   PetscFunctionReturn(0);
2794 }
2795 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

  The global section is built lazily on first use from the default (local)
  PetscSection and the point PetscSF; both must already be set on the DM.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  if (!dm->defaultGlobalSection) {
    /* Lazy creation: derive the global layout from the local section and the point SF */
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also cache the parallel layout of the values in dm->map */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2830 
2831 #undef __FUNCT__
2832 #define __FUNCT__ "DMSetDefaultGlobalSection"
2833 /*@
2834   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2835 
2836   Input Parameters:
2837 + dm - The DM
2838 - section - The PetscSection
2839 
2840   Level: intermediate
2841 
2842   Note: Any existing Section will be destroyed
2843 
2844 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2845 @*/
2846 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2847 {
2848   PetscErrorCode ierr;
2849 
2850   PetscFunctionBegin;
2851   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2852   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2853 
2854   dm->defaultGlobalSection = section;
2855   PetscFunctionReturn(0);
2856 }
2857 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSF"
/*@
  DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
  it is created from the default PetscSection layouts in the DM.

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or PETSC_NULL if no default PetscSection is available to build one

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* A negative root count means the SF graph has not been set yet */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    /* Lazily build the dof SF from the local and global sections */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No layout information: report "no SF" rather than erroring */
      *sf = PETSC_NULL;
      PetscFunctionReturn(0);
    }
  }
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
2900 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Note: Any previous SF is destroyed

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): no PetscObjectReference() is taken on sf here, unlike
     DMSetPointSF() below — the DM appears to assume ownership of the caller's
     reference; confirm this asymmetry is intentional. */
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2927 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm = ((PetscObject) dm)->comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  /* Build a PetscLayout over the owned (root) dofs so global offsets can be
     mapped to (rank, local index) pairs via the ownership ranges */
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count the leaves, i.e. unconstrained dofs of every point in the chart.
     A negative gdof encodes a remotely-owned point whose true size is -(gdof+1). */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local leaf indices and their remote (rank, index) roots */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local indices of the unconstrained dofs; cind is the sorted list of
       constrained components to skip */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely-owned point: -(goff+1) recovers the true global offset; binary
         search the ownership ranges for the owner rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact range start: owner is the preceding range */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally-owned point: root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3016 
3017 #undef __FUNCT__
3018 #define __FUNCT__ "DMGetPointSF"
3019 /*@
3020   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3021 
3022   Input Parameter:
3023 . dm - The DM
3024 
3025   Output Parameter:
3026 . sf - The PetscSF
3027 
3028   Level: intermediate
3029 
3030   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3031 
3032 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3033 @*/
3034 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
3035 {
3036   PetscFunctionBegin;
3037   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3038   PetscValidPointer(sf, 2);
3039   *sf = dm->sf;
3040   PetscFunctionReturn(0);
3041 }
3042 
3043 #undef __FUNCT__
3044 #define __FUNCT__ "DMSetPointSF"
3045 /*@
3046   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3047 
3048   Input Parameters:
3049 + dm - The DM
3050 - sf - The PetscSF
3051 
3052   Level: intermediate
3053 
3054 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3055 @*/
3056 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3057 {
3058   PetscErrorCode ierr;
3059 
3060   PetscFunctionBegin;
3061   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3062   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3063   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3064   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3065   dm->sf = sf;
3066   PetscFunctionReturn(0);
3067 }
3068 
3069 #undef __FUNCT__
3070 #define __FUNCT__ "DMGetNumFields"
3071 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3072 {
3073   PetscFunctionBegin;
3074   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3075   PetscValidPointer(numFields, 2);
3076   *numFields = dm->numFields;
3077   PetscFunctionReturn(0);
3078 }
3079 
#undef __FUNCT__
#define __FUNCT__ "DMSetNumFields"
/* Resize the DM's field array to numFields, destroying any existing field objects
   and creating a fresh empty PetscContainer placeholder for each slot. */
PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Drop all previously attached field objects before reallocating */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Each slot starts as a generic container; callers replace or name them later */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3100 
3101 #undef __FUNCT__
3102 #define __FUNCT__ "DMGetField"
3103 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3104 {
3105   PetscFunctionBegin;
3106   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3107   PetscValidPointer(field, 2);
3108   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3109   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3110   *field = dm->fields[f];
3111   PetscFunctionReturn(0);
3112 }
3113 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference c before destroying the old vector, so c == dm->coordinates is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* The cached local coordinate vector is now stale; it is rebuilt on demand */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3146 
3147 #undef __FUNCT__
3148 #define __FUNCT__ "DMSetCoordinatesLocal"
3149 /*@
3150   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3151 
3152   Collective on DM
3153 
3154    Input Parameters:
3155 +  dm - the DM
3156 -  c - coordinate vector
3157 
3158   Note:
3159   The coordinates of ghost points can be set using DMSetCoordinates()
3160   followed by DMGetCoordinatesLocal(). This is intended to enable the
3161   setting of ghost coordinates outside of the domain.
3162 
3163   Level: intermediate
3164 
3165 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3166 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3167 @*/
3168 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3169 {
3170   PetscErrorCode ierr;
3171 
3172   PetscFunctionBegin;
3173   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3174   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3175   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3176   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3177 
3178   dm->coordinatesLocal = c;
3179 
3180   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3181   PetscFunctionReturn(0);
3182 }
3183 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local one via the coordinate DM */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* May be PETSC_NULL if no coordinates of either kind have been set */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3229 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinatesLocal"
/*@
  DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has the local and ghost coordinates

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily scatter the global coordinates into a local (ghosted) vector */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* May be PETSC_NULL if no coordinates of either kind have been set */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3275 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinateDM"
/*@
  DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates

  Collective on DM

  Input Parameter:
. dm - the DM

  Output Parameter:
. cdm - coordinate DM (borrowed reference; do not destroy)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
@*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM via the type-specific hook; error if the
     DM implementation does not provide one */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3308 
3309 #undef __FUNCT__
3310 #define __FUNCT__ "DMLocatePoints"
3311 /*@
3312   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3313 
3314   Not collective
3315 
3316   Input Parameters:
3317 + dm - The DM
3318 - v - The Vec of points
3319 
3320   Output Parameter:
3321 . cells - The local cell numbers for cells which contain the points
3322 
3323   Level: developer
3324 
3325 .keywords: point location, mesh
3326 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3327 @*/
3328 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3329 {
3330   PetscErrorCode ierr;
3331 
3332   PetscFunctionBegin;
3333   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3334   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3335   PetscValidPointer(cells,3);
3336   if (dm->ops->locatepoints) {
3337     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3338   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3339   PetscFunctionReturn(0);
3340 }
3341