xref: /petsc/src/dm/interface/dm.c (revision ce94432eddcd14845bc7e8083b7f8ea723b9bf7d)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never  call DMSetType()  it will generate an
38    error when you try to use the vector.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
52 PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
53 {
54   DM             v;
55   PetscErrorCode ierr;
56 
57   PetscFunctionBegin;
58   PetscValidPointer(dm,2);
59   *dm = NULL;
60 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
61   ierr = VecInitializePackage(NULL);CHKERRQ(ierr);
62   ierr = MatInitializePackage(NULL);CHKERRQ(ierr);
63   ierr = DMInitializePackage(NULL);CHKERRQ(ierr);
64 #endif
65 
66   ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
67   ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);
68 
69 
70   v->ltogmap              = NULL;
71   v->ltogmapb             = NULL;
72   v->bs                   = 1;
73   v->coloringtype         = IS_COLORING_GLOBAL;
74   ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
75   ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
76   v->defaultSection       = NULL;
77   v->defaultGlobalSection = NULL;
78   {
79     PetscInt i;
80     for (i = 0; i < 10; ++i) {
81       v->nullspaceConstructors[i] = NULL;
82     }
83   }
84   v->numFields = 0;
85   v->fields    = NULL;
86 
87   *dm = v;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMSetVecType"
93 /*@C
94        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
95 
96    Logically Collective on DMDA
97 
98    Input Parameter:
99 +  da - initial distributed array
100 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
101 
102    Options Database:
103 .   -dm_vec_type ctype
104 
105    Level: intermediate
106 
107 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
108 @*/
109 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   PetscValidHeaderSpecific(da,DM_CLASSID,1);
115   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
116   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "VecGetDM"
122 /*@
123   VecGetDM - Gets the DM defining the data layout of the vector
124 
125   Not collective
126 
127   Input Parameter:
128 . v - The Vec
129 
130   Output Parameter:
131 . dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecGetDM(Vec v, DM *dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidPointer(dm,2);
144   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #undef __FUNCT__
175 #define __FUNCT__ "DMSetMatType"
176 /*@C
177        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
178 
179    Logically Collective on DM
180 
181    Input Parameter:
182 +  dm - the DM context
183 .  ctype - the matrix type
184 
185    Options Database:
186 .   -dm_mat_type ctype
187 
188    Level: intermediate
189 
190 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
191 @*/
192 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
193 {
194   PetscErrorCode ierr;
195 
196   PetscFunctionBegin;
197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
198   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
199   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "MatGetDM"
205 /*@
206   MatGetDM - Gets the DM defining the data layout of the matrix
207 
208   Not collective
209 
210   Input Parameter:
211 . A - The Mat
212 
213   Output Parameter:
214 . dm - The DM
215 
216   Level: intermediate
217 
218 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
219 @*/
220 PetscErrorCode MatGetDM(Mat A, DM *dm)
221 {
222   PetscErrorCode ierr;
223 
224   PetscFunctionBegin;
225   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
226   PetscValidPointer(dm,2);
227   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatSetDM"
233 /*@
234   MatSetDM - Sets the DM defining the data layout of the matrix
235 
236   Not collective
237 
238   Input Parameters:
239 + A - The Mat
240 - dm - The DM
241 
242   Level: intermediate
243 
244 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
245 @*/
246 PetscErrorCode MatSetDM(Mat A, DM dm)
247 {
248   PetscErrorCode ierr;
249 
250   PetscFunctionBegin;
251   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
252   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
253   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMSetOptionsPrefix"
259 /*@C
260    DMSetOptionsPrefix - Sets the prefix used for searching for all
261    DMDA options in the database.
262 
263    Logically Collective on DMDA
264 
265    Input Parameter:
266 +  da - the DMDA context
267 -  prefix - the prefix to prepend to all option names
268 
269    Notes:
270    A hyphen (-) must NOT be given at the beginning of the prefix name.
271    The first character of all runtime options is AUTOMATICALLY the hyphen.
272 
273    Level: advanced
274 
275 .keywords: DMDA, set, options, prefix, database
276 
277 .seealso: DMSetFromOptions()
278 @*/
279 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
280 {
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
285   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
286   PetscFunctionReturn(0);
287 }
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMDestroy"
291 /*@
292     DMDestroy - Destroys a vector packer or DMDA.
293 
294     Collective on DM
295 
296     Input Parameter:
297 .   dm - the DM object to destroy
298 
299     Level: developer
300 
301 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
302 
303 @*/
304 PetscErrorCode  DMDestroy(DM *dm)
305 {
306   PetscInt       i, cnt = 0, f;
307   DMNamedVecLink nlink,nnext;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*dm) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
313 
314   /* count all the circular references of DM and its contained Vecs */
315   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
316     if ((*dm)->localin[i])  cnt++;
317     if ((*dm)->globalin[i]) cnt++;
318   }
319   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
320   if ((*dm)->x) {
321     DM obj;
322     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
323     if (obj == *dm) cnt++;
324   }
325   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
326   if ((*dm)->x) {
327     DM obj;
328     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
329     if (obj == *dm) cnt++;
330   }
331 
332   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
333   /*
334      Need this test because the dm references the vectors that
335      reference the dm, so destroying the dm calls destroy on the
336      vectors that cause another destroy on the dm
337   */
338   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
339   ((PetscObject) (*dm))->refct = 0;
340   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
341     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
342     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
343   }
344   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
345     nnext = nlink->next;
346     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
347     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
348     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
349     ierr = PetscFree(nlink);CHKERRQ(ierr);
350   }
351   (*dm)->namedglobal = NULL;
352 
353   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
354     nnext = nlink->next;
355     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
356     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
357     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
358     ierr = PetscFree(nlink);CHKERRQ(ierr);
359   }
360   (*dm)->namedlocal = NULL;
361 
362   /* Destroy the list of hooks */
363   {
364     DMCoarsenHookLink link,next;
365     for (link=(*dm)->coarsenhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->coarsenhook = NULL;
370   }
371   {
372     DMRefineHookLink link,next;
373     for (link=(*dm)->refinehook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->refinehook = NULL;
378   }
379   {
380     DMSubDomainHookLink link,next;
381     for (link=(*dm)->subdomainhook; link; link=next) {
382       next = link->next;
383       ierr = PetscFree(link);CHKERRQ(ierr);
384     }
385     (*dm)->subdomainhook = NULL;
386   }
387   {
388     DMGlobalToLocalHookLink link,next;
389     for (link=(*dm)->gtolhook; link; link=next) {
390       next = link->next;
391       ierr = PetscFree(link);CHKERRQ(ierr);
392     }
393     (*dm)->gtolhook = NULL;
394   }
395   /* Destroy the work arrays */
396   {
397     DMWorkLink link,next;
398     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
399     for (link=(*dm)->workin; link; link=next) {
400       next = link->next;
401       ierr = PetscFree(link->mem);CHKERRQ(ierr);
402       ierr = PetscFree(link);CHKERRQ(ierr);
403     }
404     (*dm)->workin = NULL;
405   }
406 
407   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
408   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
410 
411   if ((*dm)->ctx && (*dm)->ctxdestroy) {
412     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
413   }
414   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
415   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
416   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
417   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
419   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
421 
422   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
423   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
424   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
425   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
427 
428   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
429   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
431 
432   for (f = 0; f < (*dm)->numFields; ++f) {
433     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
434   }
435   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
436   /* if memory was published with AMS then destroy it */
437   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
438 
439   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
440   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
441   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "DMSetUp"
447 /*@
448     DMSetUp - sets up the data structures inside a DM object
449 
450     Collective on DM
451 
452     Input Parameter:
453 .   dm - the DM object to setup
454 
455     Level: developer
456 
457 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
458 
459 @*/
460 PetscErrorCode  DMSetUp(DM dm)
461 {
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
466   if (dm->setupcalled) PetscFunctionReturn(0);
467   if (dm->ops->setup) {
468     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
469   }
470   dm->setupcalled = PETSC_TRUE;
471   PetscFunctionReturn(0);
472 }
473 
474 #undef __FUNCT__
475 #define __FUNCT__ "DMSetFromOptions"
476 /*@
477     DMSetFromOptions - sets parameters in a DM from the options database
478 
479     Collective on DM
480 
481     Input Parameter:
482 .   dm - the DM object to set options for
483 
484     Options Database:
485 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
486 .   -dm_vec_type <type>  type of vector to create inside DM
487 .   -dm_mat_type <type>  type of matrix to create inside DM
488 -   -dm_coloring_type <global or ghosted>
489 
490     Level: developer
491 
492 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
493 
494 @*/
495 PetscErrorCode  DMSetFromOptions(DM dm)
496 {
497   char           typeName[256] = MATAIJ;
498   PetscBool      flg;
499   PetscErrorCode ierr;
500 
501   PetscFunctionBegin;
502   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
503   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
504   ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
505   ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
506   if (flg) {
507     ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
508   }
509   ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
510   if (flg) {
511     ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
512   }
513   ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
514   if (dm->ops->setfromoptions) {
515     ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
516   }
517   /* process any options handlers added with PetscObjectAddOptionsHandler() */
518   ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
519   ierr = PetscOptionsEnd();CHKERRQ(ierr);
520   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
521   PetscFunctionReturn(0);
522 }
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMView"
526 /*@C
527     DMView - Views a vector packer or DMDA.
528 
529     Collective on DM
530 
531     Input Parameter:
532 +   dm - the DM object to view
533 -   v - the viewer
534 
535     Level: developer
536 
537 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
538 
539 @*/
540 PetscErrorCode  DMView(DM dm,PetscViewer v)
541 {
542   PetscErrorCode ierr;
543   PetscBool      isbinary;
544 
545   PetscFunctionBegin;
546   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
547   if (!v) {
548     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
549   }
550   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
551   if (isbinary) {
552     PetscInt classid = DM_FILE_CLASSID;
553     char     type[256];
554 
555     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
556     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
557     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
558   }
559   if (dm->ops->view) {
560     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
561   }
562   PetscFunctionReturn(0);
563 }
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "DMCreateGlobalVector"
567 /*@
568     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
569 
570     Collective on DM
571 
572     Input Parameter:
573 .   dm - the DM object
574 
575     Output Parameter:
576 .   vec - the global vector
577 
578     Level: beginner
579 
580 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
581 
582 @*/
583 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
584 {
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
589   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "DMCreateLocalVector"
595 /*@
596     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
597 
598     Not Collective
599 
600     Input Parameter:
601 .   dm - the DM object
602 
603     Output Parameter:
604 .   vec - the local vector
605 
606     Level: beginner
607 
608 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
609 
610 @*/
611 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
612 {
613   PetscErrorCode ierr;
614 
615   PetscFunctionBegin;
616   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
617   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
618   PetscFunctionReturn(0);
619 }
620 
621 #undef __FUNCT__
622 #define __FUNCT__ "DMGetLocalToGlobalMapping"
623 /*@
624    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
625 
626    Collective on DM
627 
628    Input Parameter:
629 .  dm - the DM that provides the mapping
630 
631    Output Parameter:
632 .  ltog - the mapping
633 
634    Level: intermediate
635 
636    Notes:
637    This mapping can then be used by VecSetLocalToGlobalMapping() or
638    MatSetLocalToGlobalMapping().
639 
640 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
641 @*/
642 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
643 {
644   PetscErrorCode ierr;
645 
646   PetscFunctionBegin;
647   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
648   PetscValidPointer(ltog,2);
649   if (!dm->ltogmap) {
650     PetscSection section, sectionGlobal;
651 
652     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
653     if (section) {
654       PetscInt *ltog;
655       PetscInt pStart, pEnd, size, p, l;
656 
657       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
658       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
659       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
660       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
661       for (p = pStart, l = 0; p < pEnd; ++p) {
662         PetscInt dof, off, c;
663 
664         /* Should probably use constrained dofs */
665         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
666         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
667         for (c = 0; c < dof; ++c, ++l) {
668           ltog[l] = off+c;
669         }
670       }
671       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
672       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
673     } else {
674       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
675       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
676     }
677   }
678   *ltog = dm->ltogmap;
679   PetscFunctionReturn(0);
680 }
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
684 /*@
685    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
686 
687    Collective on DM
688 
689    Input Parameter:
690 .  da - the distributed array that provides the mapping
691 
692    Output Parameter:
693 .  ltog - the block mapping
694 
695    Level: intermediate
696 
697    Notes:
698    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
699    MatSetLocalToGlobalMappingBlock().
700 
701 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
702 @*/
703 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
704 {
705   PetscErrorCode ierr;
706 
707   PetscFunctionBegin;
708   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
709   PetscValidPointer(ltog,2);
710   if (!dm->ltogmapb) {
711     PetscInt bs;
712     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
713     if (bs > 1) {
714       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
715       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
716     } else {
717       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
718       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
719     }
720   }
721   *ltog = dm->ltogmapb;
722   PetscFunctionReturn(0);
723 }
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "DMGetBlockSize"
727 /*@
728    DMGetBlockSize - Gets the inherent block size associated with a DM
729 
730    Not Collective
731 
732    Input Parameter:
733 .  dm - the DM with block structure
734 
735    Output Parameter:
736 .  bs - the block size, 1 implies no exploitable block structure
737 
738    Level: intermediate
739 
740 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
741 @*/
742 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
743 {
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
746   PetscValidPointer(bs,2);
747   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
748   *bs = dm->bs;
749   PetscFunctionReturn(0);
750 }
751 
752 #undef __FUNCT__
753 #define __FUNCT__ "DMCreateInterpolation"
754 /*@
755     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
756 
757     Collective on DM
758 
759     Input Parameter:
760 +   dm1 - the DM object
761 -   dm2 - the second, finer DM object
762 
763     Output Parameter:
764 +  mat - the interpolation
765 -  vec - the scaling (optional)
766 
767     Level: developer
768 
769     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
770         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
771 
772         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
773         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
774 
775 
776 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
777 
778 @*/
779 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
780 {
781   PetscErrorCode ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
785   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
786   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "DMCreateInjection"
792 /*@
793     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
794 
795     Collective on DM
796 
797     Input Parameter:
798 +   dm1 - the DM object
799 -   dm2 - the second, finer DM object
800 
801     Output Parameter:
802 .   ctx - the injection
803 
804     Level: developer
805 
806    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
807         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
808 
809 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
810 
811 @*/
812 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
813 {
814   PetscErrorCode ierr;
815 
816   PetscFunctionBegin;
817   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
818   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
819   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "DMCreateColoring"
825 /*@C
826     DMCreateColoring - Gets coloring for a DMDA or DMComposite
827 
828     Collective on DM
829 
830     Input Parameter:
831 +   dm - the DM object
832 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
833 -   matype - either MATAIJ or MATBAIJ
834 
835     Output Parameter:
836 .   coloring - the coloring
837 
838     Level: developer
839 
840 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
841 
842 @*/
843 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
844 {
845   PetscErrorCode ierr;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
849   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
850   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
851   PetscFunctionReturn(0);
852 }
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMCreateMatrix"
856 /*@C
857     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
858 
859     Collective on DM
860 
861     Input Parameter:
862 +   dm - the DM object
863 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
864             any type which inherits from one of these (such as MATAIJ)
865 
866     Output Parameter:
867 .   mat - the empty Jacobian
868 
869     Level: beginner
870 
871     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
872        do not need to do it yourself.
873 
874        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
875        the nonzero pattern call DMDASetMatPreallocateOnly()
876 
877        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
878        internally by PETSc.
879 
880        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
881        the indices for the global numbering for DMDAs which is complicated.
882 
883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
884 
885 @*/
886 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
887 {
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
892 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
893   ierr = MatInitializePackage(NULL);CHKERRQ(ierr);
894 #endif
895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
896   PetscValidPointer(mat,3);
897   if (dm->mattype) {
898     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
899   } else {
900     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
907 /*@
908   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
909     preallocated but the nonzero structure and zero values will not be set.
910 
911   Logically Collective on DMDA
912 
913   Input Parameter:
914 + dm - the DM
915 - only - PETSC_TRUE if only want preallocation
916 
917   Level: developer
918 .seealso DMCreateMatrix()
919 @*/
920 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
921 {
922   PetscFunctionBegin;
923   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
924   dm->prealloc_only = only;
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "DMGetWorkArray"
930 /*@C
931   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
932 
933   Not Collective
934 
935   Input Parameters:
936 + dm - the DM object
937 . count - The minium size
938 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
939 
940   Output Parameter:
941 . array - the work array
942 
943   Level: developer
944 
945 .seealso DMDestroy(), DMCreate()
946 @*/
947 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
948 {
949   PetscErrorCode ierr;
950   DMWorkLink     link;
951   size_t         size;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
955   PetscValidPointer(mem,4);
956   if (dm->workin) {
957     link       = dm->workin;
958     dm->workin = dm->workin->next;
959   } else {
960     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
961   }
962   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
963   if (size*count > link->bytes) {
964     ierr        = PetscFree(link->mem);CHKERRQ(ierr);
965     ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
966     link->bytes = size*count;
967   }
968   link->next   = dm->workout;
969   dm->workout  = link;
970   *(void**)mem = link->mem;
971   PetscFunctionReturn(0);
972 }
973 
974 #undef __FUNCT__
975 #define __FUNCT__ "DMRestoreWorkArray"
976 /*@C
977   DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
978 
979   Not Collective
980 
981   Input Parameters:
982 + dm - the DM object
983 . count - The minium size
984 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
985 
986   Output Parameter:
987 . array - the work array
988 
989   Level: developer
990 
991 .seealso DMDestroy(), DMCreate()
992 @*/
993 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
994 {
995   DMWorkLink *p,link;
996 
997   PetscFunctionBegin;
998   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
999   PetscValidPointer(mem,4);
1000   for (p=&dm->workout; (link=*p); p=&link->next) {
1001     if (link->mem == *(void**)mem) {
1002       *p           = link->next;
1003       link->next   = dm->workin;
1004       dm->workin   = link;
1005       *(void**)mem = NULL;
1006       PetscFunctionReturn(0);
1007     }
1008   }
1009   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "DMSetNullSpaceConstructor"
1015 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1019   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1020   dm->nullspaceConstructors[field] = nullsp;
1021   PetscFunctionReturn(0);
1022 }
1023 
1024 #undef __FUNCT__
1025 #define __FUNCT__ "DMCreateFieldIS"
1026 /*@C
1027   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1028 
1029   Not collective
1030 
1031   Input Parameter:
1032 . dm - the DM object
1033 
1034   Output Parameters:
1035 + numFields  - The number of fields (or NULL if not requested)
1036 . fieldNames - The name for each field (or NULL if not requested)
1037 - fields     - The global indices for each field (or NULL if not requested)
1038 
1039   Level: intermediate
1040 
1041   Notes:
1042   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1043   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1044   PetscFree().
1045 
1046 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1047 @*/
1048 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1049 {
1050   PetscSection   section, sectionGlobal;
1051   PetscErrorCode ierr;
1052 
1053   PetscFunctionBegin;
1054   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1055   if (numFields) {
1056     PetscValidPointer(numFields,2);
1057     *numFields = 0;
1058   }
1059   if (fieldNames) {
1060     PetscValidPointer(fieldNames,3);
1061     *fieldNames = NULL;
1062   }
1063   if (fields) {
1064     PetscValidPointer(fields,4);
1065     *fields = NULL;
1066   }
1067   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1068   if (section) {
1069     PetscInt *fieldSizes, **fieldIndices;
1070     PetscInt nF, f, pStart, pEnd, p;
1071 
1072     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1073     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1074     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
1075     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1076     for (f = 0; f < nF; ++f) {
1077       fieldSizes[f] = 0;
1078     }
1079     for (p = pStart; p < pEnd; ++p) {
1080       PetscInt gdof;
1081 
1082       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1083       if (gdof > 0) {
1084         for (f = 0; f < nF; ++f) {
1085           PetscInt fdof, fcdof;
1086 
1087           ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1088           ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1089           fieldSizes[f] += fdof-fcdof;
1090         }
1091       }
1092     }
1093     for (f = 0; f < nF; ++f) {
1094       ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1095       fieldSizes[f] = 0;
1096     }
1097     for (p = pStart; p < pEnd; ++p) {
1098       PetscInt gdof, goff;
1099 
1100       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1101       if (gdof > 0) {
1102         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1103         for (f = 0; f < nF; ++f) {
1104           PetscInt fdof, fcdof, fc;
1105 
1106           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1107           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1108           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1109             fieldIndices[f][fieldSizes[f]] = goff++;
1110           }
1111         }
1112       }
1113     }
1114     if (numFields) *numFields = nF;
1115     if (fieldNames) {
1116       ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
1117       for (f = 0; f < nF; ++f) {
1118         const char *fieldName;
1119 
1120         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1121         ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
1122       }
1123     }
1124     if (fields) {
1125       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1126       for (f = 0; f < nF; ++f) {
1127         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1128       }
1129     }
1130     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1131   } else if (dm->ops->createfieldis) {
1132     ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
1133   }
1134   PetscFunctionReturn(0);
1135 }
1136 
1137 
1138 #undef __FUNCT__
1139 #define __FUNCT__ "DMCreateFieldDecomposition"
1140 /*@C
1141   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1142                           corresponding to different fields: each IS contains the global indices of the dofs of the
1143                           corresponding field. The optional list of DMs define the DM for each subproblem.
1144                           Generalizes DMCreateFieldIS().
1145 
1146   Not collective
1147 
1148   Input Parameter:
1149 . dm - the DM object
1150 
1151   Output Parameters:
1152 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1153 . namelist  - The name for each field (or NULL if not requested)
1154 . islist    - The global indices for each field (or NULL if not requested)
1155 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1156 
1157   Level: intermediate
1158 
1159   Notes:
1160   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1161   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1162   and all of the arrays should be freed with PetscFree().
1163 
1164 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1165 @*/
1166 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1167 {
1168   PetscErrorCode ierr;
1169 
1170   PetscFunctionBegin;
1171   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1172   if (len) {
1173     PetscValidPointer(len,2);
1174     *len = 0;
1175   }
1176   if (namelist) {
1177     PetscValidPointer(namelist,3);
1178     *namelist = 0;
1179   }
1180   if (islist) {
1181     PetscValidPointer(islist,4);
1182     *islist = 0;
1183   }
1184   if (dmlist) {
1185     PetscValidPointer(dmlist,5);
1186     *dmlist = 0;
1187   }
1188   /*
1189    Is it a good idea to apply the following check across all impls?
1190    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1191    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1192    */
1193   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1194   if (!dm->ops->createfielddecomposition) {
1195     PetscSection section;
1196     PetscInt     numFields, f;
1197 
1198     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1199     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1200     if (section && numFields && dm->ops->createsubdm) {
1201       *len = numFields;
1202       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1203       for (f = 0; f < numFields; ++f) {
1204         const char *fieldName;
1205 
1206         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1207         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1208         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1209       }
1210     } else {
1211       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1212       /* By default there are no DMs associated with subproblems. */
1213       if (dmlist) *dmlist = NULL;
1214     }
1215   } else {
1216     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1217   }
1218   PetscFunctionReturn(0);
1219 }
1220 
1221 #undef __FUNCT__
1222 #define __FUNCT__ "DMCreateSubDM"
1223 /*@C
1224   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1225                   The fields are defined by DMCreateFieldIS().
1226 
1227   Not collective
1228 
1229   Input Parameters:
1230 + dm - the DM object
1231 . numFields - number of fields in this subproblem
1232 - len       - The number of subproblems in the decomposition (or NULL if not requested)
1233 
1234   Output Parameters:
1235 . is - The global indices for the subproblem
1236 - dm - The DM for the subproblem
1237 
1238   Level: intermediate
1239 
1240 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1241 @*/
1242 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1243 {
1244   PetscErrorCode ierr;
1245 
1246   PetscFunctionBegin;
1247   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1248   PetscValidPointer(fields,3);
1249   if (is) PetscValidPointer(is,4);
1250   if (subdm) PetscValidPointer(subdm,5);
1251   if (dm->ops->createsubdm) {
1252     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1253   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1254   PetscFunctionReturn(0);
1255 }
1256 
1257 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; zero the requested ones up front. */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  /* Silently a no-op (all outputs stay zeroed) when the type provides no implementation. */
  if (dm->ops->createdomaindecomposition) {
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* Each subdomain DM inherits the parent's user context pointer. */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1322 
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1326 /*@C
1327   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1328 
1329   Not collective
1330 
1331   Input Parameters:
1332 + dm - the DM object
1333 . n  - the number of subdomain scatters
1334 - subdms - the local subdomains
1335 
1336   Output Parameters:
1337 + n     - the number of scatters returned
1338 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1339 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1340 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1341 
1342   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1343   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1344   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1345   solution and residual data.
1346 
1347   Level: developer
1348 
1349 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1350 @*/
1351 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1352 {
1353   PetscErrorCode ierr;
1354 
1355   PetscFunctionBegin;
1356   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1357   PetscValidPointer(subdms,3);
1358   PetscValidPointer(iscat,4);
1359   PetscValidPointer(oscat,5);
1360   PetscValidPointer(gscat,6);
1361   if (dm->ops->createddscatters) {
1362     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1363   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1364   PetscFunctionReturn(0);
1365 }
1366 
1367 #undef __FUNCT__
1368 #define __FUNCT__ "DMRefine"
1369 /*@
1370   DMRefine - Refines a DM object
1371 
1372   Collective on DM
1373 
1374   Input Parameter:
1375 + dm   - the DM object
1376 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1377 
1378   Output Parameter:
1379 . dmf - the refined DM, or NULL
1380 
1381   Note: If no refinement was done, the return value is NULL
1382 
1383   Level: developer
1384 
1385 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1386 @*/
1387 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1388 {
1389   PetscErrorCode   ierr;
1390   DMRefineHookLink link;
1391 
1392   PetscFunctionBegin;
1393   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1394   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1395   if (*dmf) {
1396     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1397 
1398     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1399 
1400     (*dmf)->ctx       = dm->ctx;
1401     (*dmf)->leveldown = dm->leveldown;
1402     (*dmf)->levelup   = dm->levelup + 1;
1403 
1404     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1405     for (link=dm->refinehook; link; link=link->next) {
1406       if (link->refinehook) {
1407         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1408       }
1409     }
1410   }
1411   PetscFunctionReturn(0);
1412 }
1413 
1414 #undef __FUNCT__
1415 #define __FUNCT__ "DMRefineHookAdd"
1416 /*@
1417    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1418 
1419    Logically Collective
1420 
1421    Input Arguments:
1422 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1423 .  refinehook - function to run when setting up a coarser level
1424 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1425 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1426 
1427    Calling sequence of refinehook:
1428 $    refinehook(DM coarse,DM fine,void *ctx);
1429 
1430 +  coarse - coarse level DM
1431 .  fine - fine level DM to interpolate problem to
1432 -  ctx - optional user-defined function context
1433 
1434    Calling sequence for interphook:
1435 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1436 
1437 +  coarse - coarse level DM
1438 .  interp - matrix interpolating a coarse-level solution to the finer grid
1439 .  fine - fine level DM to update
1440 -  ctx - optional user-defined function context
1441 
1442    Level: advanced
1443 
1444    Notes:
1445    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1446 
1447    If this function is called multiple times, the hooks will be run in the order they are added.
1448 
1449 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1450 @*/
1451 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1452 {
1453   PetscErrorCode   ierr;
1454   DMRefineHookLink link,*p;
1455 
1456   PetscFunctionBegin;
1457   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1458   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1459   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1460   link->refinehook = refinehook;
1461   link->interphook = interphook;
1462   link->ctx        = ctx;
1463   link->next       = NULL;
1464   *p               = link;
1465   PetscFunctionReturn(0);
1466 }
1467 
1468 #undef __FUNCT__
1469 #define __FUNCT__ "DMInterpolate"
1470 /*@
1471    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1472 
1473    Collective if any hooks are
1474 
1475    Input Arguments:
1476 +  coarse - coarser DM to use as a base
1477 .  restrct - interpolation matrix, apply using MatInterpolate()
1478 -  fine - finer DM to update
1479 
1480    Level: developer
1481 
1482 .seealso: DMRefineHookAdd(), MatInterpolate()
1483 @*/
1484 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1485 {
1486   PetscErrorCode   ierr;
1487   DMRefineHookLink link;
1488 
1489   PetscFunctionBegin;
1490   for (link=fine->refinehook; link; link=link->next) {
1491     if (link->interphook) {
1492       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1493     }
1494   }
1495   PetscFunctionReturn(0);
1496 }
1497 
1498 #undef __FUNCT__
1499 #define __FUNCT__ "DMGetRefineLevel"
1500 /*@
1501     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1502 
1503     Not Collective
1504 
1505     Input Parameter:
1506 .   dm - the DM object
1507 
1508     Output Parameter:
1509 .   level - number of refinements
1510 
1511     Level: developer
1512 
1513 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1514 
1515 @*/
1516 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1517 {
1518   PetscFunctionBegin;
1519   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1520   *level = dm->levelup;
1521   PetscFunctionReturn(0);
1522 }
1523 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks so they run in registration order */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1581 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks (registered with DMGlobalToLocalHookAdd()) first. */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The default-PetscSF path supports only insertion. */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before the matching PetscSFBcastEnd()
       in DMGlobalToLocalEnd(), which re-acquires them; presumably the SF keeps
       using the raw pointers across begin/end -- confirm. */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto plain modes for the type-specific routine. */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1629 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The default-PetscSF path supports only insertion. */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto plain modes for the type-specific routine. */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks after the communication has completed. */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1675 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Translate the InsertMode into the MPI reduction used by the SF. */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* SETERRQ returns from the function, so there is no fallthrough here. */
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto plain modes for the type-specific routine. */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1735 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd"
/*@
    DMLocalToGlobalEnd - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - INSERT_VALUES or ADD_VALUES
-   g - the global vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()

@*/
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Must match the op chosen in DMLocalToGlobalBegin() for the same mode. */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* SETERRQ returns from the function, so there is no fallthrough here. */
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto plain modes for the type-specific routine. */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1791 
1792 #undef __FUNCT__
1793 #define __FUNCT__ "DMCoarsen"
1794 /*@
1795     DMCoarsen - Coarsens a DM object
1796 
1797     Collective on DM
1798 
1799     Input Parameter:
1800 +   dm - the DM object
1801 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1802 
1803     Output Parameter:
1804 .   dmc - the coarsened DM
1805 
1806     Level: developer
1807 
1808 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1809 
1810 @*/
1811 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1812 {
1813   PetscErrorCode    ierr;
1814   DMCoarsenHookLink link;
1815 
1816   PetscFunctionBegin;
1817   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1818   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1819   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1820   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1821   (*dmc)->ctx               = dm->ctx;
1822   (*dmc)->levelup           = dm->levelup;
1823   (*dmc)->leveldown         = dm->leveldown + 1;
1824   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1825   for (link=dm->coarsenhook; link; link=link->next) {
1826     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1827   }
1828   PetscFunctionReturn(0);
1829 }
1830 
1831 #undef __FUNCT__
1832 #define __FUNCT__ "DMCoarsenHookAdd"
1833 /*@
1834    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1835 
1836    Logically Collective
1837 
1838    Input Arguments:
1839 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1840 .  coarsenhook - function to run when setting up a coarser level
1841 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1842 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1843 
1844    Calling sequence of coarsenhook:
1845 $    coarsenhook(DM fine,DM coarse,void *ctx);
1846 
1847 +  fine - fine level DM
1848 .  coarse - coarse level DM to restrict problem to
1849 -  ctx - optional user-defined function context
1850 
1851    Calling sequence for restricthook:
1852 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1853 
1854 +  fine - fine level DM
1855 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1856 .  rscale - scaling vector for restriction
1857 .  inject - matrix restricting by injection
1858 .  coarse - coarse level DM to update
1859 -  ctx - optional user-defined function context
1860 
1861    Level: advanced
1862 
1863    Notes:
1864    This function is only needed if auxiliary data needs to be set up on coarse grids.
1865 
1866    If this function is called multiple times, the hooks will be run in the order they are added.
1867 
1868    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1869    extract the finest level information from its context (instead of from the SNES).
1870 
1871 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1872 @*/
1873 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1874 {
1875   PetscErrorCode    ierr;
1876   DMCoarsenHookLink link,*p;
1877 
1878   PetscFunctionBegin;
1879   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1880   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1881   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1882   link->coarsenhook  = coarsenhook;
1883   link->restricthook = restricthook;
1884   link->ctx          = ctx;
1885   link->next         = NULL;
1886   *p                 = link;
1887   PetscFunctionReturn(0);
1888 }
1889 
1890 #undef __FUNCT__
1891 #define __FUNCT__ "DMRestrict"
1892 /*@
1893    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1894 
1895    Collective if any hooks are
1896 
1897    Input Arguments:
1898 +  fine - finer DM to use as a base
1899 .  restrct - restriction matrix, apply using MatRestrict()
1900 .  inject - injection matrix, also use MatRestrict()
1901 -  coarse - coarer DM to update
1902 
1903    Level: developer
1904 
1905 .seealso: DMCoarsenHookAdd(), MatRestrict()
1906 @*/
1907 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1908 {
1909   PetscErrorCode    ierr;
1910   DMCoarsenHookLink link;
1911 
1912   PetscFunctionBegin;
1913   for (link=fine->coarsenhook; link; link=link->next) {
1914     if (link->restricthook) {
1915       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
1916     }
1917   }
1918   PetscFunctionReturn(0);
1919 }
1920 
#undef __FUNCT__
#define __FUNCT__ "DMSubDomainHookAdd"
/*@
   DMSubDomainHookAdd - adds a callback to be run when restricting a problem to subdomain (block) DMs

   Logically Collective

   Input Arguments:
+  global - global DM
.  ddhook - function to run when a subdomain (block) DM is created from the global DM
.  restricthook - function to run to update data on block solve (at the beginning of the block solve)
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for restricthook:
$    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)

+  global - global DM
.  out    - scatter to the outer (with ghost and overlap points) block vector
.  in     - scatter to block vector values only owned locally
.  block  - block DM (may just be a shell if the global DM is passed in correctly)
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on subdomain DMs.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the global information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(global,DM_CLASSID,1);
  for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
  link->restricthook = restricthook;
  link->ddhook       = ddhook;
  link->ctx          = ctx;
  link->next         = NULL;
  *p                 = link;
  PetscFunctionReturn(0);
}
1970 
1971 #undef __FUNCT__
1972 #define __FUNCT__ "DMSubDomainRestrict"
1973 /*@
1974    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1975 
1976    Collective if any hooks are
1977 
1978    Input Arguments:
1979 +  fine - finer DM to use as a base
1980 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
1981 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
1982 -  coarse - coarer DM to update
1983 
1984    Level: developer
1985 
1986 .seealso: DMCoarsenHookAdd(), MatRestrict()
1987 @*/
1988 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
1989 {
1990   PetscErrorCode      ierr;
1991   DMSubDomainHookLink link;
1992 
1993   PetscFunctionBegin;
1994   for (link=global->subdomainhook; link; link=link->next) {
1995     if (link->restricthook) {
1996       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
1997     }
1998   }
1999   PetscFunctionReturn(0);
2000 }
2001 
2002 #undef __FUNCT__
2003 #define __FUNCT__ "DMGetCoarsenLevel"
2004 /*@
2005     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2006 
2007     Not Collective
2008 
2009     Input Parameter:
2010 .   dm - the DM object
2011 
2012     Output Parameter:
2013 .   level - number of coarsenings
2014 
2015     Level: developer
2016 
2017 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2018 
2019 @*/
2020 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2021 {
2022   PetscFunctionBegin;
2023   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2024   *level = dm->leveldown;
2025   PetscFunctionReturn(0);
2026 }
2027 
2028 
2029 
2030 #undef __FUNCT__
2031 #define __FUNCT__ "DMRefineHierarchy"
2032 /*@C
2033     DMRefineHierarchy - Refines a DM object, all levels at once
2034 
2035     Collective on DM
2036 
2037     Input Parameter:
2038 +   dm - the DM object
2039 -   nlevels - the number of levels of refinement
2040 
2041     Output Parameter:
2042 .   dmf - the refined DM hierarchy
2043 
2044     Level: developer
2045 
2046 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2047 
2048 @*/
2049 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2050 {
2051   PetscErrorCode ierr;
2052 
2053   PetscFunctionBegin;
2054   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2055   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2056   if (nlevels == 0) PetscFunctionReturn(0);
2057   if (dm->ops->refinehierarchy) {
2058     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2059   } else if (dm->ops->refine) {
2060     PetscInt i;
2061 
2062     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2063     for (i=1; i<nlevels; i++) {
2064       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2065     }
2066   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2067   PetscFunctionReturn(0);
2068 }
2069 
2070 #undef __FUNCT__
2071 #define __FUNCT__ "DMCoarsenHierarchy"
2072 /*@C
2073     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2074 
2075     Collective on DM
2076 
2077     Input Parameter:
2078 +   dm - the DM object
2079 -   nlevels - the number of levels of coarsening
2080 
2081     Output Parameter:
2082 .   dmc - the coarsened DM hierarchy
2083 
2084     Level: developer
2085 
2086 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2087 
2088 @*/
2089 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2090 {
2091   PetscErrorCode ierr;
2092 
2093   PetscFunctionBegin;
2094   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2095   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2096   if (nlevels == 0) PetscFunctionReturn(0);
2097   PetscValidPointer(dmc,3);
2098   if (dm->ops->coarsenhierarchy) {
2099     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2100   } else if (dm->ops->coarsen) {
2101     PetscInt i;
2102 
2103     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2104     for (i=1; i<nlevels; i++) {
2105       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2106     }
2107   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2108   PetscFunctionReturn(0);
2109 }
2110 
2111 #undef __FUNCT__
2112 #define __FUNCT__ "DMCreateAggregates"
2113 /*@
2114    DMCreateAggregates - Gets the aggregates that map between
2115    grids associated with two DMs.
2116 
2117    Collective on DM
2118 
2119    Input Parameters:
2120 +  dmc - the coarse grid DM
2121 -  dmf - the fine grid DM
2122 
2123    Output Parameters:
2124 .  rest - the restriction matrix (transpose of the projection matrix)
2125 
2126    Level: intermediate
2127 
2128 .keywords: interpolation, restriction, multigrid
2129 
2130 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2131 @*/
2132 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2133 {
2134   PetscErrorCode ierr;
2135 
2136   PetscFunctionBegin;
2137   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2138   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2139   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2140   PetscFunctionReturn(0);
2141 }
2142 
2143 #undef __FUNCT__
2144 #define __FUNCT__ "DMSetApplicationContextDestroy"
2145 /*@C
2146     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2147 
2148     Not Collective
2149 
2150     Input Parameters:
2151 +   dm - the DM object
2152 -   destroy - the destroy function
2153 
2154     Level: intermediate
2155 
2156 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2157 
2158 @*/
2159 PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
2160 {
2161   PetscFunctionBegin;
2162   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2163   dm->ctxdestroy = destroy;
2164   PetscFunctionReturn(0);
2165 }
2166 
2167 #undef __FUNCT__
2168 #define __FUNCT__ "DMSetApplicationContext"
2169 /*@
2170     DMSetApplicationContext - Set a user context into a DM object
2171 
2172     Not Collective
2173 
2174     Input Parameters:
2175 +   dm - the DM object
2176 -   ctx - the user context
2177 
2178     Level: intermediate
2179 
2180 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2181 
2182 @*/
2183 PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
2184 {
2185   PetscFunctionBegin;
2186   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2187   dm->ctx = ctx;
2188   PetscFunctionReturn(0);
2189 }
2190 
2191 #undef __FUNCT__
2192 #define __FUNCT__ "DMGetApplicationContext"
2193 /*@
2194     DMGetApplicationContext - Gets a user context from a DM object
2195 
2196     Not Collective
2197 
2198     Input Parameter:
2199 .   dm - the DM object
2200 
2201     Output Parameter:
2202 .   ctx - the user context
2203 
2204     Level: intermediate
2205 
2206 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2207 
2208 @*/
2209 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2210 {
2211   PetscFunctionBegin;
2212   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2213   *(void**)ctx = dm->ctx;
2214   PetscFunctionReturn(0);
2215 }
2216 
2217 #undef __FUNCT__
2218 #define __FUNCT__ "DMSetVariableBounds"
2219 /*@C
2220     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2221 
2222     Logically Collective on DM
2223 
2224     Input Parameter:
2225 +   dm - the DM object
2226 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2227 
2228     Level: intermediate
2229 
2230 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2231          DMSetJacobian()
2232 
2233 @*/
2234 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2235 {
2236   PetscFunctionBegin;
2237   dm->ops->computevariablebounds = f;
2238   PetscFunctionReturn(0);
2239 }
2240 
2241 #undef __FUNCT__
2242 #define __FUNCT__ "DMHasVariableBounds"
2243 /*@
2244     DMHasVariableBounds - does the DM object have a variable bounds function?
2245 
2246     Not Collective
2247 
2248     Input Parameter:
2249 .   dm - the DM object to destroy
2250 
2251     Output Parameter:
2252 .   flg - PETSC_TRUE if the variable bounds function exists
2253 
2254     Level: developer
2255 
2256 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2257 
2258 @*/
2259 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2260 {
2261   PetscFunctionBegin;
2262   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2263   PetscFunctionReturn(0);
2264 }
2265 
2266 #undef __FUNCT__
2267 #define __FUNCT__ "DMComputeVariableBounds"
2268 /*@C
2269     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2270 
2271     Logically Collective on DM
2272 
2273     Input Parameters:
2274 +   dm - the DM object to destroy
2275 -   x  - current solution at which the bounds are computed
2276 
2277     Output parameters:
2278 +   xl - lower bound
2279 -   xu - upper bound
2280 
2281     Level: intermediate
2282 
2283 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2284 
2285 @*/
2286 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2287 {
2288   PetscErrorCode ierr;
2289 
2290   PetscFunctionBegin;
2291   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2292   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2293   if (dm->ops->computevariablebounds) {
2294     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2295   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2296   PetscFunctionReturn(0);
2297 }
2298 
2299 #undef __FUNCT__
2300 #define __FUNCT__ "DMHasColoring"
2301 /*@
2302     DMHasColoring - does the DM object have a method of providing a coloring?
2303 
2304     Not Collective
2305 
2306     Input Parameter:
2307 .   dm - the DM object
2308 
2309     Output Parameter:
2310 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2311 
2312     Level: developer
2313 
2314 .seealso DMHasFunction(), DMCreateColoring()
2315 
2316 @*/
2317 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2318 {
2319   PetscFunctionBegin;
2320   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2321   PetscFunctionReturn(0);
2322 }
2323 
2324 #undef  __FUNCT__
2325 #define __FUNCT__ "DMSetVec"
2326 /*@C
2327     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2328 
2329     Collective on DM
2330 
2331     Input Parameter:
2332 +   dm - the DM object
2333 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2334 
2335     Level: developer
2336 
2337 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2338 
2339 @*/
2340 PetscErrorCode  DMSetVec(DM dm,Vec x)
2341 {
2342   PetscErrorCode ierr;
2343 
2344   PetscFunctionBegin;
2345   if (x) {
2346     if (!dm->x) {
2347       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2348     }
2349     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2350   } else if (dm->x) {
2351     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2352   }
2353   PetscFunctionReturn(0);
2354 }
2355 
/* Registry of available DM implementations, populated by DMRegister()/DMRegisterAll() */
PetscFunctionList DMList              = NULL;
/* Set once DMRegisterAll() has run, so built-in types are only registered once */
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2358 
2359 #undef __FUNCT__
2360 #define __FUNCT__ "DMSetType"
2361 /*@C
2362   DMSetType - Builds a DM, for a particular DM implementation.
2363 
2364   Collective on DM
2365 
2366   Input Parameters:
2367 + dm     - The DM object
2368 - method - The name of the DM type
2369 
2370   Options Database Key:
2371 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2372 
2373   Notes:
2374   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2375 
2376   Level: intermediate
2377 
2378 .keywords: DM, set, type
2379 .seealso: DMGetType(), DMCreate()
2380 @*/
2381 PetscErrorCode  DMSetType(DM dm, DMType method)
2382 {
2383   PetscErrorCode (*r)(DM);
2384   PetscBool      match;
2385   PetscErrorCode ierr;
2386 
2387   PetscFunctionBegin;
2388   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2389   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2390   if (match) PetscFunctionReturn(0);
2391 
2392   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(NULL);CHKERRQ(ierr);}
2393   ierr = PetscFunctionListFind(PetscObjectComm((PetscObject)dm), DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2394   if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2395 
2396   if (dm->ops->destroy) {
2397     ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2398     dm->ops->destroy = NULL;
2399   }
2400   ierr = (*r)(dm);CHKERRQ(ierr);
2401   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2402   PetscFunctionReturn(0);
2403 }
2404 
2405 #undef __FUNCT__
2406 #define __FUNCT__ "DMGetType"
2407 /*@C
2408   DMGetType - Gets the DM type name (as a string) from the DM.
2409 
2410   Not Collective
2411 
2412   Input Parameter:
2413 . dm  - The DM
2414 
2415   Output Parameter:
2416 . type - The DM type name
2417 
2418   Level: intermediate
2419 
2420 .keywords: DM, get, type, name
2421 .seealso: DMSetType(), DMCreate()
2422 @*/
2423 PetscErrorCode  DMGetType(DM dm, DMType *type)
2424 {
2425   PetscErrorCode ierr;
2426 
2427   PetscFunctionBegin;
2428   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2429   PetscValidCharPointer(type,2);
2430   if (!DMRegisterAllCalled) {
2431     ierr = DMRegisterAll(NULL);CHKERRQ(ierr);
2432   }
2433   *type = ((PetscObject)dm)->type_name;
2434   PetscFunctionReturn(0);
2435 }
2436 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but not consulted before the converter search below */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Query the composed function "DMConvert_<from>_<to>_C" on the source DM */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* Instantiate a throwaway DM of the target type just to query its composed functions */
    ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      /* The throwaway DM is no longer needed once a converter has been found */
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No converter found: fail rather than attempt a lossy generic copy */
    SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    /* Run the selected converter under the DM_Convert logging event */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  /* Mark the new DM as modified so cached data dependent on it is refreshed */
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2532 
2533 /*--------------------------------------------------------------------------------------------------------------------*/
2534 
2535 #undef __FUNCT__
2536 #define __FUNCT__ "DMRegister"
2537 /*@C
2538   DMRegister - See DMRegisterDynamic()
2539 
2540   Level: advanced
2541 @*/
2542 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2543 {
2544   char           fullname[PETSC_MAX_PATH_LEN];
2545   PetscErrorCode ierr;
2546 
2547   PetscFunctionBegin;
2548   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2549   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2550   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2551   ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void))function);CHKERRQ(ierr);
2552   PetscFunctionReturn(0);
2553 }
2554 
2555 
2556 /*--------------------------------------------------------------------------------------------------------------------*/
2557 #undef __FUNCT__
2558 #define __FUNCT__ "DMRegisterDestroy"
2559 /*@C
2560    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2561 
2562    Not Collective
2563 
2564    Level: advanced
2565 
2566 .keywords: DM, register, destroy
2567 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2568 @*/
2569 PetscErrorCode  DMRegisterDestroy(void)
2570 {
2571   PetscErrorCode ierr;
2572 
2573   PetscFunctionBegin;
2574   ierr                = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
2575   DMRegisterAllCalled = PETSC_FALSE;
2576   PetscFunctionReturn(0);
2577 }
2578 
2579 #undef __FUNCT__
2580 #define __FUNCT__ "DMLoad"
2581 /*@C
2582   DMLoad - Loads a DM that has been stored in binary  with DMView().
2583 
2584   Collective on PetscViewer
2585 
2586   Input Parameters:
2587 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2588            some related function before a call to DMLoad().
2589 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2590            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2591 
2592    Level: intermediate
2593 
2594   Notes:
2595    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2596 
2597   Notes for advanced users:
2598   Most users should not need to know the details of the binary storage
2599   format, since DMLoad() and DMView() completely hide these details.
2600   But for anyone who's interested, the standard binary matrix storage
2601   format is
2602 .vb
2603      has not yet been determined
2604 .ve
2605 
2606 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2607 @*/
2608 PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
2609 {
2610   PetscErrorCode ierr;
2611   PetscBool      isbinary;
2612   PetscInt       classid;
2613   char           type[256];
2614 
2615   PetscFunctionBegin;
2616   PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
2617   PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
2618   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
2619   if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");
2620 
2621   ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
2622   if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
2623   ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
2624   ierr = DMSetType(newdm, type);CHKERRQ(ierr);
2625   if (newdm->ops->load) {
2626     ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
2627   }
2628   PetscFunctionReturn(0);
2629 }
2630 
2631 /******************************** FEM Support **********************************/
2632 
2633 #undef __FUNCT__
2634 #define __FUNCT__ "DMPrintCellVector"
2635 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2636 {
2637   PetscInt       f;
2638   PetscErrorCode ierr;
2639 
2640   PetscFunctionBegin;
2641   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2642   for (f = 0; f < len; ++f) {
2643     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2644   }
2645   PetscFunctionReturn(0);
2646 }
2647 
2648 #undef __FUNCT__
2649 #define __FUNCT__ "DMPrintCellMatrix"
2650 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2651 {
2652   PetscInt       f, g;
2653   PetscErrorCode ierr;
2654 
2655   PetscFunctionBegin;
2656   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2657   for (f = 0; f < rows; ++f) {
2658     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2659     for (g = 0; g < cols; ++g) {
2660       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2661     }
2662     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2663   }
2664   PetscFunctionReturn(0);
2665 }
2666 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultSection"
/*@
  DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection, or NULL if no section has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
@*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2692 
2693 #undef __FUNCT__
2694 #define __FUNCT__ "DMSetDefaultSection"
2695 /*@
2696   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2697 
2698   Input Parameters:
2699 + dm - The DM
2700 - section - The PetscSection
2701 
2702   Level: intermediate
2703 
2704   Note: Any existing Section will be destroyed
2705 
2706 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2707 @*/
2708 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
2709 {
2710   PetscInt       numFields;
2711   PetscInt       f;
2712   PetscErrorCode ierr;
2713 
2714   PetscFunctionBegin;
2715   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2716   ierr               = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2717   ierr               = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2718   dm->defaultSection = section;
2719   ierr               = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2720   if (numFields) {
2721     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2722     for (f = 0; f < numFields; ++f) {
2723       const char *name;
2724 
2725       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2726       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2727     }
2728   }
2729   PetscFunctionReturn(0);
2730 }
2731 
2732 #undef __FUNCT__
2733 #define __FUNCT__ "DMGetDefaultGlobalSection"
2734 /*@
2735   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2736 
2737   Collective on DM
2738 
2739   Input Parameter:
2740 . dm - The DM
2741 
2742   Output Parameter:
2743 . section - The PetscSection
2744 
2745   Level: intermediate
2746 
2747   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2748 
2749 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2750 @*/
2751 PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
2752 {
2753   PetscErrorCode ierr;
2754 
2755   PetscFunctionBegin;
2756   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2757   PetscValidPointer(section, 2);
2758   if (!dm->defaultGlobalSection) {
2759     if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
2760     ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
2761     ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
2762   }
2763   *section = dm->defaultGlobalSection;
2764   PetscFunctionReturn(0);
2765 }
2766 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultGlobalSection"
/*@
  DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.

  Input Parameters:
+ dm - The DM
- section - The PetscSection

  Level: intermediate

  Notes: Any existing Section will be destroyed. NOTE(review): no reference is taken on
  section here, so the DM appears to assume ownership of the caller's reference -- confirm
  this is the intended convention before destroying section in the caller.

.seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
@*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Destroy any previously stored global section before installing the new one */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);

  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
2793 
2794 #undef __FUNCT__
2795 #define __FUNCT__ "DMGetDefaultSF"
2796 /*@
2797   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2798   it is created from the default PetscSection layouts in the DM.
2799 
2800   Input Parameter:
2801 . dm - The DM
2802 
2803   Output Parameter:
2804 . sf - The PetscSF
2805 
2806   Level: intermediate
2807 
2808   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2809 
2810 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2811 @*/
2812 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
2813 {
2814   PetscInt       nroots;
2815   PetscErrorCode ierr;
2816 
2817   PetscFunctionBegin;
2818   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2819   PetscValidPointer(sf, 2);
2820   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
2821   if (nroots < 0) {
2822     PetscSection section, gSection;
2823 
2824     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2825     if (section) {
2826       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2827       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2828     } else {
2829       *sf = NULL;
2830       PetscFunctionReturn(0);
2831     }
2832   }
2833   *sf = dm->defaultSF;
2834   PetscFunctionReturn(0);
2835 }
2836 
#undef __FUNCT__
#define __FUNCT__ "DMSetDefaultSF"
/*@
  DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM

  Input Parameters:
+ dm - The DM
- sf - The PetscSF

  Level: intermediate

  Notes: Any previous SF is destroyed. NOTE(review): unlike DMSetPointSF(), no reference
  is taken on sf here, so the DM assumes ownership of the caller's reference -- confirm
  this asymmetry is intended.

.seealso: DMGetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2863 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

  Note: The resulting SF graph is installed into the DM's default SF, which can be
  retrieved with DMGetDefaultSF().

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm = ((PetscObject) dm)->comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  /* Roots are the locally owned unconstrained dofs; build a PetscLayout so a global
     offset can be mapped back to its owning rank via the range array */
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative global dof count gdof encodes a point owned
     by another process as -(gdof+1); constrained dofs are excluded in both cases */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill in leaf (local dof index) and root (owner rank, root offset) pairs */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the local dof indices, skipping the constrained dofs listed in cind */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset from the encoded -(goff+1)
         and locate the owner rank by searching the layout ranges */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact match: convert insert location to owning rank */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the root lives on this rank */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of the local and remote arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2952 
#undef __FUNCT__
#define __FUNCT__ "DMGetPointSF"
/*@
  DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.

  Not Collective

  Input Parameter:
. dm - The DM

  Output Parameter:
. sf - The PetscSF, or NULL if none has been set

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSF.

.seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
@*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
2978 
2979 #undef __FUNCT__
2980 #define __FUNCT__ "DMSetPointSF"
2981 /*@
2982   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2983 
2984   Input Parameters:
2985 + dm - The DM
2986 - sf - The PetscSF
2987 
2988   Level: intermediate
2989 
2990 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2991 @*/
2992 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
2993 {
2994   PetscErrorCode ierr;
2995 
2996   PetscFunctionBegin;
2997   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2998   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
2999   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3000   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3001   dm->sf = sf;
3002   PetscFunctionReturn(0);
3003 }
3004 
#undef __FUNCT__
#define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - Return the number of field objects currently stored on the DM
   (as set by DMSetNumFields() or derived from the default section). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3015 
3016 #undef __FUNCT__
3017 #define __FUNCT__ "DMSetNumFields"
3018 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3019 {
3020   PetscInt       f;
3021   PetscErrorCode ierr;
3022 
3023   PetscFunctionBegin;
3024   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3025   for (f = 0; f < dm->numFields; ++f) {
3026     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3027   }
3028   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3029   dm->numFields = numFields;
3030   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3031   for (f = 0; f < dm->numFields; ++f) {
3032     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3033   }
3034   PetscFunctionReturn(0);
3035 }
3036 
3037 #undef __FUNCT__
3038 #define __FUNCT__ "DMGetField"
3039 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3040 {
3041   PetscFunctionBegin;
3042   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3043   PetscValidPointer(field, 2);
3044   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3045   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3046   *field = dm->fields[f];
3047   PetscFunctionReturn(0);
3048 }
3049 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference c before destroying the old vector so that c == dm->coordinates is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; DMGetCoordinatesLocal() rebuilds them lazily */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3082 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinatesLocal"
/*@
  DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates

  Collective on DM

   Input Parameters:
+  dm - the DM
-  c - coordinate vector

  Note:
  The coordinates of ghost points can be set using DMSetCoordinates()
  followed by DMGetCoordinatesLocal(). This is intended to enable the
  setting of ghost coordinates outside of the domain.

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference c before destroying the old vector so that c == dm->coordinatesLocal is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; DMGetCoordinates() rebuilds them lazily */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3119 
3120 #undef __FUNCT__
3121 #define __FUNCT__ "DMGetCoordinates"
3122 /*@
3123   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3124 
3125   Not Collective
3126 
3127   Input Parameter:
3128 . dm - the DM
3129 
3130   Output Parameter:
3131 . c - global coordinate vector
3132 
3133   Note:
3134   This is a borrowed reference, so the user should NOT destroy this vector
3135 
3136   Each process has only the local coordinates (does NOT have the ghost coordinates).
3137 
3138   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3139   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3140 
3141   Level: intermediate
3142 
3143 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3144 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3145 @*/
3146 PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
3147 {
3148   PetscErrorCode ierr;
3149 
3150   PetscFunctionBegin;
3151   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3152   PetscValidPointer(c,2);
3153   if (!dm->coordinates && dm->coordinatesLocal) {
3154     DM cdm = NULL;
3155 
3156     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3157     ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
3158     ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
3159     ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3160     ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
3161   }
3162   *c = dm->coordinates;
3163   PetscFunctionReturn(0);
3164 }
3165 
3166 #undef __FUNCT__
3167 #define __FUNCT__ "DMGetCoordinatesLocal"
3168 /*@
3169   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3170 
3171   Collective on DM
3172 
3173   Input Parameter:
3174 . dm - the DM
3175 
3176   Output Parameter:
3177 . c - coordinate vector
3178 
3179   Note:
3180   This is a borrowed reference, so the user should NOT destroy this vector
3181 
3182   Each process has the local and ghost coordinates
3183 
3184   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3185   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3186 
3187   Level: intermediate
3188 
3189 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3190 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3191 @*/
3192 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3193 {
3194   PetscErrorCode ierr;
3195 
3196   PetscFunctionBegin;
3197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3198   PetscValidPointer(c,2);
3199   if (!dm->coordinatesLocal && dm->coordinates) {
3200     DM cdm = NULL;
3201 
3202     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3203     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3204     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3205     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3206     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3207   }
3208   *c = dm->coordinatesLocal;
3209   PetscFunctionReturn(0);
3210 }
3211 
3212 #undef __FUNCT__
3213 #define __FUNCT__ "DMGetCoordinateDM"
3214 /*@
3215   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3216 
3217   Collective on DM
3218 
3219   Input Parameter:
3220 . dm - the DM
3221 
3222   Output Parameter:
3223 . cdm - coordinate DM
3224 
3225   Level: intermediate
3226 
3227 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3228 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3229 @*/
3230 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3231 {
3232   PetscErrorCode ierr;
3233 
3234   PetscFunctionBegin;
3235   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3236   PetscValidPointer(cdm,2);
3237   if (!dm->coordinateDM) {
3238     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3239     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3240   }
3241   *cdm = dm->coordinateDM;
3242   PetscFunctionReturn(0);
3243 }
3244 
3245 #undef __FUNCT__
3246 #define __FUNCT__ "DMLocatePoints"
3247 /*@
3248   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3249 
3250   Not collective
3251 
3252   Input Parameters:
3253 + dm - The DM
3254 - v - The Vec of points
3255 
3256   Output Parameter:
3257 . cells - The local cell numbers for cells which contain the points
3258 
3259   Level: developer
3260 
3261 .keywords: point location, mesh
3262 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3263 @*/
3264 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3265 {
3266   PetscErrorCode ierr;
3267 
3268   PetscFunctionBegin;
3269   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3270   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3271   PetscValidPointer(cells,3);
3272   if (dm->ops->locatepoints) {
3273     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3274   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Point location not available for this DM");
3275   PetscFunctionReturn(0);
3276 }
3277