xref: /petsc/src/dm/interface/dm.c (revision bb9467b5cce22331205e6efb0a76dc6fc9053ba2)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
/* Class id identifying DM objects and the profiling events for DM operations;
   presumably registered during DMInitializePackage() (called from DMCreate()
   below) -- confirm in the package-initialization source. */
PetscClassId  DM_CLASSID;
PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the vector.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
#if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
  /* Static builds must initialize the Vec/Mat/DM packages explicitly before use */
  ierr = VecInitializePackage(NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(NULL);CHKERRQ(ierr);
#endif

  /* Allocate the object header plus the function table, then zero the table so
     that unimplemented operations are detectable as NULL function pointers */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap              = NULL;
  v->ltogmapb             = NULL;
  v->bs                   = 1;                  /* default: no exploitable block structure */
  v->coloringtype         = IS_COLORING_GLOBAL;
  ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): 10 appears to be the fixed capacity of nullspaceConstructors
       (DMSetNullSpaceConstructor() rejects field >= 10) -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMSetVecType"
93 /*@C
94        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
95 
96    Logically Collective on DMDA
97 
98    Input Parameter:
99 +  da - initial distributed array
100 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
101 
102    Options Database:
103 .   -dm_vec_type ctype
104 
105    Level: intermediate
106 
107 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
108 @*/
109 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   PetscValidHeaderSpecific(da,DM_CLASSID,1);
115   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
116   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "VecGetDM"
122 /*@
123   VecGetDM - Gets the DM defining the data layout of the vector
124 
125   Not collective
126 
127   Input Parameter:
128 . v - The Vec
129 
130   Output Parameter:
131 . dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecGetDM(Vec v, DM *dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidPointer(dm,2);
144   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #undef __FUNCT__
175 #define __FUNCT__ "DMSetMatType"
176 /*@C
177        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
178 
179    Logically Collective on DM
180 
181    Input Parameter:
182 +  dm - the DM context
183 .  ctype - the matrix type
184 
185    Options Database:
186 .   -dm_mat_type ctype
187 
188    Level: intermediate
189 
190 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
191 @*/
192 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
193 {
194   PetscErrorCode ierr;
195 
196   PetscFunctionBegin;
197   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
198   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
199   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
200   PetscFunctionReturn(0);
201 }
202 
203 #undef __FUNCT__
204 #define __FUNCT__ "MatGetDM"
205 /*@
206   MatGetDM - Gets the DM defining the data layout of the matrix
207 
208   Not collective
209 
210   Input Parameter:
211 . A - The Mat
212 
213   Output Parameter:
214 . dm - The DM
215 
216   Level: intermediate
217 
218 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
219 @*/
220 PetscErrorCode MatGetDM(Mat A, DM *dm)
221 {
222   PetscErrorCode ierr;
223 
224   PetscFunctionBegin;
225   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
226   PetscValidPointer(dm,2);
227   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
228   PetscFunctionReturn(0);
229 }
230 
231 #undef __FUNCT__
232 #define __FUNCT__ "MatSetDM"
233 /*@
234   MatSetDM - Sets the DM defining the data layout of the matrix
235 
236   Not collective
237 
238   Input Parameters:
239 + A - The Mat
240 - dm - The DM
241 
242   Level: intermediate
243 
244 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
245 @*/
246 PetscErrorCode MatSetDM(Mat A, DM dm)
247 {
248   PetscErrorCode ierr;
249 
250   PetscFunctionBegin;
251   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
252   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
253   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
254   PetscFunctionReturn(0);
255 }
256 
257 #undef __FUNCT__
258 #define __FUNCT__ "DMSetOptionsPrefix"
259 /*@C
260    DMSetOptionsPrefix - Sets the prefix used for searching for all
261    DMDA options in the database.
262 
263    Logically Collective on DMDA
264 
265    Input Parameter:
266 +  da - the DMDA context
267 -  prefix - the prefix to prepend to all option names
268 
269    Notes:
270    A hyphen (-) must NOT be given at the beginning of the prefix name.
271    The first character of all runtime options is AUTOMATICALLY the hyphen.
272 
273    Level: advanced
274 
275 .keywords: DMDA, set, options, prefix, database
276 
277 .seealso: DMSetFromOptions()
278 @*/
279 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
280 {
281   PetscErrorCode ierr;
282 
283   PetscFunctionBegin;
284   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
285   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
286   PetscFunctionReturn(0);
287 }
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMDestroy"
291 /*@
292     DMDestroy - Destroys a vector packer or DMDA.
293 
294     Collective on DM
295 
296     Input Parameter:
297 .   dm - the DM object to destroy
298 
299     Level: developer
300 
301 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
302 
303 @*/
304 PetscErrorCode  DMDestroy(DM *dm)
305 {
306   PetscInt       i, cnt = 0, f;
307   DMNamedVecLink nlink,nnext;
308   PetscErrorCode ierr;
309 
310   PetscFunctionBegin;
311   if (!*dm) PetscFunctionReturn(0);
312   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
313 
314   /* count all the circular references of DM and its contained Vecs */
315   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
316     if ((*dm)->localin[i])  cnt++;
317     if ((*dm)->globalin[i]) cnt++;
318   }
319   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
320   if ((*dm)->x) {
321     DM obj;
322     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
323     if (obj == *dm) cnt++;
324   }
325   for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
326   if ((*dm)->x) {
327     DM obj;
328     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
329     if (obj == *dm) cnt++;
330   }
331 
332   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
333   /*
334      Need this test because the dm references the vectors that
335      reference the dm, so destroying the dm calls destroy on the
336      vectors that cause another destroy on the dm
337   */
338   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
339   ((PetscObject) (*dm))->refct = 0;
340   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
341     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
342     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
343   }
344   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
345     nnext = nlink->next;
346     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
347     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
348     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
349     ierr = PetscFree(nlink);CHKERRQ(ierr);
350   }
351   (*dm)->namedglobal = NULL;
352 
353   for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
354     nnext = nlink->next;
355     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
356     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
357     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
358     ierr = PetscFree(nlink);CHKERRQ(ierr);
359   }
360   (*dm)->namedlocal = NULL;
361 
362   /* Destroy the list of hooks */
363   {
364     DMCoarsenHookLink link,next;
365     for (link=(*dm)->coarsenhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->coarsenhook = NULL;
370   }
371   {
372     DMRefineHookLink link,next;
373     for (link=(*dm)->refinehook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->refinehook = NULL;
378   }
379   {
380     DMSubDomainHookLink link,next;
381     for (link=(*dm)->subdomainhook; link; link=next) {
382       next = link->next;
383       ierr = PetscFree(link);CHKERRQ(ierr);
384     }
385     (*dm)->subdomainhook = NULL;
386   }
387   {
388     DMGlobalToLocalHookLink link,next;
389     for (link=(*dm)->gtolhook; link; link=next) {
390       next = link->next;
391       ierr = PetscFree(link);CHKERRQ(ierr);
392     }
393     (*dm)->gtolhook = NULL;
394   }
395   /* Destroy the work arrays */
396   {
397     DMWorkLink link,next;
398     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
399     for (link=(*dm)->workin; link; link=next) {
400       next = link->next;
401       ierr = PetscFree(link->mem);CHKERRQ(ierr);
402       ierr = PetscFree(link);CHKERRQ(ierr);
403     }
404     (*dm)->workin = NULL;
405   }
406 
407   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
408   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
409   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
410 
411   if ((*dm)->ctx && (*dm)->ctxdestroy) {
412     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
413   }
414   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
415   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
416   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
417   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
418   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
419   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
420   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
421 
422   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
423   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
424   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
425   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
426   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
427 
428   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
429   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
430   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
431 
432   for (f = 0; f < (*dm)->numFields; ++f) {
433     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
434   }
435   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
436   /* if memory was published with AMS then destroy it */
437   ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);
438 
439   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
440   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
441   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
442   PetscFunctionReturn(0);
443 }
444 
445 #undef __FUNCT__
446 #define __FUNCT__ "DMSetUp"
447 /*@
448     DMSetUp - sets up the data structures inside a DM object
449 
450     Collective on DM
451 
452     Input Parameter:
453 .   dm - the DM object to setup
454 
455     Level: developer
456 
457 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
458 
459 @*/
460 PetscErrorCode  DMSetUp(DM dm)
461 {
462   PetscErrorCode ierr;
463 
464   PetscFunctionBegin;
465   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
466   if (dm->setupcalled) PetscFunctionReturn(0);
467   if (dm->ops->setup) {
468     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
469   }
470   dm->setupcalled = PETSC_TRUE;
471   PetscFunctionReturn(0);
472 }
473 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ; /* fallback matrix type when none was set */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  /* Only override the stored vector/matrix types when the option was actually given */
  ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  /* Give the concrete DM implementation a chance to process its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
523 
524 #undef __FUNCT__
525 #define __FUNCT__ "DMView"
526 /*@C
527     DMView - Views a vector packer or DMDA.
528 
529     Collective on DM
530 
531     Input Parameter:
532 +   dm - the DM object to view
533 -   v - the viewer
534 
535     Level: developer
536 
537 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
538 
539 @*/
540 PetscErrorCode  DMView(DM dm,PetscViewer v)
541 {
542   PetscErrorCode ierr;
543   PetscBool      isbinary;
544 
545   PetscFunctionBegin;
546   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
547   if (!v) {
548     ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
549   }
550   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
551   if (isbinary) {
552     PetscInt classid = DM_FILE_CLASSID;
553     char     type[256];
554 
555     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
556     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
557     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
558   }
559   if (dm->ops->view) {
560     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
561   }
562   PetscFunctionReturn(0);
563 }
564 
565 #undef __FUNCT__
566 #define __FUNCT__ "DMCreateGlobalVector"
567 /*@
568     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
569 
570     Collective on DM
571 
572     Input Parameter:
573 .   dm - the DM object
574 
575     Output Parameter:
576 .   vec - the global vector
577 
578     Level: beginner
579 
580 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
581 
582 @*/
583 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
584 {
585   PetscErrorCode ierr;
586 
587   PetscFunctionBegin;
588   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
589   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
590   PetscFunctionReturn(0);
591 }
592 
593 #undef __FUNCT__
594 #define __FUNCT__ "DMCreateLocalVector"
595 /*@
596     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
597 
598     Not Collective
599 
600     Input Parameter:
601 .   dm - the DM object
602 
603     Output Parameter:
604 .   vec - the local vector
605 
606     Level: beginner
607 
608 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
609 
610 @*/
611 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
612 {
613   PetscErrorCode ierr;
614 
615   PetscFunctionBegin;
616   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
617   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
618   PetscFunctionReturn(0);
619 }
620 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  /* The mapping is built lazily on first request and cached in dm->ltogmap */
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the mapping from the default (local) and global PetscSections:
         for each point, each local dof maps to its global offset */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          /* consecutive local dofs of a point map to consecutive global indices */
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
    } else {
      /* No section available: fall back to the type-specific implementation */
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
681 
682 #undef __FUNCT__
683 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
684 /*@
685    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
686 
687    Collective on DM
688 
689    Input Parameter:
690 .  da - the distributed array that provides the mapping
691 
692    Output Parameter:
693 .  ltog - the block mapping
694 
695    Level: intermediate
696 
697    Notes:
698    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
699    MatSetLocalToGlobalMappingBlock().
700 
701 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
702 @*/
703 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
704 {
705   PetscErrorCode ierr;
706 
707   PetscFunctionBegin;
708   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
709   PetscValidPointer(ltog,2);
710   if (!dm->ltogmapb) {
711     PetscInt bs;
712     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
713     if (bs > 1) {
714       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
715       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
716     } else {
717       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
718       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
719     }
720   }
721   *ltog = dm->ltogmapb;
722   PetscFunctionReturn(0);
723 }
724 
725 #undef __FUNCT__
726 #define __FUNCT__ "DMGetBlockSize"
727 /*@
728    DMGetBlockSize - Gets the inherent block size associated with a DM
729 
730    Not Collective
731 
732    Input Parameter:
733 .  dm - the DM with block structure
734 
735    Output Parameter:
736 .  bs - the block size, 1 implies no exploitable block structure
737 
738    Level: intermediate
739 
740 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
741 @*/
742 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
743 {
744   PetscFunctionBegin;
745   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
746   PetscValidPointer(bs,2);
747   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
748   *bs = dm->bs;
749   PetscFunctionReturn(0);
750 }
751 
752 #undef __FUNCT__
753 #define __FUNCT__ "DMCreateInterpolation"
754 /*@
755     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
756 
757     Collective on DM
758 
759     Input Parameter:
760 +   dm1 - the DM object
761 -   dm2 - the second, finer DM object
762 
763     Output Parameter:
764 +  mat - the interpolation
765 -  vec - the scaling (optional)
766 
767     Level: developer
768 
769     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
770         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
771 
772         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
773         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
774 
775 
776 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
777 
778 @*/
779 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
780 {
781   PetscErrorCode ierr;
782 
783   PetscFunctionBegin;
784   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
785   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
786   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
787   PetscFunctionReturn(0);
788 }
789 
790 #undef __FUNCT__
791 #define __FUNCT__ "DMCreateInjection"
792 /*@
793     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
794 
795     Collective on DM
796 
797     Input Parameter:
798 +   dm1 - the DM object
799 -   dm2 - the second, finer DM object
800 
801     Output Parameter:
802 .   ctx - the injection
803 
804     Level: developer
805 
806    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
807         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
808 
809 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
810 
811 @*/
812 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
813 {
814   PetscErrorCode ierr;
815 
816   PetscFunctionBegin;
817   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
818   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
819   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
820   PetscFunctionReturn(0);
821 }
822 
823 #undef __FUNCT__
824 #define __FUNCT__ "DMCreateColoring"
825 /*@C
826     DMCreateColoring - Gets coloring for a DMDA or DMComposite
827 
828     Collective on DM
829 
830     Input Parameter:
831 +   dm - the DM object
832 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
833 -   matype - either MATAIJ or MATBAIJ
834 
835     Output Parameter:
836 .   coloring - the coloring
837 
838     Level: developer
839 
840 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
841 
842 @*/
843 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
844 {
845   PetscErrorCode ierr;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
849   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
850   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
851   PetscFunctionReturn(0);
852 }
853 
854 #undef __FUNCT__
855 #define __FUNCT__ "DMCreateMatrix"
856 /*@C
857     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
858 
859     Collective on DM
860 
861     Input Parameter:
862 +   dm - the DM object
863 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
864             any type which inherits from one of these (such as MATAIJ)
865 
866     Output Parameter:
867 .   mat - the empty Jacobian
868 
869     Level: beginner
870 
871     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
872        do not need to do it yourself.
873 
874        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
875        the nonzero pattern call DMDASetMatPreallocateOnly()
876 
877        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
878        internally by PETSc.
879 
880        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
881        the indices for the global numbering for DMDAs which is complicated.
882 
883 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
884 
885 @*/
886 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
887 {
888   PetscErrorCode ierr;
889 
890   PetscFunctionBegin;
891   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
892 #if !defined(PETSC_USE_DYNAMIC_LIBRARIES)
893   ierr = MatInitializePackage(NULL);CHKERRQ(ierr);
894 #endif
895   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
896   PetscValidPointer(mat,3);
897   if (dm->mattype) {
898     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
899   } else {
900     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
901   }
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
907 /*@
908   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
909     preallocated but the nonzero structure and zero values will not be set.
910 
911   Logically Collective on DMDA
912 
913   Input Parameter:
914 + dm - the DM
915 - only - PETSC_TRUE if only want preallocation
916 
917   Level: developer
918 .seealso DMCreateMatrix()
919 @*/
920 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
921 {
922   PetscFunctionBegin;
923   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
924   dm->prealloc_only = only;
925   PetscFunctionReturn(0);
926 }
927 
928 #undef __FUNCT__
929 #define __FUNCT__ "DMGetWorkArray"
930 /*@C
931   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
932 
933   Not Collective
934 
935   Input Parameters:
936 + dm - the DM object
937 . count - The minium size
938 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
939 
940   Output Parameter:
941 . array - the work array
942 
943   Level: developer
944 
945 .seealso DMDestroy(), DMCreate()
946 @*/
947 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
948 {
949   PetscErrorCode ierr;
950   DMWorkLink     link;
951   size_t         size;
952 
953   PetscFunctionBegin;
954   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
955   PetscValidPointer(mem,4);
956   if (dm->workin) {
957     link       = dm->workin;
958     dm->workin = dm->workin->next;
959   } else {
960     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
961   }
962   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
963   if (size*count > link->bytes) {
964     ierr        = PetscFree(link->mem);CHKERRQ(ierr);
965     ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
966     link->bytes = size*count;
967   }
968   link->next   = dm->workout;
969   dm->workout  = link;
970   *(void**)mem = link->mem;
971   PetscFunctionReturn(0);
972 }
973 
974 #undef __FUNCT__
975 #define __FUNCT__ "DMRestoreWorkArray"
976 /*@C
977   DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
978 
979   Not Collective
980 
981   Input Parameters:
982 + dm - the DM object
983 . count - The minium size
984 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
985 
986   Output Parameter:
987 . array - the work array
988 
989   Level: developer
990 
991 .seealso DMDestroy(), DMCreate()
992 @*/
993 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
994 {
995   DMWorkLink *p,link;
996 
997   PetscFunctionBegin;
998   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
999   PetscValidPointer(mem,4);
1000   for (p=&dm->workout; (link=*p); p=&link->next) {
1001     if (link->mem == *(void**)mem) {
1002       *p           = link->next;
1003       link->next   = dm->workin;
1004       dm->workin   = link;
1005       *(void**)mem = NULL;
1006       PetscFunctionReturn(0);
1007     }
1008   }
1009   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1010   PetscFunctionReturn(0);
1011 }
1012 
1013 #undef __FUNCT__
1014 #define __FUNCT__ "DMSetNullSpaceConstructor"
1015 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1016 {
1017   PetscFunctionBegin;
1018   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1019   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1020   dm->nullspaceConstructors[field] = nullsp;
1021   PetscFunctionReturn(0);
1022 }
1023 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional; validate and zero only the ones the caller requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: derive the field ISs from the local and global PetscSections */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass: count the unconstrained dofs per field over locally-owned points
       (gdof > 0 selects points this process owns in the global section) */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the per-field index arrays, then reset the counters for reuse as fill cursors */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: fill the index arrays; goff walks the global offsets of point p,
       fields ordered within a point exactly as in the section */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      /* Caller frees each name and the array itself with PetscFree() */
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      /* PETSC_OWN_POINTER transfers fieldIndices[f] ownership to the IS; do not free it here */
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    /* No section: defer to the DM implementation, if it provides one */
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1136 
1137 
1138 #undef __FUNCT__
1139 #define __FUNCT__ "DMCreateFieldDecomposition"
1140 /*@C
1141   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1142                           corresponding to different fields: each IS contains the global indices of the dofs of the
1143                           corresponding field. The optional list of DMs define the DM for each subproblem.
1144                           Generalizes DMCreateFieldIS().
1145 
1146   Not collective
1147 
1148   Input Parameter:
1149 . dm - the DM object
1150 
1151   Output Parameters:
1152 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1153 . namelist  - The name for each field (or NULL if not requested)
1154 . islist    - The global indices for each field (or NULL if not requested)
1155 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1156 
1157   Level: intermediate
1158 
1159   Notes:
1160   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1161   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1162   and all of the arrays should be freed with PetscFree().
1163 
1164 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1165 @*/
1166 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1167 {
1168   PetscErrorCode ierr;
1169 
1170   PetscFunctionBegin;
1171   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1172   if (len) {
1173     PetscValidPointer(len,2);
1174     *len = 0;
1175   }
1176   if (namelist) {
1177     PetscValidPointer(namelist,3);
1178     *namelist = 0;
1179   }
1180   if (islist) {
1181     PetscValidPointer(islist,4);
1182     *islist = 0;
1183   }
1184   if (dmlist) {
1185     PetscValidPointer(dmlist,5);
1186     *dmlist = 0;
1187   }
1188   /*
1189    Is it a good idea to apply the following check across all impls?
1190    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1191    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1192    */
1193   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1194   if (!dm->ops->createfielddecomposition) {
1195     PetscSection section;
1196     PetscInt     numFields, f;
1197 
1198     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1199     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1200     if (section && numFields && dm->ops->createsubdm) {
1201       *len = numFields;
1202       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1203       for (f = 0; f < numFields; ++f) {
1204         const char *fieldName;
1205 
1206         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1207         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1208         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1209       }
1210     } else {
1211       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1212       /* By default there are no DMs associated with subproblems. */
1213       if (dmlist) *dmlist = NULL;
1214     }
1215   } else {
1216     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1217   }
1218   PetscFunctionReturn(0);
1219 }
1220 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1256 
1257 
1258 #undef __FUNCT__
1259 #define __FUNCT__ "DMCreateDomainDecomposition"
1260 /*@C
1261   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1262                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1263                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1264                           define a nonoverlapping covering, while outer subdomains can overlap.
1265                           The optional list of DMs define the DM for each subproblem.
1266 
1267   Not collective
1268 
1269   Input Parameter:
1270 . dm - the DM object
1271 
1272   Output Parameters:
1273 + len         - The number of subproblems in the domain decomposition (or NULL if not requested)
1274 . namelist    - The name for each subdomain (or NULL if not requested)
1275 . innerislist - The global indices for each inner subdomain (or NULL, if not requested)
1276 . outerislist - The global indices for each outer subdomain (or NULL, if not requested)
1277 - dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1278 
1279   Level: intermediate
1280 
1281   Notes:
1282   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1283   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1284   and all of the arrays should be freed with PetscFree().
1285 
1286 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1287 @*/
1288 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1289 {
1290   PetscErrorCode      ierr;
1291   DMSubDomainHookLink link;
1292   PetscInt            i,l;
1293 
1294   PetscFunctionBegin;
1295   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1296   if (len)           {PetscValidPointer(len,2);            *len         = 0;}
1297   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
1298   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
1299   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
1300   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
1301   /*
1302    Is it a good idea to apply the following check across all impls?
1303    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1304    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1305    */
1306   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1307   if (dm->ops->createdomaindecomposition) {
1308     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1309     /* copy subdomain hooks and context over to the subdomain DMs */
1310     if (dmlist) {
1311       for (i = 0; i < l; i++) {
1312         for (link=dm->subdomainhook; link; link=link->next) {
1313           if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1314         }
1315         (*dmlist)[i]->ctx = dm->ctx;
1316       }
1317     }
1318     if (len) *len = l;
1319   }
1320   PetscFunctionReturn(0);
1321 }
1322 
1323 
1324 #undef __FUNCT__
1325 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1326 /*@C
1327   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1328 
1329   Not collective
1330 
1331   Input Parameters:
1332 + dm - the DM object
1333 . n  - the number of subdomain scatters
1334 - subdms - the local subdomains
1335 
1336   Output Parameters:
1337 + n     - the number of scatters returned
1338 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1339 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1340 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1341 
1342   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1343   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1344   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1345   solution and residual data.
1346 
1347   Level: developer
1348 
1349 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1350 @*/
1351 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1352 {
1353   PetscErrorCode ierr;
1354 
1355   PetscFunctionBegin;
1356   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1357   PetscValidPointer(subdms,3);
1358   PetscValidPointer(iscat,4);
1359   PetscValidPointer(oscat,5);
1360   PetscValidPointer(gscat,6);
1361   if (dm->ops->createddscatters) {
1362     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1363   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1364   PetscFunctionReturn(0);
1365 }
1366 
1367 #undef __FUNCT__
1368 #define __FUNCT__ "DMRefine"
1369 /*@
1370   DMRefine - Refines a DM object
1371 
1372   Collective on DM
1373 
1374   Input Parameter:
1375 + dm   - the DM object
1376 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1377 
1378   Output Parameter:
1379 . dmf - the refined DM, or NULL
1380 
1381   Note: If no refinement was done, the return value is NULL
1382 
1383   Level: developer
1384 
1385 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1386 @*/
1387 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1388 {
1389   PetscErrorCode   ierr;
1390   DMRefineHookLink link;
1391 
1392   PetscFunctionBegin;
1393   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1394   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1395   if (*dmf) {
1396     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1397 
1398     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1399 
1400     (*dmf)->ctx       = dm->ctx;
1401     (*dmf)->leveldown = dm->leveldown;
1402     (*dmf)->levelup   = dm->levelup + 1;
1403 
1404     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1405     for (link=dm->refinehook; link; link=link->next) {
1406       if (link->refinehook) {
1407         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1408       }
1409     }
1410   }
1411   PetscFunctionReturn(0);
1412 }
1413 
1414 #undef __FUNCT__
1415 #define __FUNCT__ "DMRefineHookAdd"
1416 /*@C
1417    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1418 
1419    Logically Collective
1420 
1421    Input Arguments:
1422 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1423 .  refinehook - function to run when setting up a coarser level
1424 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1425 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1426 
1427    Calling sequence of refinehook:
1428 $    refinehook(DM coarse,DM fine,void *ctx);
1429 
1430 +  coarse - coarse level DM
1431 .  fine - fine level DM to interpolate problem to
1432 -  ctx - optional user-defined function context
1433 
1434    Calling sequence for interphook:
1435 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1436 
1437 +  coarse - coarse level DM
1438 .  interp - matrix interpolating a coarse-level solution to the finer grid
1439 .  fine - fine level DM to update
1440 -  ctx - optional user-defined function context
1441 
1442    Level: advanced
1443 
1444    Notes:
1445    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1446 
1447    If this function is called multiple times, the hooks will be run in the order they are added.
1448 
1449    This function is currently not available from Fortran.
1450 
1451 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1452 @*/
1453 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1454 {
1455   PetscErrorCode   ierr;
1456   DMRefineHookLink link,*p;
1457 
1458   PetscFunctionBegin;
1459   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1460   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1461   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1462   link->refinehook = refinehook;
1463   link->interphook = interphook;
1464   link->ctx        = ctx;
1465   link->next       = NULL;
1466   *p               = link;
1467   PetscFunctionReturn(0);
1468 }
1469 
1470 #undef __FUNCT__
1471 #define __FUNCT__ "DMInterpolate"
1472 /*@
1473    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1474 
1475    Collective if any hooks are
1476 
1477    Input Arguments:
1478 +  coarse - coarser DM to use as a base
1479 .  restrct - interpolation matrix, apply using MatInterpolate()
1480 -  fine - finer DM to update
1481 
1482    Level: developer
1483 
1484 .seealso: DMRefineHookAdd(), MatInterpolate()
1485 @*/
1486 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1487 {
1488   PetscErrorCode   ierr;
1489   DMRefineHookLink link;
1490 
1491   PetscFunctionBegin;
1492   for (link=fine->refinehook; link; link=link->next) {
1493     if (link->interphook) {
1494       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1495     }
1496   }
1497   PetscFunctionReturn(0);
1498 }
1499 
1500 #undef __FUNCT__
1501 #define __FUNCT__ "DMGetRefineLevel"
1502 /*@
1503     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1504 
1505     Not Collective
1506 
1507     Input Parameter:
1508 .   dm - the DM object
1509 
1510     Output Parameter:
1511 .   level - number of refinements
1512 
1513     Level: developer
1514 
1515 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1516 
1517 @*/
1518 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1519 {
1520   PetscFunctionBegin;
1521   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1522   *level = dm->levelup;
1523   PetscFunctionReturn(0);
1524 }
1525 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

   This function is currently not available from Fortran.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1585 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* User begin-hooks run first, before any communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF path: broadcast owned (global) values to their local copies; only insertion
       semantics are supported here, so ADD_VALUES is rejected */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before PetscSFBcastEnd() runs in
       DMGlobalToLocalEnd() — presumably safe for the SF implementations in use; confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; *_ALL_VALUES modes are mapped down to the plain modes */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1633 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF path: complete the broadcast started in DMGlobalToLocalBegin();
       ADD_VALUES is unsupported for the SF path, matching the Begin side */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; *_ALL_VALUES modes are mapped down to the plain modes */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* User end-hooks run last, after the local vector has been fully updated */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1679 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF path: reduce local values onto their owners; the reduction operator
       encodes the insertion mode */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      /* MPI_REPLACE (MPI-2) implements insert semantics; without it we cannot support INSERT */
      op = MPI_REPLACE; break;
#else
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored before PetscSFReduceEnd() in DMLocalToGlobalEnd()
       — presumably safe for the SF implementations in use; confirm */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific path; *_ALL_VALUES modes are mapped down to the plain modes */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1739 
1740 #undef __FUNCT__
1741 #define __FUNCT__ "DMLocalToGlobalEnd"
1742 /*@
1743     DMLocalToGlobalEnd - updates global vectors from local vectors
1744 
1745     Neighbor-wise Collective on DM
1746 
1747     Input Parameters:
1748 +   dm - the DM object
1749 .   l - the local vector
1750 .   mode - INSERT_VALUES or ADD_VALUES
1751 -   g - the global vector
1752 
1753 
1754     Level: beginner
1755 
1756 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1757 
1758 @*/
/* Completes the local-to-global update started by DMLocalToGlobalBegin(). */
PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The op must be recomputed identically to DMLocalToGlobalBegin() so the
       matching PetscSFReduceEnd() sees the same reduction. */
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
#if defined(PETSC_HAVE_MPI_REPLACE)
      op = MPI_REPLACE; break;
#else
      /* Same MPI-2 restriction as in DMLocalToGlobalBegin() */
      SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
#endif
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    /* Re-acquire the raw arrays that were restored after PetscSFReduceBegin() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* No SF: dispatch to the DM implementation, collapsing *_ALL_VALUES modes */
    ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1795 
1796 #undef __FUNCT__
1797 #define __FUNCT__ "DMCoarsen"
1798 /*@
1799     DMCoarsen - Coarsens a DM object
1800 
1801     Collective on DM
1802 
    Input Parameters:
1804 +   dm - the DM object
1805 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1806 
1807     Output Parameter:
1808 .   dmc - the coarsened DM
1809 
1810     Level: developer
1811 
1812 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1813 
1814 @*/
1815 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1816 {
1817   PetscErrorCode    ierr;
1818   DMCoarsenHookLink link;
1819 
1820   PetscFunctionBegin;
1821   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1822   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1823   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1824   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1825   (*dmc)->ctx               = dm->ctx;
1826   (*dmc)->levelup           = dm->levelup;
1827   (*dmc)->leveldown         = dm->leveldown + 1;
1828   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1829   for (link=dm->coarsenhook; link; link=link->next) {
1830     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1831   }
1832   PetscFunctionReturn(0);
1833 }
1834 
1835 #undef __FUNCT__
1836 #define __FUNCT__ "DMCoarsenHookAdd"
1837 /*@C
1838    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1839 
1840    Logically Collective
1841 
1842    Input Arguments:
1843 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1844 .  coarsenhook - function to run when setting up a coarser level
1845 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1846 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1847 
1848    Calling sequence of coarsenhook:
1849 $    coarsenhook(DM fine,DM coarse,void *ctx);
1850 
1851 +  fine - fine level DM
1852 .  coarse - coarse level DM to restrict problem to
1853 -  ctx - optional user-defined function context
1854 
1855    Calling sequence for restricthook:
1856 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1857 
1858 +  fine - fine level DM
1859 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1860 .  rscale - scaling vector for restriction
1861 .  inject - matrix restricting by injection
1862 .  coarse - coarse level DM to update
1863 -  ctx - optional user-defined function context
1864 
1865    Level: advanced
1866 
1867    Notes:
1868    This function is only needed if auxiliary data needs to be set up on coarse grids.
1869 
1870    If this function is called multiple times, the hooks will be run in the order they are added.
1871 
1872    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1873    extract the finest level information from its context (instead of from the SNES).
1874 
1875    This function is currently not available from Fortran.
1876 
1877 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1878 @*/
1879 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1880 {
1881   PetscErrorCode    ierr;
1882   DMCoarsenHookLink link,*p;
1883 
1884   PetscFunctionBegin;
1885   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1886   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1887   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1888   link->coarsenhook  = coarsenhook;
1889   link->restricthook = restricthook;
1890   link->ctx          = ctx;
1891   link->next         = NULL;
1892   *p                 = link;
1893   PetscFunctionReturn(0);
1894 }
1895 
1896 #undef __FUNCT__
1897 #define __FUNCT__ "DMRestrict"
1898 /*@
1899    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1900 
1901    Collective if any hooks are
1902 
1903    Input Arguments:
1904 +  fine - finer DM to use as a base
1905 .  restrct - restriction matrix, apply using MatRestrict()
1906 .  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
1908 
1909    Level: developer
1910 
1911 .seealso: DMCoarsenHookAdd(), MatRestrict()
1912 @*/
1913 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1914 {
1915   PetscErrorCode    ierr;
1916   DMCoarsenHookLink link;
1917 
1918   PetscFunctionBegin;
1919   for (link=fine->coarsenhook; link; link=link->next) {
1920     if (link->restricthook) {
1921       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
1922     }
1923   }
1924   PetscFunctionReturn(0);
1925 }
1926 
1927 #undef __FUNCT__
1928 #define __FUNCT__ "DMSubDomainHookAdd"
1929 /*@C
1930    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1931 
1932    Logically Collective
1933 
1934    Input Arguments:
1935 +  global - global DM
1936 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
1937 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1938 
1939    Calling sequence for restricthook:
1940 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
1941 
1942 +  global - global DM
1943 .  out    - scatter to the outer (with ghost and overlap points) block vector
1944 .  in     - scatter to block vector values only owned locally
1945 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
1946 -  ctx - optional user-defined function context
1947 
1948    Level: advanced
1949 
1950    Notes:
1951    This function is only needed if auxiliary data needs to be set up on coarse grids.
1952 
1953    If this function is called multiple times, the hooks will be run in the order they are added.
1954 
1955    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1956    extract the finest level information from its context (instead of from the SNES).
1957 
1958    This function is currently not available from Fortran.
1959 
1960 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1961 @*/
1962 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
1963 {
1964   PetscErrorCode      ierr;
1965   DMSubDomainHookLink link,*p;
1966 
1967   PetscFunctionBegin;
1968   PetscValidHeaderSpecific(global,DM_CLASSID,1);
1969   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1970   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
1971   link->restricthook = restricthook;
1972   link->ddhook       = ddhook;
1973   link->ctx          = ctx;
1974   link->next         = NULL;
1975   *p                 = link;
1976   PetscFunctionReturn(0);
1977 }
1978 
1979 #undef __FUNCT__
1980 #define __FUNCT__ "DMSubDomainRestrict"
1981 /*@
1982    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
1983 
1984    Collective if any hooks are
1985 
1986    Input Arguments:
+  global - the DM to use as a base
.  oscatter - scatter from domain global vector filling subdomain global vector with overlap
.  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
-  subdm - the subdomain DM to update
1991 
1992    Level: developer
1993 
1994 .seealso: DMCoarsenHookAdd(), MatRestrict()
1995 @*/
1996 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
1997 {
1998   PetscErrorCode      ierr;
1999   DMSubDomainHookLink link;
2000 
2001   PetscFunctionBegin;
2002   for (link=global->subdomainhook; link; link=link->next) {
2003     if (link->restricthook) {
2004       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2005     }
2006   }
2007   PetscFunctionReturn(0);
2008 }
2009 
2010 #undef __FUNCT__
2011 #define __FUNCT__ "DMGetCoarsenLevel"
2012 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2014 
2015     Not Collective
2016 
2017     Input Parameter:
2018 .   dm - the DM object
2019 
2020     Output Parameter:
2021 .   level - number of coarsenings
2022 
2023     Level: developer
2024 
2025 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2026 
2027 @*/
2028 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2029 {
2030   PetscFunctionBegin;
2031   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2032   *level = dm->leveldown;
2033   PetscFunctionReturn(0);
2034 }
2035 
2036 
2037 
2038 #undef __FUNCT__
2039 #define __FUNCT__ "DMRefineHierarchy"
2040 /*@C
2041     DMRefineHierarchy - Refines a DM object, all levels at once
2042 
2043     Collective on DM
2044 
    Input Parameters:
2046 +   dm - the DM object
2047 -   nlevels - the number of levels of refinement
2048 
2049     Output Parameter:
2050 .   dmf - the refined DM hierarchy
2051 
2052     Level: developer
2053 
2054 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2055 
2056 @*/
2057 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2058 {
2059   PetscErrorCode ierr;
2060 
2061   PetscFunctionBegin;
2062   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2063   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2064   if (nlevels == 0) PetscFunctionReturn(0);
2065   if (dm->ops->refinehierarchy) {
2066     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2067   } else if (dm->ops->refine) {
2068     PetscInt i;
2069 
2070     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2071     for (i=1; i<nlevels; i++) {
2072       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2073     }
2074   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2075   PetscFunctionReturn(0);
2076 }
2077 
2078 #undef __FUNCT__
2079 #define __FUNCT__ "DMCoarsenHierarchy"
2080 /*@C
2081     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2082 
2083     Collective on DM
2084 
    Input Parameters:
2086 +   dm - the DM object
2087 -   nlevels - the number of levels of coarsening
2088 
2089     Output Parameter:
2090 .   dmc - the coarsened DM hierarchy
2091 
2092     Level: developer
2093 
2094 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2095 
2096 @*/
2097 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2098 {
2099   PetscErrorCode ierr;
2100 
2101   PetscFunctionBegin;
2102   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2103   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2104   if (nlevels == 0) PetscFunctionReturn(0);
2105   PetscValidPointer(dmc,3);
2106   if (dm->ops->coarsenhierarchy) {
2107     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2108   } else if (dm->ops->coarsen) {
2109     PetscInt i;
2110 
2111     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2112     for (i=1; i<nlevels; i++) {
2113       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2114     }
2115   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2116   PetscFunctionReturn(0);
2117 }
2118 
2119 #undef __FUNCT__
2120 #define __FUNCT__ "DMCreateAggregates"
2121 /*@
2122    DMCreateAggregates - Gets the aggregates that map between
2123    grids associated with two DMs.
2124 
2125    Collective on DM
2126 
2127    Input Parameters:
2128 +  dmc - the coarse grid DM
2129 -  dmf - the fine grid DM
2130 
2131    Output Parameters:
2132 .  rest - the restriction matrix (transpose of the projection matrix)
2133 
2134    Level: intermediate
2135 
2136 .keywords: interpolation, restriction, multigrid
2137 
2138 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2139 @*/
2140 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2141 {
2142   PetscErrorCode ierr;
2143 
2144   PetscFunctionBegin;
2145   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2146   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2147   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2148   PetscFunctionReturn(0);
2149 }
2150 
2151 #undef __FUNCT__
2152 #define __FUNCT__ "DMSetApplicationContextDestroy"
2153 /*@C
2154     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2155 
2156     Not Collective
2157 
2158     Input Parameters:
2159 +   dm - the DM object
2160 -   destroy - the destroy function
2161 
2162     Level: intermediate
2163 
2164 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2165 
2166 @*/
/* Records the user destructor that will be applied to dm->ctx when the DM is destroyed. */
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctxdestroy = destroy;  /* NULL cancels any previously set destructor */
  PetscFunctionReturn(0);
}
2174 
2175 #undef __FUNCT__
2176 #define __FUNCT__ "DMSetApplicationContext"
2177 /*@
2178     DMSetApplicationContext - Set a user context into a DM object
2179 
2180     Not Collective
2181 
2182     Input Parameters:
2183 +   dm - the DM object
2184 -   ctx - the user context
2185 
2186     Level: intermediate
2187 
2188 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2189 
2190 @*/
/* Stores a user context pointer in the DM; the DM does not copy or own it
   unless a destructor was set via DMSetApplicationContextDestroy(). */
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2198 
2199 #undef __FUNCT__
2200 #define __FUNCT__ "DMGetApplicationContext"
2201 /*@
2202     DMGetApplicationContext - Gets a user context from a DM object
2203 
2204     Not Collective
2205 
2206     Input Parameter:
2207 .   dm - the DM object
2208 
2209     Output Parameter:
2210 .   ctx - the user context
2211 
2212     Level: intermediate
2213 
2214 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2215 
2216 @*/
2217 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2218 {
2219   PetscFunctionBegin;
2220   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2221   *(void**)ctx = dm->ctx;
2222   PetscFunctionReturn(0);
2223 }
2224 
2225 #undef __FUNCT__
2226 #define __FUNCT__ "DMSetVariableBounds"
2227 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2229 
2230     Logically Collective on DM
2231 
2232     Input Parameter:
2233 +   dm - the DM object
2234 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2235 
2236     Level: intermediate
2237 
2238 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2239          DMSetJacobian()
2240 
2241 @*/
2242 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2243 {
2244   PetscFunctionBegin;
2245   dm->ops->computevariablebounds = f;
2246   PetscFunctionReturn(0);
2247 }
2248 
2249 #undef __FUNCT__
2250 #define __FUNCT__ "DMHasVariableBounds"
2251 /*@
2252     DMHasVariableBounds - does the DM object have a variable bounds function?
2253 
2254     Not Collective
2255 
2256     Input Parameter:
.   dm - the DM object
2258 
2259     Output Parameter:
2260 .   flg - PETSC_TRUE if the variable bounds function exists
2261 
2262     Level: developer
2263 
2264 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2265 
2266 @*/
2267 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2268 {
2269   PetscFunctionBegin;
2270   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2271   PetscFunctionReturn(0);
2272 }
2273 
2274 #undef __FUNCT__
2275 #define __FUNCT__ "DMComputeVariableBounds"
2276 /*@C
2277     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2278 
2279     Logically Collective on DM
2280 
2281     Input Parameters:
+   dm - the DM object
2283 -   x  - current solution at which the bounds are computed
2284 
2285     Output parameters:
2286 +   xl - lower bound
2287 -   xu - upper bound
2288 
2289     Level: intermediate
2290 
2291 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2292 
2293 @*/
2294 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2295 {
2296   PetscErrorCode ierr;
2297 
2298   PetscFunctionBegin;
2299   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2300   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2301   if (dm->ops->computevariablebounds) {
2302     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2303   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2304   PetscFunctionReturn(0);
2305 }
2306 
2307 #undef __FUNCT__
2308 #define __FUNCT__ "DMHasColoring"
2309 /*@
2310     DMHasColoring - does the DM object have a method of providing a coloring?
2311 
2312     Not Collective
2313 
2314     Input Parameter:
2315 .   dm - the DM object
2316 
2317     Output Parameter:
2318 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2319 
2320     Level: developer
2321 
2322 .seealso DMHasFunction(), DMCreateColoring()
2323 
2324 @*/
2325 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2326 {
2327   PetscFunctionBegin;
2328   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2329   PetscFunctionReturn(0);
2330 }
2331 
2332 #undef  __FUNCT__
2333 #define __FUNCT__ "DMSetVec"
2334 /*@C
2335     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2336 
2337     Collective on DM
2338 
2339     Input Parameter:
2340 +   dm - the DM object
2341 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2342 
2343     Level: developer
2344 
2345 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2346 
2347 @*/
2348 PetscErrorCode  DMSetVec(DM dm,Vec x)
2349 {
2350   PetscErrorCode ierr;
2351 
2352   PetscFunctionBegin;
2353   if (x) {
2354     if (!dm->x) {
2355       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2356     }
2357     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2358   } else if (dm->x) {
2359     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2360   }
2361   PetscFunctionReturn(0);
2362 }
2363 
2364 PetscFunctionList DMList              = NULL;
2365 PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2366 
2367 #undef __FUNCT__
2368 #define __FUNCT__ "DMSetType"
2369 /*@C
2370   DMSetType - Builds a DM, for a particular DM implementation.
2371 
2372   Collective on DM
2373 
2374   Input Parameters:
2375 + dm     - The DM object
2376 - method - The name of the DM type
2377 
2378   Options Database Key:
2379 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2380 
2381   Notes:
2382   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2383 
2384   Level: intermediate
2385 
2386 .keywords: DM, set, type
2387 .seealso: DMGetType(), DMCreate()
2388 @*/
/* Builds dm as a particular DM implementation: looks up the constructor for
   'method' in the registry, destroys the old implementation, and runs the new one. */
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* No-op if the DM already has the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Ensure the built-in types are registered, then find the constructor */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll(NULL);CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(PetscObjectComm((PetscObject)dm), DMList, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down any previous implementation before constructing the new one;
     clearing ops->destroy prevents a double destroy if the constructor fails */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2412 
2413 #undef __FUNCT__
2414 #define __FUNCT__ "DMGetType"
2415 /*@C
2416   DMGetType - Gets the DM type name (as a string) from the DM.
2417 
2418   Not Collective
2419 
2420   Input Parameter:
2421 . dm  - The DM
2422 
2423   Output Parameter:
2424 . type - The DM type name
2425 
2426   Level: intermediate
2427 
2428 .keywords: DM, get, type, name
2429 .seealso: DMSetType(), DMCreate()
2430 @*/
2431 PetscErrorCode  DMGetType(DM dm, DMType *type)
2432 {
2433   PetscErrorCode ierr;
2434 
2435   PetscFunctionBegin;
2436   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2437   PetscValidCharPointer(type,2);
2438   if (!DMRegisterAllCalled) {
2439     ierr = DMRegisterAll(NULL);CHKERRQ(ierr);
2440   }
2441   *type = ((PetscObject)dm)->type_name;
2442   PetscFunctionReturn(0);
2443 }
2444 
2445 #undef __FUNCT__
2446 #define __FUNCT__ "DMConvert"
2447 /*@C
2448   DMConvert - Converts a DM to another DM, either of the same or different type.
2449 
2450   Collective on DM
2451 
2452   Input Parameters:
2453 + dm - the DM
2454 - newtype - new DM type (use "same" for the same type)
2455 
2456   Output Parameter:
2457 . M - pointer to new DM
2458 
2459   Notes:
2460   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2461   the MPI communicator of the generated DM is always the same as the communicator
2462   of the input DM.
2463 
2464   Level: intermediate
2465 
2466 .seealso: DMCreate()
2467 @*/
/* Converts dm to a (possibly different) DM type in *M by searching for a
   registered converter; errors if no conversion path exists. */
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below
     in this revision -- presumably intended for a same-type fast path; confirm. */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Converter functions are composed on the object under the name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A temporary DM of the target type is built just to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No fallback exists yet, so failing to find a converter is an error */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    /* Run the converter under the DM_Convert event for profiling */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2540 
2541 /*--------------------------------------------------------------------------------------------------------------------*/
2542 
2543 #undef __FUNCT__
2544 #define __FUNCT__ "DMRegister"
2545 /*@C
2546   DMRegister - See DMRegisterDynamic()
2547 
2548   Level: advanced
2549 @*/
2550 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2551 {
2552   char           fullname[PETSC_MAX_PATH_LEN];
2553   PetscErrorCode ierr;
2554 
2555   PetscFunctionBegin;
2556   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2557   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2558   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2559   ierr = PetscFunctionListAdd(PETSC_COMM_WORLD,&DMList, sname, fullname, (void (*)(void))function);CHKERRQ(ierr);
2560   PetscFunctionReturn(0);
2561 }
2562 
2563 
2564 /*--------------------------------------------------------------------------------------------------------------------*/
2565 #undef __FUNCT__
2566 #define __FUNCT__ "DMRegisterDestroy"
2567 /*@C
2568    DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().
2569 
2570    Not Collective
2571 
2572    Level: advanced
2573 
2574 .keywords: DM, register, destroy
2575 .seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
2576 @*/
2577 PetscErrorCode  DMRegisterDestroy(void)
2578 {
2579   PetscErrorCode ierr;
2580 
2581   PetscFunctionBegin;
2582   ierr                = PetscFunctionListDestroy(&DMList);CHKERRQ(ierr);
2583   DMRegisterAllCalled = PETSC_FALSE;
2584   PetscFunctionReturn(0);
2585 }
2586 
2587 #undef __FUNCT__
2588 #define __FUNCT__ "DMLoad"
2589 /*@C
2590   DMLoad - Loads a DM that has been stored in binary  with DMView().
2591 
2592   Collective on PetscViewer
2593 
2594   Input Parameters:
2595 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2596            some related function before a call to DMLoad().
2597 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2598            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2599 
2600    Level: intermediate
2601 
2602   Notes:
2603    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2604 
2605   Notes for advanced users:
2606   Most users should not need to know the details of the binary storage
2607   format, since DMLoad() and DMView() completely hide these details.
2608   But for anyone who's interested, the standard binary matrix storage
2609   format is
2610 .vb
2611      has not yet been determined
2612 .ve
2613 
2614 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2615 @*/
/* Loads a DM from a binary viewer: reads and checks the class id, reads the
   type name, sets the type, then delegates to the implementation's load. */
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer format is supported here */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* The file must start with DM_FILE_CLASSID followed by the type name (256 chars) */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  /* Any type previously set on newdm is replaced by the type stored in the file */
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Implementations without a load method leave the DM with only its type set */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2638 
2639 /******************************** FEM Support **********************************/
2640 
2641 #undef __FUNCT__
2642 #define __FUNCT__ "DMPrintCellVector"
2643 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2644 {
2645   PetscInt       f;
2646   PetscErrorCode ierr;
2647 
2648   PetscFunctionBegin;
2649   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2650   for (f = 0; f < len; ++f) {
2651     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2652   }
2653   PetscFunctionReturn(0);
2654 }
2655 
2656 #undef __FUNCT__
2657 #define __FUNCT__ "DMPrintCellMatrix"
2658 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2659 {
2660   PetscInt       f, g;
2661   PetscErrorCode ierr;
2662 
2663   PetscFunctionBegin;
2664   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2665   for (f = 0; f < rows; ++f) {
2666     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2667     for (g = 0; g < cols; ++g) {
2668       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2669     }
2670     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2671   }
2672   PetscFunctionReturn(0);
2673 }
2674 
2675 #undef __FUNCT__
2676 #define __FUNCT__ "DMGetDefaultSection"
2677 /*@
2678   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2679 
2680   Input Parameter:
2681 . dm - The DM
2682 
2683   Output Parameter:
2684 . section - The PetscSection
2685 
2686   Level: intermediate
2687 
2688   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2689 
2690 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2691 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: the caller must not destroy it. May be NULL if
     no section has been set with DMSetDefaultSection(). */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2700 
2701 #undef __FUNCT__
2702 #define __FUNCT__ "DMSetDefaultSection"
2703 /*@
2704   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2705 
2706   Input Parameters:
2707 + dm - The DM
2708 - section - The PetscSection
2709 
2710   Level: intermediate
2711 
2712   Note: Any existing Section will be destroyed
2713 
.seealso: DMGetDefaultSection(), DMGetDefaultGlobalSection()
2715 @*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroying the old section: this ordering keeps the
     call safe when section == dm->defaultSection (self-assignment). */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field structure onto the DM, propagating field names */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2742 
2743 #undef __FUNCT__
2744 #define __FUNCT__ "DMGetDefaultGlobalSection"
2745 /*@
2746   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2747 
2748   Collective on DM
2749 
2750   Input Parameter:
2751 . dm - The DM
2752 
2753   Output Parameter:
2754 . section - The PetscSection
2755 
2756   Level: intermediate
2757 
2758   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2759 
2760 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2761 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and the point SF;
     it is invalidated (destroyed) by DMSetDefaultSection(). */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also (re)derive the DM's value layout map from the new global section */
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: the caller must not destroy it */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2777 
2778 #undef __FUNCT__
2779 #define __FUNCT__ "DMSetDefaultGlobalSection"
2780 /*@
2781   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2782 
2783   Input Parameters:
2784 + dm - The DM
2785 - section - The PetscSection, or NULL
2786 
2787   Level: intermediate
2788 
2789   Note: Any existing Section will be destroyed
2790 
2791 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2792 @*/
2793 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
2794 {
2795   PetscErrorCode ierr;
2796 
2797   PetscFunctionBegin;
2798   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2799   if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
2800   ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
2801   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2802   dm->defaultGlobalSection = section;
2803   PetscFunctionReturn(0);
2804 }
2805 
2806 #undef __FUNCT__
2807 #define __FUNCT__ "DMGetDefaultSF"
2808 /*@
2809   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2810   it is created from the default PetscSection layouts in the DM.
2811 
2812   Input Parameter:
2813 . dm - The DM
2814 
2815   Output Parameter:
2816 . sf - The PetscSF
2817 
2818   Level: intermediate
2819 
2820   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2821 
2822 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2823 @*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 is used here as the sentinel for "graph not yet set";
     in that case build the SF lazily from the default sections */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No local section available: there is nothing to build the SF from */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Borrowed reference: the caller must not destroy it */
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
2848 
2849 #undef __FUNCT__
2850 #define __FUNCT__ "DMSetDefaultSF"
2851 /*@
2852   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2853 
2854   Input Parameters:
2855 + dm - The DM
2856 - sf - The PetscSF
2857 
2858   Level: intermediate
2859 
2860   Note: Any previous SF is destroyed
2861 
2862 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2863 @*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF(), no reference is taken on sf here —
     the DM appears to take ownership of the caller's reference. Confirm this
     asymmetry is intentional before "fixing" it; callers may rely on it. */
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
2875 
2876 #undef __FUNCT__
2877 #define __FUNCT__ "DMCreateDefaultSF"
2878 /*@C
2879   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
2880   describing the data layout.
2881 
2882   Input Parameters:
2883 + dm - The DM
2884 . localSection - PetscSection describing the local data layout
2885 - globalSection - PetscSection describing the global data layout
2886 
2887   Level: intermediate
2888 
2889 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
2890 @*/
2891 PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
2892 {
2893   MPI_Comm       comm;
2894   PetscLayout    layout;
2895   const PetscInt *ranges;
2896   PetscInt       *local;
2897   PetscSFNode    *remote;
2898   PetscInt       pStart, pEnd, p, nroots, nleaves, l;
2899   PetscMPIInt    size, rank;
2900   PetscErrorCode ierr;
2901 
2902   PetscFunctionBegin;
2903   ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
2904   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2905   ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
2906   ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
2907   ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
2908   ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
2909   ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
2910   ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
2911   ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
2912   ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
2913   ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
2914   for (p = pStart, nleaves = 0; p < pEnd; ++p) {
2915     PetscInt gdof, gcdof;
2916 
2917     ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
2918     ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
2919     nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
2920   }
2921   ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
2922   ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
2923   for (p = pStart, l = 0; p < pEnd; ++p) {
2924     const PetscInt *cind;
2925     PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;
2926 
2927     ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
2928     ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
2929     ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
2930     ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
2931     ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
2932     ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
2933     ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
2934     if (!gdof) continue; /* Censored point */
2935     gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
2936     if (gsize != dof-cdof) {
2937       if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
2938       cdof = 0; /* Ignore constraints */
2939     }
2940     for (d = 0, c = 0; d < dof; ++d) {
2941       if ((c < cdof) && (cind[c] == d)) {++c; continue;}
2942       local[l+d-c] = off+d;
2943     }
2944     if (gdof < 0) {
2945       for (d = 0; d < gsize; ++d, ++l) {
2946         PetscInt offset = -(goff+1) + d, r;
2947 
2948         ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
2949         if (r < 0) r = -(r+2);
2950         remote[l].rank  = r;
2951         remote[l].index = offset - ranges[r];
2952       }
2953     } else {
2954       for (d = 0; d < gsize; ++d, ++l) {
2955         remote[l].rank  = rank;
2956         remote[l].index = goff+d - ranges[rank];
2957       }
2958     }
2959   }
2960   if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
2961   ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
2962   ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
2963   PetscFunctionReturn(0);
2964 }
2965 
2966 #undef __FUNCT__
2967 #define __FUNCT__ "DMGetPointSF"
2968 /*@
2969   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
2970 
2971   Input Parameter:
2972 . dm - The DM
2973 
2974   Output Parameter:
2975 . sf - The PetscSF
2976 
2977   Level: intermediate
2978 
2979   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2980 
2981 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
2982 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference: the caller must not destroy it. May be NULL if
     no point SF has been set. */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
2991 
2992 #undef __FUNCT__
2993 #define __FUNCT__ "DMSetPointSF"
2994 /*@
2995   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
2996 
2997   Input Parameters:
2998 + dm - The DM
2999 - sf - The PetscSF
3000 
3001   Level: intermediate
3002 
3003 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3004 @*/
3005 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3006 {
3007   PetscErrorCode ierr;
3008 
3009   PetscFunctionBegin;
3010   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3011   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3012   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3013   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3014   dm->sf = sf;
3015   PetscFunctionReturn(0);
3016 }
3017 
3018 #undef __FUNCT__
3019 #define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - Return the number of fields set with DMSetNumFields() (0 if never set) */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3028 
3029 #undef __FUNCT__
3030 #define __FUNCT__ "DMSetNumFields"
/* DMSetNumFields - Set the number of fields in the DM, destroying any existing
   field objects and replacing them with freshly created PetscContainer placeholders */
PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Drop all previously stored field objects before reallocating */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Each slot starts as an empty PetscContainer; callers replace or name them later */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3049 
3050 #undef __FUNCT__
3051 #define __FUNCT__ "DMGetField"
/* DMGetField - Return field f of the DM as a borrowed reference (do not destroy).
   Errors if DMSetNumFields() has not been called or f is out of range. */
PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(field, 2);
  if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
  if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
  *field = dm->fields[f];
  PetscFunctionReturn(0);
}
3062 
3063 #undef __FUNCT__
3064 #define __FUNCT__ "DMSetCoordinates"
3065 /*@
3066   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3067 
3068   Collective on DM
3069 
3070   Input Parameters:
3071 + dm - the DM
3072 - c - coordinate vector
3073 
3074   Note:
3075   The coordinates do include those for ghost points, which are in the local vector
3076 
3077   Level: intermediate
3078 
3079 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3081 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector: safe if c == dm->coordinates */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; DMGetCoordinatesLocal() will rebuild them */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3095 
3096 #undef __FUNCT__
3097 #define __FUNCT__ "DMSetCoordinatesLocal"
3098 /*@
3099   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3100 
3101   Collective on DM
3102 
3103    Input Parameters:
3104 +  dm - the DM
3105 -  c - coordinate vector
3106 
3107   Note:
3108   The coordinates of ghost points can be set using DMSetCoordinates()
3109   followed by DMGetCoordinatesLocal(). This is intended to enable the
3110   setting of ghost coordinates outside of the domain.
3111 
3112   Level: intermediate
3113 
3114 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3115 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3116 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroying the old vector: safe if c == dm->coordinatesLocal */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; DMGetCoordinates() will rebuild them */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3132 
3133 #undef __FUNCT__
3134 #define __FUNCT__ "DMGetCoordinates"
3135 /*@
3136   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3137 
3138   Not Collective
3139 
3140   Input Parameter:
3141 . dm - the DM
3142 
3143   Output Parameter:
3144 . c - global coordinate vector
3145 
3146   Note:
3147   This is a borrowed reference, so the user should NOT destroy this vector
3148 
3149   Each process has only the local coordinates (does NOT have the ghost coordinates).
3150 
3151   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3152   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3153 
3154   Level: intermediate
3155 
3156 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3157 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3158 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily derive the global coordinates from the local ones by scattering
     through the coordinate DM; the result is cached on the DM */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference: the caller must not destroy it */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3178 
3179 #undef __FUNCT__
3180 #define __FUNCT__ "DMGetCoordinatesLocal"
3181 /*@
3182   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3183 
3184   Collective on DM
3185 
3186   Input Parameter:
3187 . dm - the DM
3188 
3189   Output Parameter:
3190 . c - coordinate vector
3191 
3192   Note:
3193   This is a borrowed reference, so the user should NOT destroy this vector
3194 
3195   Each process has the local and ghost coordinates
3196 
3197   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3198   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3199 
3200   Level: intermediate
3201 
3202 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3203 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3204 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily derive the local (ghosted) coordinates from the global ones by
     scattering through the coordinate DM; the result is cached on the DM */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference: the caller must not destroy it */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3224 
3225 #undef __FUNCT__
3226 #define __FUNCT__ "DMGetCoordinateDM"
3227 /*@
3228   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3229 
3230   Collective on DM
3231 
3232   Input Parameter:
3233 . dm - the DM
3234 
3235   Output Parameter:
3236 . cdm - coordinate DM
3237 
3238   Level: intermediate
3239 
3240 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3241 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3242 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM via the type-specific hook and cache it */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference: the caller must not destroy it */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3257 
3258 #undef __FUNCT__
3259 #define __FUNCT__ "DMLocatePoints"
3260 /*@
3261   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3262 
3263   Not collective
3264 
3265   Input Parameters:
3266 + dm - The DM
3267 - v - The Vec of points
3268 
3269   Output Parameter:
3270 . cells - The local cell numbers for cells which contain the points
3271 
3272   Level: developer
3273 
3274 .keywords: point location, mesh
3275 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3276 @*/
3277 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3278 {
3279   PetscErrorCode ierr;
3280 
3281   PetscFunctionBegin;
3282   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3283   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3284   PetscValidPointer(cells,3);
3285   if (dm->ops->locatepoints) {
3286     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3287   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3288   PetscFunctionReturn(0);
3289 }
3290