xref: /petsc/src/dm/interface/dm.c (revision dcca6d9d80ebd869fe6029bd05a3aa9faafef49e)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
11 
12   Collective on Vec
13 
14   Input Parameters:
15 + dm   - the DM
16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17 - optionname - option to activate viewing
18 
19   Level: intermediate
20 
21 .keywords: DM, view, options, database
22 .seealso: VecViewFromOptions(), MatViewFromOptions()
23 */
24 PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25 {
26   PetscErrorCode    ierr;
27   PetscBool         flg;
28   PetscViewer       viewer;
29   PetscViewerFormat format;
30 
31   PetscFunctionBegin;
32   if (prefix) {
33     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
34   } else {
35     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
36   }
37   if (flg) {
38     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
39     ierr = DMView(dm,viewer);CHKERRQ(ierr);
40     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
41     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never  call DMSetType()  it will generate an
   error when you try to use the DM.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Register all packages whose objects a DM may later create */
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap              = NULL;
  v->ltogmapb             = NULL;
  v->bs                   = 1;
  v->coloringtype         = IS_COLORING_GLOBAL;
  ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): assumes nullspaceConstructors[] has at least 10 entries -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;
  /* Default vector/matrix creation types; may be overridden by DMSetVecType()/DMSetMatType() or the options database */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}
105 
#undef __FUNCT__
#define __FUNCT__ "DMClone"
/*@
  DMClone - Creates a DM object with the same topology as the original.

  Collective on MPI_Comm

  Input Parameter:
. dm - The original DM object

  Output Parameter:
. newdm  - The new DM object

  Level: beginner

.keywords: DM, topology, create
@*/
PetscErrorCode DMClone(DM dm, DM *newdm)
{
  PetscSF        sf;
  Vec            coords;
  void          *ctx;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(newdm,2);
  ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
  /* Let the implementation copy its topology; a type without a clone hook yields a bare DM */
  if (dm->ops->clone) {
    ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
  }
  (*newdm)->setupcalled = PETSC_TRUE;
  /* Share the point SF and the user's application context with the clone */
  ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
  ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
  ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
  ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
  /* Prefer local coordinates when present, otherwise fall back to global coordinates */
  ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
  if (coords) {
    ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
  } else {
    ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
    if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
151 
#undef __FUNCT__
#define __FUNCT__ "DMSetVecType"
/*@C
       DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()

   Logically Collective on DM

   Input Parameters:
+  da - initial distributed array
-  ctype - the vector type, currently either VECSTANDARD or VECCUSP

   Options Database:
.   -dm_vec_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
@*/
PetscErrorCode  DMSetVecType(DM da,VecType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(da,DM_CLASSID,1);
  /* Replace any previously stored type string with a private copy of the new one */
  ierr = PetscFree(da->vectype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
180 
181 #undef __FUNCT__
182 #define __FUNCT__ "DMGetVecType"
183 /*@C
184        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
185 
186    Logically Collective on DMDA
187 
188    Input Parameter:
189 .  da - initial distributed array
190 
191    Output Parameter:
192 .  ctype - the vector type
193 
194    Level: intermediate
195 
196 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
197 @*/
198 PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
199 {
200   PetscFunctionBegin;
201   PetscValidHeaderSpecific(da,DM_CLASSID,1);
202   *ctype = da->vectype;
203   PetscFunctionReturn(0);
204 }
205 
#undef __FUNCT__
#define __FUNCT__ "VecGetDM"
/*@
  VecGetDM - Gets the DM defining the data layout of the vector

  Not collective

  Input Parameter:
. v - The Vec

  Output Parameter:
. dm - The DM, or NULL if none has been attached (PetscObjectQuery semantics -- TODO confirm)

  Level: intermediate

.seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecGetDM(Vec v, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Vec under the reserved key "__PETSc_dm"; see VecSetDM() */
  ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
233 
#undef __FUNCT__
#define __FUNCT__ "VecSetDM"
/*@
  VecSetDM - Sets the DM defining the data layout of the vector

  Not collective

  Input Parameters:
+ v - The Vec
- dm - The DM, or NULL to remove any attached DM

  Level: intermediate

.seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
@*/
PetscErrorCode VecSetDM(Vec v, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(v,VEC_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Attach under the reserved key "__PETSc_dm" so VecGetDM() can retrieve it */
  ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
259 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatType"
/*@C
       DMSetMatType - Sets the type of matrix created with DMCreateMatrix()

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  ctype - the matrix type

   Options Database:
.   -dm_mat_type ctype

   Level: intermediate

.seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
@*/
PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Replace any previously stored type string with a private copy of the new one */
  ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
  ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
288 
289 #undef __FUNCT__
290 #define __FUNCT__ "DMGetMatType"
291 /*@C
292        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
293 
294    Logically Collective on DM
295 
296    Input Parameter:
297 .  dm - the DM context
298 
299    Output Parameter:
300 .  ctype - the matrix type
301 
302    Options Database:
303 .   -dm_mat_type ctype
304 
305    Level: intermediate
306 
307 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
308 @*/
309 PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
310 {
311   PetscFunctionBegin;
312   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
313   *ctype = dm->mattype;
314   PetscFunctionReturn(0);
315 }
316 
#undef __FUNCT__
#define __FUNCT__ "MatGetDM"
/*@
  MatGetDM - Gets the DM defining the data layout of the matrix

  Not collective

  Input Parameter:
. A - The Mat

  Output Parameter:
. dm - The DM, or NULL if none has been attached (PetscObjectQuery semantics -- TODO confirm)

  Level: intermediate

.seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatGetDM(Mat A, DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscValidPointer(dm,2);
  /* The DM is attached to the Mat under the reserved key "__PETSc_dm"; see MatSetDM() */
  ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
344 
#undef __FUNCT__
#define __FUNCT__ "MatSetDM"
/*@
  MatSetDM - Sets the DM defining the data layout of the matrix

  Not collective

  Input Parameters:
+ A - The Mat
- dm - The DM, or NULL to remove any attached DM

  Level: intermediate

.seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
@*/
PetscErrorCode MatSetDM(Mat A, DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  /* Attach under the reserved key "__PETSc_dm" so MatGetDM() can retrieve it */
  ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
370 
#undef __FUNCT__
#define __FUNCT__ "DMSetOptionsPrefix"
/*@C
   DMSetOptionsPrefix - Sets the prefix used for searching for all
   DM options in the database.

   Logically Collective on DM

   Input Parameters:
+  dm - the DM context
-  prefix - the prefix to prepend to all option names

   Notes:
   A hyphen (-) must NOT be given at the beginning of the prefix name.
   The first character of all runtime options is AUTOMATICALLY the hyphen.

   Level: advanced

.keywords: DM, set, options, prefix, database

.seealso: DMSetFromOptions()
@*/
PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
402 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy; set to NULL on return

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only tear down when the remaining references are exactly the counted circular ones */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = NULL;

  for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedlocal = NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  /* Release solver-attached contexts (KSP/SNES/TS) */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* Invoke the user's context destructor, if one was registered */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with SAWs then destroy it */
  ierr = PetscObjectSAWsViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Type-specific teardown runs last, after all generic resources are released */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
559 
560 #undef __FUNCT__
561 #define __FUNCT__ "DMSetUp"
562 /*@
563     DMSetUp - sets up the data structures inside a DM object
564 
565     Collective on DM
566 
567     Input Parameter:
568 .   dm - the DM object to setup
569 
570     Level: developer
571 
572 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
573 
574 @*/
575 PetscErrorCode  DMSetUp(DM dm)
576 {
577   PetscErrorCode ierr;
578 
579   PetscFunctionBegin;
580   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
581   if (dm->setupcalled) PetscFunctionReturn(0);
582   if (dm->ops->setup) {
583     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
584   }
585   dm->setupcalled = PETSC_TRUE;
586   PetscFunctionReturn(0);
587 }
588 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256];
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsFList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  /* NOTE(review): if dm->mattype is NULL, typeName is used as the displayed default here even though
     it may be uninitialized when -dm_vec_type was not given -- confirm intended */
  ierr = PetscOptionsFList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  /* Give the implementation a chance to process its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
638 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer, or NULL for the default ASCII viewer on the DM's communicator

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary header: file classid followed by a fixed 256-byte type name */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Delegate the body of the output to the implementation, if it provides a view routine */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
679 
680 #undef __FUNCT__
681 #define __FUNCT__ "DMCreateGlobalVector"
682 /*@
683     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
684 
685     Collective on DM
686 
687     Input Parameter:
688 .   dm - the DM object
689 
690     Output Parameter:
691 .   vec - the global vector
692 
693     Level: beginner
694 
695 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
696 
697 @*/
698 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
699 {
700   PetscErrorCode ierr;
701 
702   PetscFunctionBegin;
703   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
704   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
705   PetscFunctionReturn(0);
706 }
707 
708 #undef __FUNCT__
709 #define __FUNCT__ "DMCreateLocalVector"
710 /*@
711     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
712 
713     Not Collective
714 
715     Input Parameter:
716 .   dm - the DM object
717 
718     Output Parameter:
719 .   vec - the local vector
720 
721     Level: beginner
722 
723 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
724 
725 @*/
726 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
727 {
728   PetscErrorCode ierr;
729 
730   PetscFunctionBegin;
731   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
732   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
733   PetscFunctionReturn(0);
734 }
735 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMapping"
/*@
   DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMapping() or
   MatSetLocalToGlobalMapping().

   The mapping is built lazily on first call: from the default PetscSection
   when one is set, otherwise by the implementation's own routine.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmap) {
    PetscSection section, sectionGlobal;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* NOTE(review): this local array shadows the 'ltog' output parameter */
      PetscInt *ltog;
      PetscInt pStart, pEnd, size, p, l;

      ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
      ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
      ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
      ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
      for (p = pStart, l = 0; p < pEnd; ++p) {
        PetscInt dof, off, c;

        /* Should probably use constrained dofs */
        ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
        ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
        for (c = 0; c < dof; ++c, ++l) {
          ltog[l] = off+c;
        }
      }
      /* PETSC_OWN_POINTER: the mapping takes ownership of the ltog array */
      ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
      ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
    } else {
      if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
      ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmap;
  PetscFunctionReturn(0);
}
796 
#undef __FUNCT__
#define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
/*@
   DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.

   Collective on DM

   Input Parameter:
.  dm - the DM that provides the mapping

   Output Parameter:
.  ltog - the block mapping; owned by the DM, do not destroy

   Level: intermediate

   Notes:
   This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
   MatSetLocalToGlobalMappingBlock().

   When the DM has block size 1 the pointwise mapping is reused as the block mapping.

.seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
@*/
PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(ltog,2);
  if (!dm->ltogmapb) {
    PetscInt bs;
    ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
    if (bs > 1) {
      if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
      ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
    } else {
      /* bs == 1: the pointwise mapping doubles as the block mapping; take an extra reference */
      ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
      ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
    }
  }
  *ltog = dm->ltogmapb;
  PetscFunctionReturn(0);
}
839 
#undef __FUNCT__
#define __FUNCT__ "DMGetBlockSize"
/*@
   DMGetBlockSize - Gets the inherent block size associated with a DM

   Not Collective

   Input Parameter:
.  dm - the DM with block structure

   Output Parameter:
.  bs - the block size, 1 implies no exploitable block structure

   Level: intermediate

.seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
@*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* bs < 1 means the block size has not been determined yet (e.g. before DMSetUp()) */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
866 
867 #undef __FUNCT__
868 #define __FUNCT__ "DMCreateInterpolation"
869 /*@
870     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
871 
872     Collective on DM
873 
874     Input Parameter:
875 +   dm1 - the DM object
876 -   dm2 - the second, finer DM object
877 
878     Output Parameter:
879 +  mat - the interpolation
880 -  vec - the scaling (optional)
881 
882     Level: developer
883 
884     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
885         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
886 
887         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
888         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
889 
890 
891 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
892 
893 @*/
894 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
895 {
896   PetscErrorCode ierr;
897 
898   PetscFunctionBegin;
899   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
900   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
901   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
902   PetscFunctionReturn(0);
903 }
904 
905 #undef __FUNCT__
906 #define __FUNCT__ "DMCreateInjection"
907 /*@
908     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
909 
910     Collective on DM
911 
912     Input Parameter:
913 +   dm1 - the DM object
914 -   dm2 - the second, finer DM object
915 
916     Output Parameter:
917 .   ctx - the injection
918 
919     Level: developer
920 
921    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
922         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
923 
924 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
925 
926 @*/
927 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
928 {
929   PetscErrorCode ierr;
930 
931   PetscFunctionBegin;
932   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
933   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
934   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
935   PetscFunctionReturn(0);
936 }
937 
938 #undef __FUNCT__
939 #define __FUNCT__ "DMCreateColoring"
940 /*@
941     DMCreateColoring - Gets coloring for a DM
942 
943     Collective on DM
944 
945     Input Parameter:
946 +   dm - the DM object
947 -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
948 
949     Output Parameter:
950 .   coloring - the coloring
951 
952     Level: developer
953 
954 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
955 
956 @*/
957 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
958 {
959   PetscErrorCode ierr;
960 
961   PetscFunctionBegin;
962   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
963   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
964   ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
965   PetscFunctionReturn(0);
966 }
967 
968 #undef __FUNCT__
969 #define __FUNCT__ "DMCreateMatrix"
970 /*@
971     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
972 
973     Collective on DM
974 
975     Input Parameter:
976 .   dm - the DM object
977 
978     Output Parameter:
979 .   mat - the empty Jacobian
980 
981     Level: beginner
982 
983     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
984        do not need to do it yourself.
985 
986        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
987        the nonzero pattern call DMDASetMatPreallocateOnly()
988 
989        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
990        internally by PETSc.
991 
992        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
993        the indices for the global numbering for DMDAs which is complicated.
994 
995 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
996 
997 @*/
998 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
999 {
1000   PetscErrorCode ierr;
1001 
1002   PetscFunctionBegin;
1003   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1004   ierr = MatInitializePackage();CHKERRQ(ierr);
1005   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1006   PetscValidPointer(mat,3);
1007   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
1008   PetscFunctionReturn(0);
1009 }
1010 
1011 #undef __FUNCT__
1012 #define __FUNCT__ "DMSetMatrixPreallocateOnly"
1013 /*@
1014   DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
1015     preallocated but the nonzero structure and zero values will not be set.
1016 
1017   Logically Collective on DMDA
1018 
1019   Input Parameter:
1020 + dm - the DM
1021 - only - PETSC_TRUE if only want preallocation
1022 
1023   Level: developer
1024 .seealso DMCreateMatrix()
1025 @*/
1026 PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
1027 {
1028   PetscFunctionBegin;
1029   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1030   dm->prealloc_only = only;
1031   PetscFunctionReturn(0);
1032 }
1033 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. mem - address of a pointer that receives the work array (e.g. pass the address of a PetscScalar* cast to void*)

  Level: developer

  Notes:
  The storage is owned and recycled by the DM; do not free it directly.
  Internally the DM keeps two singly linked lists of work buffers: dm->workin (available)
  and dm->workout (currently checked out).

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  if (dm->workin) {
    /* Reuse a previously restored buffer: pop the head of the free list */
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    /* Free list is empty: create a fresh link; its buffer is allocated lazily below */
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer if the request exceeds current capacity; buffers never shrink */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Push the link onto the checked-out list; DMRestoreWorkArray() moves it back to workin */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1079 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size passed to DMGetWorkArray() (not used here)
- dtype - data type passed to DMGetWorkArray() (not used here)

  Output Parameter:
. mem - address of the pointer returned by DMGetWorkArray(); set to NULL on successful return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Find the link owning this buffer on the checked-out list and splice it onto the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was never handed out by DMGetWorkArray() (or was restored twice) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1118 
1119 #undef __FUNCT__
1120 #define __FUNCT__ "DMSetNullSpaceConstructor"
1121 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1122 {
1123   PetscFunctionBegin;
1124   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1125   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1126   dm->nullspaceConstructors[field] = nullsp;
1127   PetscFunctionReturn(0);
1128 }
1129 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Each output is optional; validate and zero only the ones the caller requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  /* Prefer the PetscSection description of the layout when one is attached to the DM;
     otherwise fall back to the type-specific implementation (if any) */
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,&fieldSizes,nF,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* First pass over the chart: count the number of unconstrained dofs per field.
       Points with gdof <= 0 are not owned here (negative encodes remote offsets), so skip them. */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    /* Allocate the index arrays, then reuse fieldSizes as per-field fill cursors */
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;
    }
    /* Second pass: record the global indices, walking each point's dofs field by field
       starting from the point's global offset */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f], so it is not freed below */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    /* NOTE(review): if fields was NULL, the fieldIndices[f] buffers are never adopted by an IS
       and appear to leak here -- confirm */
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1242 
1243 
1244 #undef __FUNCT__
1245 #define __FUNCT__ "DMCreateFieldDecomposition"
1246 /*@C
1247   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1248                           corresponding to different fields: each IS contains the global indices of the dofs of the
1249                           corresponding field. The optional list of DMs define the DM for each subproblem.
1250                           Generalizes DMCreateFieldIS().
1251 
1252   Not collective
1253 
1254   Input Parameter:
1255 . dm - the DM object
1256 
1257   Output Parameters:
1258 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1259 . namelist  - The name for each field (or NULL if not requested)
1260 . islist    - The global indices for each field (or NULL if not requested)
1261 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1262 
1263   Level: intermediate
1264 
1265   Notes:
1266   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1267   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1268   and all of the arrays should be freed with PetscFree().
1269 
1270 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1271 @*/
1272 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1273 {
1274   PetscErrorCode ierr;
1275 
1276   PetscFunctionBegin;
1277   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1278   if (len) {
1279     PetscValidPointer(len,2);
1280     *len = 0;
1281   }
1282   if (namelist) {
1283     PetscValidPointer(namelist,3);
1284     *namelist = 0;
1285   }
1286   if (islist) {
1287     PetscValidPointer(islist,4);
1288     *islist = 0;
1289   }
1290   if (dmlist) {
1291     PetscValidPointer(dmlist,5);
1292     *dmlist = 0;
1293   }
1294   /*
1295    Is it a good idea to apply the following check across all impls?
1296    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1297    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1298    */
1299   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1300   if (!dm->ops->createfielddecomposition) {
1301     PetscSection section;
1302     PetscInt     numFields, f;
1303 
1304     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1305     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1306     if (section && numFields && dm->ops->createsubdm) {
1307       *len = numFields;
1308       ierr = PetscMalloc3(numFields,namelist,numFields,islist,numFields,dmlist);CHKERRQ(ierr);
1309       for (f = 0; f < numFields; ++f) {
1310         const char *fieldName;
1311 
1312         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1313         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1314         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1315       }
1316     } else {
1317       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1318       /* By default there are no DMs associated with subproblems. */
1319       if (dmlist) *dmlist = NULL;
1320     }
1321   } else {
1322     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1323   }
1324   PetscFunctionReturn(0);
1325 }
1326 
1327 #undef __FUNCT__
1328 #define __FUNCT__ "DMCreateSubDM"
1329 /*@C
1330   DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
1331                   The fields are defined by DMCreateFieldIS().
1332 
1333   Not collective
1334 
1335   Input Parameters:
1336 + dm - the DM object
1337 . numFields - number of fields in this subproblem
1338 - len       - The number of subproblems in the decomposition (or NULL if not requested)
1339 
1340   Output Parameters:
1341 . is - The global indices for the subproblem
1342 - dm - The DM for the subproblem
1343 
1344   Level: intermediate
1345 
1346 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1347 @*/
1348 PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
1349 {
1350   PetscErrorCode ierr;
1351 
1352   PetscFunctionBegin;
1353   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1354   PetscValidPointer(fields,3);
1355   if (is) PetscValidPointer(is,4);
1356   if (subdm) PetscValidPointer(subdm,5);
1357   if (dm->ops->createsubdm) {
1358     ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
1359   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
1360   PetscFunctionReturn(0);
1361 }
1362 
1363 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Each output is optional; validate and zero only the ones requested */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  /* If the type provides no implementation, all requested outputs remain zeroed (len = 0) */
  if (dm->ops->createdomaindecomposition) {
    /* l receives the subdomain count from the implementation; copied to *len only at the end */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      /* NOTE(review): assumes the implementation set *dmlist non-NULL whenever dmlist was
         requested; a NULL *dmlist with l > 0 would crash below -- confirm against impls */
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1428 
1429 
1430 #undef __FUNCT__
1431 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1432 /*@C
1433   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1434 
1435   Not collective
1436 
1437   Input Parameters:
1438 + dm - the DM object
1439 . n  - the number of subdomain scatters
1440 - subdms - the local subdomains
1441 
1442   Output Parameters:
1443 + n     - the number of scatters returned
1444 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1445 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1446 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1447 
1448   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1449   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1450   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1451   solution and residual data.
1452 
1453   Level: developer
1454 
1455 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1456 @*/
1457 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1458 {
1459   PetscErrorCode ierr;
1460 
1461   PetscFunctionBegin;
1462   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1463   PetscValidPointer(subdms,3);
1464   if (dm->ops->createddscatters) {
1465     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1466   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1467   PetscFunctionReturn(0);
1468 }
1469 
1470 #undef __FUNCT__
1471 #define __FUNCT__ "DMRefine"
1472 /*@
1473   DMRefine - Refines a DM object
1474 
1475   Collective on DM
1476 
1477   Input Parameter:
1478 + dm   - the DM object
1479 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1480 
1481   Output Parameter:
1482 . dmf - the refined DM, or NULL
1483 
1484   Note: If no refinement was done, the return value is NULL
1485 
1486   Level: developer
1487 
1488 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1489 @*/
1490 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1491 {
1492   PetscErrorCode   ierr;
1493   DMRefineHookLink link;
1494 
1495   PetscFunctionBegin;
1496   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1497   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1498   if (*dmf) {
1499     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1500 
1501     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1502 
1503     (*dmf)->ctx       = dm->ctx;
1504     (*dmf)->leveldown = dm->leveldown;
1505     (*dmf)->levelup   = dm->levelup + 1;
1506 
1507     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1508     for (link=dm->refinehook; link; link=link->next) {
1509       if (link->refinehook) {
1510         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1511       }
1512     }
1513   }
1514   PetscFunctionReturn(0);
1515 }
1516 
1517 #undef __FUNCT__
1518 #define __FUNCT__ "DMRefineHookAdd"
1519 /*@C
1520    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1521 
1522    Logically Collective
1523 
1524    Input Arguments:
1525 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1526 .  refinehook - function to run when setting up a coarser level
1527 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1528 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1529 
1530    Calling sequence of refinehook:
1531 $    refinehook(DM coarse,DM fine,void *ctx);
1532 
1533 +  coarse - coarse level DM
1534 .  fine - fine level DM to interpolate problem to
1535 -  ctx - optional user-defined function context
1536 
1537    Calling sequence for interphook:
1538 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1539 
1540 +  coarse - coarse level DM
1541 .  interp - matrix interpolating a coarse-level solution to the finer grid
1542 .  fine - fine level DM to update
1543 -  ctx - optional user-defined function context
1544 
1545    Level: advanced
1546 
1547    Notes:
1548    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1549 
1550    If this function is called multiple times, the hooks will be run in the order they are added.
1551 
1552    This function is currently not available from Fortran.
1553 
1554 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1555 @*/
1556 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1557 {
1558   PetscErrorCode   ierr;
1559   DMRefineHookLink link,*p;
1560 
1561   PetscFunctionBegin;
1562   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1563   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1564   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1565   link->refinehook = refinehook;
1566   link->interphook = interphook;
1567   link->ctx        = ctx;
1568   link->next       = NULL;
1569   *p               = link;
1570   PetscFunctionReturn(0);
1571 }
1572 
1573 #undef __FUNCT__
1574 #define __FUNCT__ "DMInterpolate"
1575 /*@
1576    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1577 
1578    Collective if any hooks are
1579 
1580    Input Arguments:
1581 +  coarse - coarser DM to use as a base
1582 .  restrct - interpolation matrix, apply using MatInterpolate()
1583 -  fine - finer DM to update
1584 
1585    Level: developer
1586 
1587 .seealso: DMRefineHookAdd(), MatInterpolate()
1588 @*/
1589 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1590 {
1591   PetscErrorCode   ierr;
1592   DMRefineHookLink link;
1593 
1594   PetscFunctionBegin;
1595   for (link=fine->refinehook; link; link=link->next) {
1596     if (link->interphook) {
1597       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1598     }
1599   }
1600   PetscFunctionReturn(0);
1601 }
1602 
1603 #undef __FUNCT__
1604 #define __FUNCT__ "DMGetRefineLevel"
1605 /*@
1606     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1607 
1608     Not Collective
1609 
1610     Input Parameter:
1611 .   dm - the DM object
1612 
1613     Output Parameter:
1614 .   level - number of refinements
1615 
1616     Level: developer
1617 
1618 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1619 
1620 @*/
1621 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1622 {
1623   PetscFunctionBegin;
1624   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1625   *level = dm->levelup;
1626   PetscFunctionReturn(0);
1627 }
1628 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - insertion mode (INSERT_VALUES or ADD_VALUES)
.  l - local vector
-  ctx - optional user-defined function context

   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - insertion mode (INSERT_VALUES or ADD_VALUES)
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1678 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user hooks registered with DMGlobalToLocalHookAdd() before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  /* If the DM carries a default PetscSF, communicate through it; otherwise dispatch
     to the type-specific implementation */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* The SF path only broadcasts (insert semantics); ADD_VALUES is rejected.
       NOTE(review): ADD_ALL_VALUES is not rejected here -- confirm intended */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* NOTE(review): arrays are restored while the broadcast may still be in flight;
       DMGlobalToLocalEnd() re-acquires them before PetscSFBcastEnd() -- confirm this is
       safe for all SF implementations */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes expected by the implementation */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1726 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalBegin(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Must take the same branch (SF vs type-specific) as DMGlobalToLocalBegin() */
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* The SF path only broadcasts (insert semantics); ADD_VALUES is rejected */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    /* Complete the broadcast started in DMGlobalToLocalBegin() */
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Map the *_ALL_VALUES modes onto the plain modes expected by the implementation */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* Run user end-hooks registered with DMGlobalToLocalHookAdd() after communication completes */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1772 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin"
/*@
    DMLocalToGlobalBegin - updates global vectors from local vectors; must be followed by DMLocalToGlobalEnd()

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   l - the local vector
.   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
           base point.
-   g - the global vector

    Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
           array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
           global array to the final global array with VecAXPY().

    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()

@*/
PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
{
  PetscSF        sf;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    MPI_Op      op;
    PetscScalar *lArray, *gArray;

    /* Map the insertion mode onto the MPI reduction operation used by the star forest */
    switch (mode) {
    case INSERT_VALUES:
    case INSERT_ALL_VALUES:
      op = MPIU_REPLACE; break;
    case ADD_VALUES:
    case ADD_ALL_VALUES:
      op = MPI_SUM; break;
    default:
      SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    }
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type-specific implementations understand only plain INSERT_VALUES/ADD_VALUES, so map the *_ALL_* modes down */
    ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1828 
1829 #undef __FUNCT__
1830 #define __FUNCT__ "DMLocalToGlobalEnd"
1831 /*@
1832     DMLocalToGlobalEnd - updates global vectors from local vectors
1833 
1834     Neighbor-wise Collective on DM
1835 
1836     Input Parameters:
1837 +   dm - the DM object
1838 .   l - the local vector
1839 .   mode - INSERT_VALUES or ADD_VALUES
1840 -   g - the global vector
1841 
1842 
1843     Level: beginner
1844 
1845 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1846 
1847 @*/
1848 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1849 {
1850   PetscSF        sf;
1851   PetscErrorCode ierr;
1852 
1853   PetscFunctionBegin;
1854   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1855   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1856   if (sf) {
1857     MPI_Op      op;
1858     PetscScalar *lArray, *gArray;
1859 
1860     switch (mode) {
1861     case INSERT_VALUES:
1862     case INSERT_ALL_VALUES:
1863       op = MPIU_REPLACE; break;
1864     case ADD_VALUES:
1865     case ADD_ALL_VALUES:
1866       op = MPI_SUM; break;
1867     default:
1868       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1869     }
1870     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1871     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1872     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1873     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1874     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1875   } else {
1876     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1877   }
1878   PetscFunctionReturn(0);
1879 }
1880 
1881 #undef __FUNCT__
1882 #define __FUNCT__ "DMLocalToLocalBegin"
1883 /*@
1884    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1885    that contain irrelevant values) to another local vector where the ghost
1886    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1887 
1888    Neighbor-wise Collective on DM and Vec
1889 
1890    Input Parameters:
1891 +  dm - the DM object
1892 .  g - the original local vector
1893 -  mode - one of INSERT_VALUES or ADD_VALUES
1894 
1895    Output Parameter:
1896 .  l  - the local vector with correct ghost values
1897 
1898    Level: intermediate
1899 
1900    Notes:
1901    The local vectors used here need not be the same as those
1902    obtained from DMCreateLocalVector(), BUT they
1903    must have the same parallel data layout; they could, for example, be
1904    obtained with VecDuplicate() from the DM originating vectors.
1905 
1906 .keywords: DM, local-to-local, begin
1907 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1908 
1909 @*/
1910 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1911 {
1912   PetscErrorCode          ierr;
1913 
1914   PetscFunctionBegin;
1915   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1916   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1917   PetscFunctionReturn(0);
1918 }
1919 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToLocalEnd"
/*@
   DMLocalToLocalEnd - Maps from a local vector (including ghost points
   that contain irrelevant values) to another local vector where the ghost
   points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().

   Neighbor-wise Collective on DM and Vec

   Input Parameters:
+  dm - the DM object
.  g - the original local vector
-  mode - one of INSERT_VALUES or ADD_VALUES

   Output Parameter:
.  l  - the local vector with correct ghost values

   Level: intermediate

   Notes:
   The local vectors used here need not be the same as those
   obtained from DMCreateLocalVector(), BUT they
   must have the same parallel data layout; they could, for example, be
   obtained with VecDuplicate() from the DM originating vectors.

.keywords: DM, local-to-local, end
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscErrorCode          ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Implementations only understand plain INSERT_VALUES/ADD_VALUES, so map the *_ALL_* modes down */
  ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1958 
1959 
1960 #undef __FUNCT__
1961 #define __FUNCT__ "DMCoarsen"
1962 /*@
1963     DMCoarsen - Coarsens a DM object
1964 
1965     Collective on DM
1966 
1967     Input Parameter:
1968 +   dm - the DM object
1969 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1970 
1971     Output Parameter:
1972 .   dmc - the coarsened DM
1973 
1974     Level: developer
1975 
1976 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1977 
1978 @*/
1979 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1980 {
1981   PetscErrorCode    ierr;
1982   DMCoarsenHookLink link;
1983 
1984   PetscFunctionBegin;
1985   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1986   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1987   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1988   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1989   (*dmc)->ctx               = dm->ctx;
1990   (*dmc)->levelup           = dm->levelup;
1991   (*dmc)->leveldown         = dm->leveldown + 1;
1992   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1993   for (link=dm->coarsenhook; link; link=link->next) {
1994     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1995   }
1996   PetscFunctionReturn(0);
1997 }
1998 
1999 #undef __FUNCT__
2000 #define __FUNCT__ "DMCoarsenHookAdd"
2001 /*@C
2002    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
2003 
2004    Logically Collective
2005 
2006    Input Arguments:
2007 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
2008 .  coarsenhook - function to run when setting up a coarser level
2009 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
2010 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2011 
2012    Calling sequence of coarsenhook:
2013 $    coarsenhook(DM fine,DM coarse,void *ctx);
2014 
2015 +  fine - fine level DM
2016 .  coarse - coarse level DM to restrict problem to
2017 -  ctx - optional user-defined function context
2018 
2019    Calling sequence for restricthook:
2020 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
2021 
2022 +  fine - fine level DM
2023 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
2024 .  rscale - scaling vector for restriction
2025 .  inject - matrix restricting by injection
2026 .  coarse - coarse level DM to update
2027 -  ctx - optional user-defined function context
2028 
2029    Level: advanced
2030 
2031    Notes:
2032    This function is only needed if auxiliary data needs to be set up on coarse grids.
2033 
2034    If this function is called multiple times, the hooks will be run in the order they are added.
2035 
2036    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2037    extract the finest level information from its context (instead of from the SNES).
2038 
2039    This function is currently not available from Fortran.
2040 
2041 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2042 @*/
2043 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2044 {
2045   PetscErrorCode    ierr;
2046   DMCoarsenHookLink link,*p;
2047 
2048   PetscFunctionBegin;
2049   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2050   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2051   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2052   link->coarsenhook  = coarsenhook;
2053   link->restricthook = restricthook;
2054   link->ctx          = ctx;
2055   link->next         = NULL;
2056   *p                 = link;
2057   PetscFunctionReturn(0);
2058 }
2059 
2060 #undef __FUNCT__
2061 #define __FUNCT__ "DMRestrict"
2062 /*@
2063    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2064 
2065    Collective if any hooks are
2066 
2067    Input Arguments:
2068 +  fine - finer DM to use as a base
2069 .  restrct - restriction matrix, apply using MatRestrict()
2070 .  inject - injection matrix, also use MatRestrict()
2071 -  coarse - coarer DM to update
2072 
2073    Level: developer
2074 
2075 .seealso: DMCoarsenHookAdd(), MatRestrict()
2076 @*/
2077 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2078 {
2079   PetscErrorCode    ierr;
2080   DMCoarsenHookLink link;
2081 
2082   PetscFunctionBegin;
2083   for (link=fine->coarsenhook; link; link=link->next) {
2084     if (link->restricthook) {
2085       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2086     }
2087   }
2088   PetscFunctionReturn(0);
2089 }
2090 
2091 #undef __FUNCT__
2092 #define __FUNCT__ "DMSubDomainHookAdd"
2093 /*@C
2094    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2095 
2096    Logically Collective
2097 
2098    Input Arguments:
2099 +  global - global DM
2100 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2101 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2102 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2103 
2104 
2105    Calling sequence for ddhook:
2106 $    ddhook(DM global,DM block,void *ctx)
2107 
2108 +  global - global DM
2109 .  block  - block DM
2110 -  ctx - optional user-defined function context
2111 
2112    Calling sequence for restricthook:
2113 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2114 
2115 +  global - global DM
2116 .  out    - scatter to the outer (with ghost and overlap points) block vector
2117 .  in     - scatter to block vector values only owned locally
2118 .  block  - block DM
2119 -  ctx - optional user-defined function context
2120 
2121    Level: advanced
2122 
2123    Notes:
2124    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2125 
2126    If this function is called multiple times, the hooks will be run in the order they are added.
2127 
2128    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2129    extract the global information from its context (instead of from the SNES).
2130 
2131    This function is currently not available from Fortran.
2132 
2133 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2134 @*/
2135 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2136 {
2137   PetscErrorCode      ierr;
2138   DMSubDomainHookLink link,*p;
2139 
2140   PetscFunctionBegin;
2141   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2142   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2143   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2144   link->restricthook = restricthook;
2145   link->ddhook       = ddhook;
2146   link->ctx          = ctx;
2147   link->next         = NULL;
2148   *p                 = link;
2149   PetscFunctionReturn(0);
2150 }
2151 
2152 #undef __FUNCT__
2153 #define __FUNCT__ "DMSubDomainRestrict"
2154 /*@
2155    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2156 
2157    Collective if any hooks are
2158 
2159    Input Arguments:
2160 +  fine - finer DM to use as a base
2161 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2162 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2163 -  coarse - coarer DM to update
2164 
2165    Level: developer
2166 
2167 .seealso: DMCoarsenHookAdd(), MatRestrict()
2168 @*/
2169 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2170 {
2171   PetscErrorCode      ierr;
2172   DMSubDomainHookLink link;
2173 
2174   PetscFunctionBegin;
2175   for (link=global->subdomainhook; link; link=link->next) {
2176     if (link->restricthook) {
2177       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2178     }
2179   }
2180   PetscFunctionReturn(0);
2181 }
2182 
2183 #undef __FUNCT__
2184 #define __FUNCT__ "DMGetCoarsenLevel"
2185 /*@
2186     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2187 
2188     Not Collective
2189 
2190     Input Parameter:
2191 .   dm - the DM object
2192 
2193     Output Parameter:
2194 .   level - number of coarsenings
2195 
2196     Level: developer
2197 
2198 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2199 
2200 @*/
2201 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2202 {
2203   PetscFunctionBegin;
2204   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2205   *level = dm->leveldown;
2206   PetscFunctionReturn(0);
2207 }
2208 
2209 
2210 
2211 #undef __FUNCT__
2212 #define __FUNCT__ "DMRefineHierarchy"
2213 /*@C
2214     DMRefineHierarchy - Refines a DM object, all levels at once
2215 
2216     Collective on DM
2217 
2218     Input Parameter:
2219 +   dm - the DM object
2220 -   nlevels - the number of levels of refinement
2221 
2222     Output Parameter:
2223 .   dmf - the refined DM hierarchy
2224 
2225     Level: developer
2226 
2227 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2228 
2229 @*/
2230 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2231 {
2232   PetscErrorCode ierr;
2233 
2234   PetscFunctionBegin;
2235   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2236   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2237   if (nlevels == 0) PetscFunctionReturn(0);
2238   if (dm->ops->refinehierarchy) {
2239     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2240   } else if (dm->ops->refine) {
2241     PetscInt i;
2242 
2243     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2244     for (i=1; i<nlevels; i++) {
2245       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2246     }
2247   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2248   PetscFunctionReturn(0);
2249 }
2250 
2251 #undef __FUNCT__
2252 #define __FUNCT__ "DMCoarsenHierarchy"
2253 /*@C
2254     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2255 
2256     Collective on DM
2257 
2258     Input Parameter:
2259 +   dm - the DM object
2260 -   nlevels - the number of levels of coarsening
2261 
2262     Output Parameter:
2263 .   dmc - the coarsened DM hierarchy
2264 
2265     Level: developer
2266 
2267 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2268 
2269 @*/
2270 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2271 {
2272   PetscErrorCode ierr;
2273 
2274   PetscFunctionBegin;
2275   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2276   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2277   if (nlevels == 0) PetscFunctionReturn(0);
2278   PetscValidPointer(dmc,3);
2279   if (dm->ops->coarsenhierarchy) {
2280     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2281   } else if (dm->ops->coarsen) {
2282     PetscInt i;
2283 
2284     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2285     for (i=1; i<nlevels; i++) {
2286       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2287     }
2288   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2289   PetscFunctionReturn(0);
2290 }
2291 
#undef __FUNCT__
#define __FUNCT__ "DMCreateAggregates"
/*@
   DMCreateAggregates - Gets the aggregates that map between
   grids associated with two DMs.

   Collective on DM

   Input Parameters:
+  dmc - the coarse grid DM
-  dmf - the fine grid DM

   Output Parameters:
.  rest - the restriction matrix (transpose of the projection matrix)

   Level: intermediate

.keywords: interpolation, restriction, multigrid

.seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
@*/
PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* NOTE(review): dispatches unconditionally through the coarse DM's getaggregates; a type without this op would crash here -- confirm all callers guarantee support */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2323 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Only stores the callback; it is invoked later when the DM itself is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2347 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores but does not own ctx; see DMSetApplicationContextDestroy() for cleanup */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2371 
#undef __FUNCT__
#define __FUNCT__ "DMGetApplicationContext"
/*@
    DMGetApplicationContext - Gets a user context from a DM object

    Not Collective

    Input Parameter:
.   dm - the DM object

    Output Parameter:
.   ctx - the user context; must actually be the address of a pointer variable (a void**), passed as void* for type flexibility

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMSetApplicationContext()

@*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really a void**: write the stored context through it */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2397 
2398 #undef __FUNCT__
2399 #define __FUNCT__ "DMSetVariableBounds"
2400 /*@C
2401     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2402 
2403     Logically Collective on DM
2404 
2405     Input Parameter:
2406 +   dm - the DM object
2407 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2408 
2409     Level: intermediate
2410 
2411 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2412          DMSetJacobian()
2413 
2414 @*/
2415 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2416 {
2417   PetscFunctionBegin;
2418   dm->ops->computevariablebounds = f;
2419   PetscFunctionReturn(0);
2420 }
2421 
2422 #undef __FUNCT__
2423 #define __FUNCT__ "DMHasVariableBounds"
2424 /*@
2425     DMHasVariableBounds - does the DM object have a variable bounds function?
2426 
2427     Not Collective
2428 
2429     Input Parameter:
2430 .   dm - the DM object to destroy
2431 
2432     Output Parameter:
2433 .   flg - PETSC_TRUE if the variable bounds function exists
2434 
2435     Level: developer
2436 
2437 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2438 
2439 @*/
2440 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2441 {
2442   PetscFunctionBegin;
2443   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2444   PetscFunctionReturn(0);
2445 }
2446 
2447 #undef __FUNCT__
2448 #define __FUNCT__ "DMComputeVariableBounds"
2449 /*@C
2450     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2451 
2452     Logically Collective on DM
2453 
2454     Input Parameters:
2455 +   dm - the DM object to destroy
2456 -   x  - current solution at which the bounds are computed
2457 
2458     Output parameters:
2459 +   xl - lower bound
2460 -   xu - upper bound
2461 
2462     Level: intermediate
2463 
2464 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2465 
2466 @*/
2467 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2468 {
2469   PetscErrorCode ierr;
2470 
2471   PetscFunctionBegin;
2472   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2473   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2474   if (dm->ops->computevariablebounds) {
2475     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2476   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2477   PetscFunctionReturn(0);
2478 }
2479 
2480 #undef __FUNCT__
2481 #define __FUNCT__ "DMHasColoring"
2482 /*@
2483     DMHasColoring - does the DM object have a method of providing a coloring?
2484 
2485     Not Collective
2486 
2487     Input Parameter:
2488 .   dm - the DM object
2489 
2490     Output Parameter:
2491 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2492 
2493     Level: developer
2494 
2495 .seealso DMHasFunction(), DMCreateColoring()
2496 
2497 @*/
2498 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2499 {
2500   PetscFunctionBegin;
2501   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2502   PetscFunctionReturn(0);
2503 }
2504 
#undef  __FUNCT__
#define __FUNCT__ "DMSetVec"
/*@C
    DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.

    Collective on DM

    Input Parameter:
+   dm - the DM object
-   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetVec(DM dm,Vec x)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (x) {
    /* Lazily create the cached vector, then store a copy of x (not a reference) in it */
    if (!dm->x) {
      ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
    }
    ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
  } else if (dm->x) {
    /* Passing NULL clears any previously cached vector */
    ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2536 
PetscFunctionList DMList              = NULL;        /* registry mapping DM type names to creation routines; consulted by DMSetType() */
PetscBool         DMRegisterAllCalled = PETSC_FALSE; /* ensures DMRegisterAll() populates the registry only once */
2539 
#undef __FUNCT__
#define __FUNCT__ "DMSetType"
/*@C
  DMSetType - Builds a DM, for a particular DM implementation.

  Collective on DM

  Input Parameters:
+ dm     - The DM object
- method - The name of the DM type

  Options Database Key:
. -dm_type <type> - Sets the DM type; use -help for a list of available types

  Notes:
  See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).

  Level: intermediate

.keywords: DM, set, type
.seealso: DMGetType(), DMCreate()
@*/
PetscErrorCode  DMSetType(DM dm, DMType method)
{
  PetscErrorCode (*r)(DM);
  PetscBool      match;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID,1);
  /* Nothing to do if the DM is already of the requested type */
  ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
  if (match) PetscFunctionReturn(0);

  /* Look up the type's creation routine in the registry, registering built-in types on first use */
  if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
  ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
  if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);

  /* Tear down the previous implementation (if any) before building the new one */
  if (dm->ops->destroy) {
    ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
    dm->ops->destroy = NULL;
  }
  ierr = (*r)(dm);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2585 
2586 #undef __FUNCT__
2587 #define __FUNCT__ "DMGetType"
2588 /*@C
2589   DMGetType - Gets the DM type name (as a string) from the DM.
2590 
2591   Not Collective
2592 
2593   Input Parameter:
2594 . dm  - The DM
2595 
2596   Output Parameter:
2597 . type - The DM type name
2598 
2599   Level: intermediate
2600 
2601 .keywords: DM, get, type, name
2602 .seealso: DMSetType(), DMCreate()
2603 @*/
2604 PetscErrorCode  DMGetType(DM dm, DMType *type)
2605 {
2606   PetscErrorCode ierr;
2607 
2608   PetscFunctionBegin;
2609   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2610   PetscValidCharPointer(type,2);
2611   if (!DMRegisterAllCalled) {
2612     ierr = DMRegisterAll();CHKERRQ(ierr);
2613   }
2614   *type = ((PetscObject)dm)->type_name;
2615   PetscFunctionReturn(0);
2616 }
2617 
#undef __FUNCT__
#define __FUNCT__ "DMConvert"
/*@C
  DMConvert - Converts a DM to another DM, either of the same or different type.

  Collective on DM

  Input Parameters:
+ dm - the DM
- newtype - new DM type (use "same" for the same type)

  Output Parameter:
. M - pointer to new DM

  Notes:
  Cannot be used to convert a sequential DM to parallel or parallel to sequential,
  the MPI communicator of the generated DM is always the same as the communicator
  of the input DM.

  Level: intermediate

.seealso: DMCreate()
@*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype/issame are computed but never consulted below; a fast path for same-type
     conversion appears intended but unimplemented -- confirm before relying on "same" behaving specially */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* builds the composed-function name "DMConvert_<from>_<to>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* a throwaway DM of the target type is created just to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2713 
2714 /*--------------------------------------------------------------------------------------------------------------------*/
2715 
2716 #undef __FUNCT__
2717 #define __FUNCT__ "DMRegister"
2718 /*@C
2719   DMRegister -  Adds a new DM component implementation
2720 
2721   Not Collective
2722 
2723   Input Parameters:
2724 + name        - The name of a new user-defined creation routine
2725 - create_func - The creation routine itself
2726 
2727   Notes:
2728   DMRegister() may be called multiple times to add several user-defined DMs
2729 
2730 
2731   Sample usage:
2732 .vb
2733     DMRegister("my_da", MyDMCreate);
2734 .ve
2735 
2736   Then, your DM type can be chosen with the procedural interface via
2737 .vb
2738     DMCreate(MPI_Comm, DM *);
2739     DMSetType(DM,"my_da");
2740 .ve
2741    or at runtime via the option
2742 .vb
2743     -da_type my_da
2744 .ve
2745 
2746   Level: advanced
2747 
2748 .keywords: DM, register
2749 .seealso: DMRegisterAll(), DMRegisterDestroy()
2750 
2751 @*/
2752 PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
2753 {
2754   PetscErrorCode ierr;
2755 
2756   PetscFunctionBegin;
2757   ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
2758   PetscFunctionReturn(0);
2759 }
2760 
2761 #undef __FUNCT__
2762 #define __FUNCT__ "DMLoad"
2763 /*@C
2764   DMLoad - Loads a DM that has been stored in binary  with DMView().
2765 
2766   Collective on PetscViewer
2767 
2768   Input Parameters:
2769 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2770            some related function before a call to DMLoad().
2771 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2772            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2773 
2774    Level: intermediate
2775 
2776   Notes:
2777    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2778 
2779   Notes for advanced users:
2780   Most users should not need to know the details of the binary storage
2781   format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary DM storage
  format is
2784 .vb
2785      has not yet been determined
2786 .ve
2787 
2788 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2789 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;   /* header marker read from the file; must be DM_FILE_CLASSID */
  char           type[256]; /* DM type name stored after the classid */

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only binary viewers are accepted here, despite the HDF5 mention in the manual page above. */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File layout: classid marker, then the type name, then type-specific data read by ops->load. */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ1(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file, classid found %d",(int)classid);
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  /* Overrides any type previously set on newdm, as documented above. */
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2812 
2813 /******************************** FEM Support **********************************/
2814 
2815 #undef __FUNCT__
2816 #define __FUNCT__ "DMPrintCellVector"
2817 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2818 {
2819   PetscInt       f;
2820   PetscErrorCode ierr;
2821 
2822   PetscFunctionBegin;
2823   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2824   for (f = 0; f < len; ++f) {
2825     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2826   }
2827   PetscFunctionReturn(0);
2828 }
2829 
2830 #undef __FUNCT__
2831 #define __FUNCT__ "DMPrintCellMatrix"
2832 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2833 {
2834   PetscInt       f, g;
2835   PetscErrorCode ierr;
2836 
2837   PetscFunctionBegin;
2838   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2839   for (f = 0; f < rows; ++f) {
2840     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2841     for (g = 0; g < cols; ++g) {
2842       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2843     }
2844     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2845   }
2846   PetscFunctionReturn(0);
2847 }
2848 
2849 #undef __FUNCT__
2850 #define __FUNCT__ "DMPrintLocalVec"
/* Debug helper: print a local vector rank-by-rank, chopping entries below tol to zero.
   Output is serialized across ranks via a barrier so the per-rank views do not interleave. */
PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
{
  PetscMPIInt    rank, numProcs;
  PetscInt       p;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
  ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
  for (p = 0; p < numProcs; ++p) {
    if (p == rank) {
      Vec x;

      /* Work on a copy so chopping small values does not modify the caller's vector. */
      ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
      ierr = VecCopy(X, x);CHKERRQ(ierr);
      ierr = VecChop(x, tol);CHKERRQ(ierr);
      ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
      ierr = VecDestroy(&x);CHKERRQ(ierr);
      ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
    }
    /* One barrier per rank keeps the output in rank order. */
    ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2876 
2877 #undef __FUNCT__
2878 #define __FUNCT__ "DMGetDefaultSection"
2879 /*@
2880   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2881 
2882   Input Parameter:
2883 . dm - The DM
2884 
2885   Output Parameter:
2886 . section - The PetscSection
2887 
2888   Level: intermediate
2889 
2890   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2891 
2892 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2893 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference; may be NULL if no default section has been set on this DM. */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2902 
2903 #undef __FUNCT__
2904 #define __FUNCT__ "DMSetDefaultSection"
2905 /*@
2906   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2907 
2908   Input Parameters:
2909 + dm - The DM
2910 - section - The PetscSection
2911 
2912   Level: intermediate
2913 
2914   Note: Any existing Section will be destroyed
2915 
2916 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2917 @*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so that passing the currently-set section is safe. */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's fields onto the DM and copy the field names over. */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2944 
2945 #undef __FUNCT__
2946 #define __FUNCT__ "DMGetDefaultGlobalSection"
2947 /*@
2948   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2949 
2950   Collective on DM
2951 
2952   Input Parameter:
2953 . dm - The DM
2954 
2955   Output Parameter:
2956 . section - The PetscSection
2957 
2958   Level: intermediate
2959 
2960   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2961 
2962 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2963 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and point SF; this is why the
     routine is collective.  DMSetDefaultSection() invalidates the cached result. */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Rebuild the value layout (dm->map) to match the fresh global section. */
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference. */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2980 
2981 #undef __FUNCT__
2982 #define __FUNCT__ "DMSetDefaultGlobalSection"
2983 /*@
2984   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2985 
2986   Input Parameters:
2987 + dm - The DM
2988 - section - The PetscSection, or NULL
2989 
2990   Level: intermediate
2991 
2992   Note: Any existing Section will be destroyed
2993 
2994 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2995 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* section may be NULL to clear the cached global section. */
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-set section is safe
     (PetscObjectReference and PetscSectionDestroy both accept NULL). */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
3008 
3009 #undef __FUNCT__
3010 #define __FUNCT__ "DMGetDefaultSF"
3011 /*@
3012   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
3013   it is created from the default PetscSection layouts in the DM.
3014 
3015   Input Parameter:
3016 . dm - The DM
3017 
3018   Output Parameter:
3019 . sf - The PetscSF
3020 
3021   Level: intermediate
3022 
3023   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3024 
3025 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
3026 @*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 means no graph has been set on dm->defaultSF yet, i.e. it must be built.
     NOTE(review): assumes dm->defaultSF itself always exists (created with the DM) -- confirm. */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      /* Build the dof SF from the local and global section layouts. */
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      /* No section: there is nothing to build the SF from. */
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Borrowed reference. */
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
3051 
3052 #undef __FUNCT__
3053 #define __FUNCT__ "DMSetDefaultSF"
3054 /*@
3055   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3056 
3057   Input Parameters:
3058 + dm - The DM
3059 - sf - The PetscSF
3060 
3061   Level: intermediate
3062 
3063   Note: Any previous SF is destroyed
3064 
3065 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3066 @*/
3067 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
3068 {
3069   PetscErrorCode ierr;
3070 
3071   PetscFunctionBegin;
3072   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3073   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
3074   ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
3075   dm->defaultSF = sf;
3076   PetscFunctionReturn(0);
3077 }
3078 
3079 #undef __FUNCT__
3080 #define __FUNCT__ "DMCreateDefaultSF"
3081 /*@C
3082   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3083   describing the data layout.
3084 
3085   Input Parameters:
3086 + dm - The DM
3087 . localSection - PetscSection describing the local data layout
3088 - globalSection - PetscSection describing the global data layout
3089 
3090   Level: intermediate
3091 
3092 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3093 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;          /* layout of owned (root) dofs; its ranges map offsets to ranks */
  const PetscInt *ranges;
  PetscInt       *local;          /* leaf indices: local dof offsets */
  PetscSFNode    *remote;         /* root (rank, index) pairs matched to each leaf */
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count the leaves.  gdof < 0 encodes an unowned (remote) point with
     -(gdof+1) dofs; constrained dofs (gcdof) are excluded from the graph. */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local leaf offsets and the matching remote (rank, index) roots. */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      /* If the global size matches the unconstrained local size, the global section did
         not remove constraints, so ignore them here; anything else is inconsistent. */
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record local offsets of the unconstrained dofs; cind is sorted, c counts skipped dofs. */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remote point: recover the true global offset from the -(goff+1) encoding and
         look up the owning rank in the layout ranges. */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* PetscFindInt returns -(insertion point)-1 when not found */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Owned point: the root lives on this rank. */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of local and remote (PETSC_OWN_POINTER). */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3168 
3169 #undef __FUNCT__
3170 #define __FUNCT__ "DMGetPointSF"
3171 /*@
3172   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3173 
3174   Input Parameter:
3175 . dm - The DM
3176 
3177   Output Parameter:
3178 . sf - The PetscSF
3179 
3180   Level: intermediate
3181 
3182   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3183 
3184 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3185 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference; may be NULL if no point SF has been set. */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3194 
3195 #undef __FUNCT__
3196 #define __FUNCT__ "DMSetPointSF"
3197 /*@
3198   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3199 
3200   Input Parameters:
3201 + dm - The DM
3202 - sf - The PetscSF
3203 
3204   Level: intermediate
3205 
3206 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3207 @*/
3208 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3209 {
3210   PetscErrorCode ierr;
3211 
3212   PetscFunctionBegin;
3213   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3214   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3215   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3216   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3217   dm->sf = sf;
3218   PetscFunctionReturn(0);
3219 }
3220 
3221 #undef __FUNCT__
3222 #define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - Return the number of fields set on this DM (see DMSetNumFields()). */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3231 
3232 #undef __FUNCT__
3233 #define __FUNCT__ "DMSetNumFields"
/* DMSetNumFields - Set the number of fields on the DM; any existing field objects are
   destroyed and replaced with empty PetscContainer placeholders. */
PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
{
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  /* Drop all previously stored field objects before reallocating the array. */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
  }
  ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
  dm->numFields = numFields;
  ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
  /* Each slot starts as a generic container; callers replace these with real field objects. */
  for (f = 0; f < dm->numFields; ++f) {
    ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3252 
3253 #undef __FUNCT__
3254 #define __FUNCT__ "DMGetField"
3255 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3256 {
3257   PetscFunctionBegin;
3258   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3259   PetscValidPointer(field, 2);
3260   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3261   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3262   *field = dm->fields[f];
3263   PetscFunctionReturn(0);
3264 }
3265 
3266 #undef __FUNCT__
3267 #define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when the fine DM has global coordinates and the coarse DM does not,
   inject the fine coordinates onto the coarse coordinate DM. ctx is unused. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  /* Only fill in coarse coordinates that are missing; never overwrite existing ones. */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    /* Injection picks the fine-grid values that coincide with coarse-grid points. */
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* DMSetCoordinates() references ccoords, so we can drop our reference below. */
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3290 
3291 #undef __FUNCT__
3292 #define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: when the parent DM has global coordinates and the subdomain DM does not,
   scatter both global and local (ghosted) coordinates onto the subdomain. ctx is unused. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  /* Only fill in subdomain coordinates that are missing; never overwrite existing ones. */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* scat_i: global -> subdomain interior; scat_g: global -> subdomain ghosted local.
       Both arrays are allocated by the call and freed below. */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    /* The setters reference the vectors, so our references are dropped below. */
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3323 
3324 #undef __FUNCT__
3325 #define __FUNCT__ "DMSetCoordinates"
3326 /*@
3327   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3328 
3329   Collective on DM
3330 
3331   Input Parameters:
3332 + dm - the DM
3333 - c - coordinate vector
3334 
3335   Note:
3336   The coordinates do include those for ghost points, which are in the local vector
3337 
3338   Level: intermediate
3339 
3340 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3342 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that passing the currently-set vector is safe. */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; DMGetCoordinatesLocal() rebuilds them lazily. */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Register hooks so coordinates propagate automatically to coarsened DMs and subdomains. */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3358 
3359 #undef __FUNCT__
3360 #define __FUNCT__ "DMSetCoordinatesLocal"
3361 /*@
3362   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3363 
3364   Collective on DM
3365 
3366    Input Parameters:
3367 +  dm - the DM
3368 -  c - coordinate vector
3369 
3370   Note:
3371   The coordinates of ghost points can be set using DMSetCoordinates()
3372   followed by DMGetCoordinatesLocal(). This is intended to enable the
3373   setting of ghost coordinates outside of the domain.
3374 
3375   Level: intermediate
3376 
3377 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3378 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3379 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so that passing the currently-set vector is safe. */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global coordinates; DMGetCoordinates() rebuilds them lazily.
     NOTE(review): unlike DMSetCoordinates(), no coarsen/subdomain hooks are added here -- confirm intended. */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3395 
3396 #undef __FUNCT__
3397 #define __FUNCT__ "DMGetCoordinates"
3398 /*@
3399   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3400 
  Collective on DM (the first call may collectively build the global coordinate vector)
3402 
3403   Input Parameter:
3404 . dm - the DM
3405 
3406   Output Parameter:
3407 . c - global coordinate vector
3408 
3409   Note:
3410   This is a borrowed reference, so the user should NOT destroy this vector
3411 
3412   Each process has only the local coordinates (does NOT have the ghost coordinates).
3413 
3414   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3415   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3416 
3417   Level: intermediate
3418 
3419 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3420 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3421 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the local one if only the local exists.
     This path performs collective operations. */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference; may be NULL if no coordinates are set at all. */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3441 
3442 #undef __FUNCT__
3443 #define __FUNCT__ "DMGetCoordinatesLocal"
3444 /*@
3445   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3446 
3447   Collective on DM
3448 
3449   Input Parameter:
3450 . dm - the DM
3451 
3452   Output Parameter:
3453 . c - coordinate vector
3454 
3455   Note:
3456   This is a borrowed reference, so the user should NOT destroy this vector
3457 
3458   Each process has the local and ghost coordinates
3459 
3460   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3461   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3462 
3463   Level: intermediate
3464 
3465 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3466 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3467 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the ghosted local vector from the global one if only the global exists.
     This path performs collective operations, hence "Collective on DM" above. */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference; may be NULL if no coordinates are set at all. */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3487 
3488 #undef __FUNCT__
3489 #define __FUNCT__ "DMGetCoordinateDM"
3490 /*@
3491   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3492 
3493   Collective on DM
3494 
3495   Input Parameter:
3496 . dm - the DM
3497 
3498   Output Parameter:
3499 . cdm - coordinate DM
3500 
3501   Level: intermediate
3502 
3503 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3504 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3505 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM via the implementation; errors if the type
     provides no createcoordinatedm operation. */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference. */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3520 
3521 #undef __FUNCT__
3522 #define __FUNCT__ "DMSetCoordinateDM"
3523 /*@
3524   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3525 
3526   Logically Collective on DM
3527 
3528   Input Parameters:
3529 + dm - the DM
3530 - cdm - coordinate DM
3531 
3532   Level: intermediate
3533 
3534 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3535 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3536 @*/
3537 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3538 {
3539   PetscErrorCode ierr;
3540 
3541   PetscFunctionBegin;
3542   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3543   PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
3544   ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
3545   dm->coordinateDM = cdm;
3546   ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
3547   PetscFunctionReturn(0);
3548 }
3549 
3550 #undef __FUNCT__
3551 #define __FUNCT__ "DMLocatePoints"
3552 /*@
3553   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3554 
3555   Not collective
3556 
3557   Input Parameters:
3558 + dm - The DM
3559 - v - The Vec of points
3560 
3561   Output Parameter:
3562 . cells - The local cell numbers for cells which contain the points
3563 
3564   Level: developer
3565 
3566 .keywords: point location, mesh
3567 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3568 @*/
3569 PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
3570 {
3571   PetscErrorCode ierr;
3572 
3573   PetscFunctionBegin;
3574   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3575   PetscValidHeaderSpecific(v,VEC_CLASSID,2);
3576   PetscValidPointer(cells,3);
3577   if (dm->ops->locatepoints) {
3578     ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
3579   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
3580   PetscFunctionReturn(0);
3581 }
3582