xref: /petsc/src/dm/interface/dm.c (revision efca3c55b02548817e185e5069a2acfe20fa4458)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 #include <petscsf.h>
3 
4 PetscClassId  DM_CLASSID;
5 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal, DM_LocalToLocal;
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "DMViewFromOptions"
9 /*
10   DMViewFromOptions - Processes command line options to determine if/how a DM is to be viewed.
11 
12   Collective on Vec
13 
14   Input Parameters:
15 + dm   - the DM
16 . prefix - prefix to use for viewing, or NULL to use prefix of 'dm'
17 - optionname - option to activate viewing
18 
19   Level: intermediate
20 
21 .keywords: DM, view, options, database
22 .seealso: VecViewFromOptions(), MatViewFromOptions()
23 */
24 PetscErrorCode  DMViewFromOptions(DM dm,const char prefix[],const char optionname[])
25 {
26   PetscErrorCode    ierr;
27   PetscBool         flg;
28   PetscViewer       viewer;
29   PetscViewerFormat format;
30 
31   PetscFunctionBegin;
32   if (prefix) {
33     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
34   } else {
35     ierr = PetscOptionsGetViewer(PetscObjectComm((PetscObject)dm),((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
36   }
37   if (flg) {
38     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
39     ierr = DMView(dm,viewer);CHKERRQ(ierr);
40     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
41     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
42   }
43   PetscFunctionReturn(0);
44 }
45 
46 
#undef __FUNCT__
#define __FUNCT__ "DMCreate"
/*@
  DMCreate - Creates an empty DM object. The type can then be set with DMSetType().

   If you never call DMSetType() it will generate an
   error when you try to use the DM.

  Collective on MPI_Comm

  Input Parameter:
. comm - The communicator for the DM object

  Output Parameter:
. dm - The DM object

  Level: beginner

.seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
@*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = NULL;
  /* Register the packages whose objects a DM will create (Vec, Mat, DM itself) */
  ierr = VecInitializePackage();CHKERRQ(ierr);
  ierr = MatInitializePackage();CHKERRQ(ierr);
  ierr = DMInitializePackage();CHKERRQ(ierr);

  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* Start with an empty function table; DMSetType() fills it in */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap              = NULL;
  v->ltogmapb             = NULL;
  v->bs                   = 1;
  v->coloringtype         = IS_COLORING_GLOBAL;
  ierr                    = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr                    = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = NULL;
  v->defaultGlobalSection = NULL;
  {
    PetscInt i;
    /* NOTE(review): the hard-coded 10 presumably matches the fixed size of
       nullspaceConstructors in _p_DM -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = NULL;
    }
  }
  v->numFields = 0;
  v->fields    = NULL;
  /* Defaults; may be overridden by DMSetVecType()/DMSetMatType() or options */
  ierr = DMSetVecType(v,VECSTANDARD);CHKERRQ(ierr);
  ierr = DMSetMatType(v,MATAIJ);CHKERRQ(ierr);
  *dm = v;
  PetscFunctionReturn(0);
}
104 
105 #undef __FUNCT__
106 #define __FUNCT__ "DMClone"
107 /*@
108   DMClone - Creates a DM object with the same topology as the original.
109 
110   Collective on MPI_Comm
111 
112   Input Parameter:
113 . dm - The original DM object
114 
115   Output Parameter:
116 . newdm  - The new DM object
117 
118   Level: beginner
119 
120 .keywords: DM, topology, create
121 @*/
122 PetscErrorCode DMClone(DM dm, DM *newdm)
123 {
124   PetscSF        sf;
125   Vec            coords;
126   void          *ctx;
127   PetscErrorCode ierr;
128 
129   PetscFunctionBegin;
130   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
131   PetscValidPointer(newdm,2);
132   ierr = DMCreate(PetscObjectComm((PetscObject)dm), newdm);CHKERRQ(ierr);
133   if (dm->ops->clone) {
134     ierr = (*dm->ops->clone)(dm, newdm);CHKERRQ(ierr);
135   }
136   (*newdm)->setupcalled = PETSC_TRUE;
137   ierr = DMGetPointSF(dm, &sf);CHKERRQ(ierr);
138   ierr = DMSetPointSF(*newdm, sf);CHKERRQ(ierr);
139   ierr = DMGetApplicationContext(dm, &ctx);CHKERRQ(ierr);
140   ierr = DMSetApplicationContext(*newdm, ctx);CHKERRQ(ierr);
141   ierr = DMGetCoordinatesLocal(dm, &coords);CHKERRQ(ierr);
142   if (coords) {
143     ierr = DMSetCoordinatesLocal(*newdm, coords);CHKERRQ(ierr);
144   } else {
145     ierr = DMGetCoordinates(dm, &coords);CHKERRQ(ierr);
146     if (coords) {ierr = DMSetCoordinates(*newdm, coords);CHKERRQ(ierr);}
147   }
148   PetscFunctionReturn(0);
149 }
150 
151 #undef __FUNCT__
152 #define __FUNCT__ "DMSetVecType"
153 /*@C
154        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
155 
156    Logically Collective on DMDA
157 
158    Input Parameter:
159 +  da - initial distributed array
160 .  ctype - the vector type, currently either VECSTANDARD or VECCUSP
161 
162    Options Database:
163 .   -dm_vec_type ctype
164 
165    Level: intermediate
166 
167 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType, DMGetVecType()
168 @*/
169 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
170 {
171   PetscErrorCode ierr;
172 
173   PetscFunctionBegin;
174   PetscValidHeaderSpecific(da,DM_CLASSID,1);
175   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
176   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
177   PetscFunctionReturn(0);
178 }
179 
180 #undef __FUNCT__
181 #define __FUNCT__ "DMGetVecType"
182 /*@C
183        DMGetVecType - Gets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
184 
185    Logically Collective on DMDA
186 
187    Input Parameter:
188 .  da - initial distributed array
189 
190    Output Parameter:
191 .  ctype - the vector type
192 
193    Level: intermediate
194 
195 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
196 @*/
197 PetscErrorCode  DMGetVecType(DM da,VecType *ctype)
198 {
199   PetscFunctionBegin;
200   PetscValidHeaderSpecific(da,DM_CLASSID,1);
201   *ctype = da->vectype;
202   PetscFunctionReturn(0);
203 }
204 
205 #undef __FUNCT__
206 #define __FUNCT__ "VecGetDM"
207 /*@
208   VecGetDM - Gets the DM defining the data layout of the vector
209 
210   Not collective
211 
212   Input Parameter:
213 . v - The Vec
214 
215   Output Parameter:
216 . dm - The DM
217 
218   Level: intermediate
219 
220 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
221 @*/
222 PetscErrorCode VecGetDM(Vec v, DM *dm)
223 {
224   PetscErrorCode ierr;
225 
226   PetscFunctionBegin;
227   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
228   PetscValidPointer(dm,2);
229   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
230   PetscFunctionReturn(0);
231 }
232 
233 #undef __FUNCT__
234 #define __FUNCT__ "VecSetDM"
235 /*@
236   VecSetDM - Sets the DM defining the data layout of the vector
237 
238   Not collective
239 
240   Input Parameters:
241 + v - The Vec
242 - dm - The DM
243 
244   Level: intermediate
245 
246 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
247 @*/
248 PetscErrorCode VecSetDM(Vec v, DM dm)
249 {
250   PetscErrorCode ierr;
251 
252   PetscFunctionBegin;
253   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
254   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
255   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
256   PetscFunctionReturn(0);
257 }
258 
259 #undef __FUNCT__
260 #define __FUNCT__ "DMSetMatType"
261 /*@C
262        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
263 
264    Logically Collective on DM
265 
266    Input Parameter:
267 +  dm - the DM context
268 .  ctype - the matrix type
269 
270    Options Database:
271 .   -dm_mat_type ctype
272 
273    Level: intermediate
274 
275 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMGetMatType()
276 @*/
277 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
278 {
279   PetscErrorCode ierr;
280 
281   PetscFunctionBegin;
282   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
283   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
284   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
285   PetscFunctionReturn(0);
286 }
287 
288 #undef __FUNCT__
289 #define __FUNCT__ "DMGetMatType"
290 /*@C
291        DMGetMatType - Gets the type of matrix created with DMCreateMatrix()
292 
293    Logically Collective on DM
294 
295    Input Parameter:
296 .  dm - the DM context
297 
298    Output Parameter:
299 .  ctype - the matrix type
300 
301    Options Database:
302 .   -dm_mat_type ctype
303 
304    Level: intermediate
305 
306 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType, DMSetMatType()
307 @*/
308 PetscErrorCode  DMGetMatType(DM dm,MatType *ctype)
309 {
310   PetscFunctionBegin;
311   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
312   *ctype = dm->mattype;
313   PetscFunctionReturn(0);
314 }
315 
316 #undef __FUNCT__
317 #define __FUNCT__ "MatGetDM"
318 /*@
319   MatGetDM - Gets the DM defining the data layout of the matrix
320 
321   Not collective
322 
323   Input Parameter:
324 . A - The Mat
325 
326   Output Parameter:
327 . dm - The DM
328 
329   Level: intermediate
330 
331 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
332 @*/
333 PetscErrorCode MatGetDM(Mat A, DM *dm)
334 {
335   PetscErrorCode ierr;
336 
337   PetscFunctionBegin;
338   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
339   PetscValidPointer(dm,2);
340   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject*) dm);CHKERRQ(ierr);
341   PetscFunctionReturn(0);
342 }
343 
344 #undef __FUNCT__
345 #define __FUNCT__ "MatSetDM"
346 /*@
347   MatSetDM - Sets the DM defining the data layout of the matrix
348 
349   Not collective
350 
351   Input Parameters:
352 + A - The Mat
353 - dm - The DM
354 
355   Level: intermediate
356 
357 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
358 @*/
359 PetscErrorCode MatSetDM(Mat A, DM dm)
360 {
361   PetscErrorCode ierr;
362 
363   PetscFunctionBegin;
364   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
365   if (dm) PetscValidHeaderSpecific(dm,DM_CLASSID,2);
366   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
367   PetscFunctionReturn(0);
368 }
369 
370 #undef __FUNCT__
371 #define __FUNCT__ "DMSetOptionsPrefix"
372 /*@C
373    DMSetOptionsPrefix - Sets the prefix used for searching for all
374    DMDA options in the database.
375 
376    Logically Collective on DMDA
377 
378    Input Parameter:
379 +  da - the DMDA context
380 -  prefix - the prefix to prepend to all option names
381 
382    Notes:
383    A hyphen (-) must NOT be given at the beginning of the prefix name.
384    The first character of all runtime options is AUTOMATICALLY the hyphen.
385 
386    Level: advanced
387 
388 .keywords: DMDA, set, options, prefix, database
389 
390 .seealso: DMSetFromOptions()
391 @*/
392 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
393 {
394   PetscErrorCode ierr;
395 
396   PetscFunctionBegin;
397   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
398   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
399   PetscFunctionReturn(0);
400 }
401 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy"
/*@
    DMDestroy - Destroys a vector packer or DMDA.

    Collective on DM

    Input Parameter:
.   dm - the DM object to destroy

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMDestroy(DM *dm)
{
  PetscInt       i, cnt = 0, f;
  DMNamedVecLink nlink,nnext;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (!*dm) PetscFunctionReturn(0);
  PetscValidHeaderSpecific((*dm),DM_CLASSID,1);

  /* I think it makes sense to dump all attached things when you are destroyed, which also eliminates circular references */
  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectCompose((*dm)->fields[f], "pmat", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nullspace", NULL);CHKERRQ(ierr);
    ierr = PetscObjectCompose((*dm)->fields[f], "nearnullspace", NULL);CHKERRQ(ierr);
  }
  /* count all the circular references of DM and its contained Vecs */
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localin[i])  cnt++;
    if ((*dm)->globalin[i]) cnt++;
  }
  /* Named global/local vectors hold references back to this DM as well */
  for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
  for (nlink=(*dm)->namedlocal; nlink; nlink=nlink->next) cnt++;
  if ((*dm)->x) {
    DM obj;
    ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
    if (obj == *dm) cnt++;
  }

  /* Only proceed with destruction when the remaining references are exactly
     the circular ones counted above; otherwise just drop one reference */
  if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
  /*
     Need this test because the dm references the vectors that
     reference the dm, so destroying the dm calls destroy on the
     vectors that cause another destroy on the dm
  */
  if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
  ((PetscObject) (*dm))->refct = 0;
  for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
    if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
    ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
  }
  for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedglobal = NULL;

  for (nlink=(*dm)->namedlocal; nlink; nlink=nnext) { /* Destroy the named local vectors */
    nnext = nlink->next;
    if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
    ierr = PetscFree(nlink->name);CHKERRQ(ierr);
    ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
    ierr = PetscFree(nlink);CHKERRQ(ierr);
  }
  (*dm)->namedlocal = NULL;

  /* Destroy the list of hooks */
  {
    DMCoarsenHookLink link,next;
    for (link=(*dm)->coarsenhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->coarsenhook = NULL;
  }
  {
    DMRefineHookLink link,next;
    for (link=(*dm)->refinehook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->refinehook = NULL;
  }
  {
    DMSubDomainHookLink link,next;
    for (link=(*dm)->subdomainhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->subdomainhook = NULL;
  }
  {
    DMGlobalToLocalHookLink link,next;
    for (link=(*dm)->gtolhook; link; link=next) {
      next = link->next;
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->gtolhook = NULL;
  }
  /* Destroy the work arrays */
  {
    DMWorkLink link,next;
    if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
    for (link=(*dm)->workin; link; link=next) {
      next = link->next;
      ierr = PetscFree(link->mem);CHKERRQ(ierr);
      ierr = PetscFree(link);CHKERRQ(ierr);
    }
    (*dm)->workin = NULL;
  }

  /* Solver contexts (KSP/SNES/TS) attached to this DM */
  ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
  ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);

  /* Let the user-supplied destructor release the application context */
  if ((*dm)->ctx && (*dm)->ctxdestroy) {
    ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
  ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
  ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
  ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);

  ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);

  /* Coordinate DM and both global and local coordinate vectors */
  ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
  ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);

  for (f = 0; f < (*dm)->numFields; ++f) {
    ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
  }
  ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
  /* if memory was published with AMS then destroy it */
  ierr = PetscObjectAMSViewOff((PetscObject)*dm);CHKERRQ(ierr);

  /* Implementation-specific teardown runs last, before the header itself */
  ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
  /* We do not destroy (*dm)->data here so that we can reference count backend objects */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
558 
559 #undef __FUNCT__
560 #define __FUNCT__ "DMSetUp"
561 /*@
562     DMSetUp - sets up the data structures inside a DM object
563 
564     Collective on DM
565 
566     Input Parameter:
567 .   dm - the DM object to setup
568 
569     Level: developer
570 
571 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
572 
573 @*/
574 PetscErrorCode  DMSetUp(DM dm)
575 {
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
580   if (dm->setupcalled) PetscFunctionReturn(0);
581   if (dm->ops->setup) {
582     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
583   }
584   dm->setupcalled = PETSC_TRUE;
585   PetscFunctionReturn(0);
586 }
587 
#undef __FUNCT__
#define __FUNCT__ "DMSetFromOptions"
/*@
    DMSetFromOptions - sets parameters in a DM from the options database

    Collective on DM

    Input Parameter:
.   dm - the DM object to set options for

    Options Database:
+   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
.   -dm_vec_type <type>  type of vector to create inside DM
.   -dm_mat_type <type>  type of matrix to create inside DM
-   -dm_coloring_type <global or ghosted>

    Level: developer

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMSetFromOptions(DM dm)
{
  char           typeName[256] = MATAIJ;  /* scratch buffer for both -dm_vec_type and -dm_mat_type; preset to the matrix default */
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All PetscOptions* queries below must stay between Begin and End */
  ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
  ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,NULL);CHKERRQ(ierr);
  ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype ? dm->mattype : typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
  if (flg) {
    ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
  }
  ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,NULL);CHKERRQ(ierr);
  /* Give the implementation (DMDA, DMPlex, ...) a chance to process its own options */
  if (dm->ops->setfromoptions) {
    ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
  }
  /* process any options handlers added with PetscObjectAddOptionsHandler() */
  ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  /* Honor -dm_view after all options have been applied */
  ierr = DMViewFromOptions(dm,NULL,"-dm_view");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
637 
#undef __FUNCT__
#define __FUNCT__ "DMView"
/*@C
    DMView - Views a vector packer or DMDA.

    Collective on DM

    Input Parameters:
+   dm - the DM object to view
-   v - the viewer, or NULL to use the default ASCII viewer on the DM's communicator

    Level: developer

.seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()

@*/
PetscErrorCode  DMView(DM dm,PetscViewer v)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (!v) {
    ierr = PetscViewerASCIIGetStdout(PetscObjectComm((PetscObject)dm),&v);CHKERRQ(ierr);
  }
  ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (isbinary) {
    /* Binary viewers get a header: the DM file classid followed by the
       type name padded to a fixed 256-byte field */
    PetscInt classid = DM_FILE_CLASSID;
    char     type[256];

    ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
    ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
  }
  /* Delegate the actual content to the implementation, if it provides a viewer */
  if (dm->ops->view) {
    ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
678 
679 #undef __FUNCT__
680 #define __FUNCT__ "DMCreateGlobalVector"
681 /*@
682     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
683 
684     Collective on DM
685 
686     Input Parameter:
687 .   dm - the DM object
688 
689     Output Parameter:
690 .   vec - the global vector
691 
692     Level: beginner
693 
694 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
695 
696 @*/
697 PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
698 {
699   PetscErrorCode ierr;
700 
701   PetscFunctionBegin;
702   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
703   ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
704   PetscFunctionReturn(0);
705 }
706 
707 #undef __FUNCT__
708 #define __FUNCT__ "DMCreateLocalVector"
709 /*@
710     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
711 
712     Not Collective
713 
714     Input Parameter:
715 .   dm - the DM object
716 
717     Output Parameter:
718 .   vec - the local vector
719 
720     Level: beginner
721 
722 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
723 
724 @*/
725 PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
726 {
727   PetscErrorCode ierr;
728 
729   PetscFunctionBegin;
730   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
731   ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
732   PetscFunctionReturn(0);
733 }
734 
735 #undef __FUNCT__
736 #define __FUNCT__ "DMGetLocalToGlobalMapping"
737 /*@
738    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
739 
740    Collective on DM
741 
742    Input Parameter:
743 .  dm - the DM that provides the mapping
744 
745    Output Parameter:
746 .  ltog - the mapping
747 
748    Level: intermediate
749 
750    Notes:
751    This mapping can then be used by VecSetLocalToGlobalMapping() or
752    MatSetLocalToGlobalMapping().
753 
754 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
755 @*/
756 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
757 {
758   PetscErrorCode ierr;
759 
760   PetscFunctionBegin;
761   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
762   PetscValidPointer(ltog,2);
763   if (!dm->ltogmap) {
764     PetscSection section, sectionGlobal;
765 
766     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
767     if (section) {
768       PetscInt *ltog;
769       PetscInt pStart, pEnd, size, p, l;
770 
771       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
772       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
773       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
774       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
775       for (p = pStart, l = 0; p < pEnd; ++p) {
776         PetscInt dof, off, c;
777 
778         /* Should probably use constrained dofs */
779         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
780         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
781         for (c = 0; c < dof; ++c, ++l) {
782           ltog[l] = off+c;
783         }
784       }
785       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
786       ierr = PetscLogObjectParent((PetscObject)dm, (PetscObject)dm->ltogmap);CHKERRQ(ierr);
787     } else {
788       if (!dm->ops->getlocaltoglobalmapping) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
789       ierr = (*dm->ops->getlocaltoglobalmapping)(dm);CHKERRQ(ierr);
790     }
791   }
792   *ltog = dm->ltogmap;
793   PetscFunctionReturn(0);
794 }
795 
796 #undef __FUNCT__
797 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
798 /*@
799    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
800 
801    Collective on DM
802 
803    Input Parameter:
804 .  da - the distributed array that provides the mapping
805 
806    Output Parameter:
807 .  ltog - the block mapping
808 
809    Level: intermediate
810 
811    Notes:
812    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
813    MatSetLocalToGlobalMappingBlock().
814 
815 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
816 @*/
817 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
818 {
819   PetscErrorCode ierr;
820 
821   PetscFunctionBegin;
822   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
823   PetscValidPointer(ltog,2);
824   if (!dm->ltogmapb) {
825     PetscInt bs;
826     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
827     if (bs > 1) {
828       if (!dm->ops->getlocaltoglobalmappingblock) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
829       ierr = (*dm->ops->getlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
830     } else {
831       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
832       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
833     }
834   }
835   *ltog = dm->ltogmapb;
836   PetscFunctionReturn(0);
837 }
838 
839 #undef __FUNCT__
840 #define __FUNCT__ "DMGetBlockSize"
841 /*@
842    DMGetBlockSize - Gets the inherent block size associated with a DM
843 
844    Not Collective
845 
846    Input Parameter:
847 .  dm - the DM with block structure
848 
849    Output Parameter:
850 .  bs - the block size, 1 implies no exploitable block structure
851 
852    Level: intermediate
853 
854 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
855 @*/
856 PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
857 {
858   PetscFunctionBegin;
859   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
860   PetscValidPointer(bs,2);
861   if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
862   *bs = dm->bs;
863   PetscFunctionReturn(0);
864 }
865 
866 #undef __FUNCT__
867 #define __FUNCT__ "DMCreateInterpolation"
868 /*@
869     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
870 
871     Collective on DM
872 
873     Input Parameter:
874 +   dm1 - the DM object
875 -   dm2 - the second, finer DM object
876 
877     Output Parameter:
878 +  mat - the interpolation
879 -  vec - the scaling (optional)
880 
881     Level: developer
882 
883     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
884         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
885 
886         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
887         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
888 
889 
890 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
891 
892 @*/
893 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
894 {
895   PetscErrorCode ierr;
896 
897   PetscFunctionBegin;
898   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
899   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
900   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
901   PetscFunctionReturn(0);
902 }
903 
904 #undef __FUNCT__
905 #define __FUNCT__ "DMCreateInjection"
906 /*@
907     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
908 
909     Collective on DM
910 
911     Input Parameter:
912 +   dm1 - the DM object
913 -   dm2 - the second, finer DM object
914 
915     Output Parameter:
916 .   ctx - the injection
917 
918     Level: developer
919 
920    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
921         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
922 
923 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
924 
925 @*/
926 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
927 {
928   PetscErrorCode ierr;
929 
930   PetscFunctionBegin;
931   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
932   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
933   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
934   PetscFunctionReturn(0);
935 }
936 
937 #undef __FUNCT__
938 #define __FUNCT__ "DMCreateColoring"
939 /*@
940     DMCreateColoring - Gets coloring for a DM
941 
942     Collective on DM
943 
944     Input Parameter:
945 +   dm - the DM object
946 -   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
947 
948     Output Parameter:
949 .   coloring - the coloring
950 
951     Level: developer
952 
953 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix(), DMSetMatType()
954 
955 @*/
956 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,ISColoring *coloring)
957 {
958   PetscErrorCode ierr;
959 
960   PetscFunctionBegin;
961   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
962   if (!dm->ops->getcoloring) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No coloring for this type of DM yet");
963   ierr = (*dm->ops->getcoloring)(dm,ctype,coloring);CHKERRQ(ierr);
964   PetscFunctionReturn(0);
965 }
966 
967 #undef __FUNCT__
968 #define __FUNCT__ "DMCreateMatrix"
969 /*@
970     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
971 
972     Collective on DM
973 
974     Input Parameter:
975 .   dm - the DM object
976 
977     Output Parameter:
978 .   mat - the empty Jacobian
979 
980     Level: beginner
981 
982     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
983        do not need to do it yourself.
984 
985        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
986        the nonzero pattern call DMDASetMatPreallocateOnly()
987 
988        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
989        internally by PETSc.
990 
991        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
992        the indices for the global numbering for DMDAs which is complicated.
993 
994 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMSetMatType()
995 
996 @*/
997 PetscErrorCode  DMCreateMatrix(DM dm,Mat *mat)
998 {
999   PetscErrorCode ierr;
1000 
1001   PetscFunctionBegin;
1002   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1003   ierr = MatInitializePackage();CHKERRQ(ierr);
1004   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1005   PetscValidPointer(mat,3);
1006   ierr = (*dm->ops->creatematrix)(dm,mat);CHKERRQ(ierr);
1007   PetscFunctionReturn(0);
1008 }
1009 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DMDA

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* flag is consulted by the type-specific creatematrix implementations when
     DMCreateMatrix() is called later; nothing happens here beyond recording it */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
1032 
#undef __FUNCT__
#define __FUNCT__ "DMGetWorkArray"
/*@C
  DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array

  Level: developer

  Notes: The array is cached on the DM and reused across calls; it is not zeroed.
  Pass the address of your typed pointer (e.g. &scalars) as the last argument.

.seealso DMDestroy(), DMCreate()
@*/
PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  PetscErrorCode ierr;
  DMWorkLink     link;
  size_t         size;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Pop an entry off the free list (workin) if available, otherwise allocate a
     fresh zeroed link (PetscNewLog => link->mem == NULL, link->bytes == 0) */
  if (dm->workin) {
    link       = dm->workin;
    dm->workin = dm->workin->next;
  } else {
    ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
  }
  ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
  /* Grow the buffer only when the request exceeds the cached capacity; the
     old contents are discarded, not copied */
  if (size*count > link->bytes) {
    ierr        = PetscFree(link->mem);CHKERRQ(ierr);
    ierr        = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
    link->bytes = size*count;
  }
  /* Move the link onto the checked-out list (workout) so DMRestoreWorkArray() can find it */
  link->next   = dm->workout;
  dm->workout  = link;
  *(void**)mem = link->mem;
  PetscFunctionReturn(0);
}
1078 
#undef __FUNCT__
#define __FUNCT__ "DMRestoreWorkArray"
/*@C
  DMRestoreWorkArray - Restores a work array obtained with DMGetWorkArray()

  Not Collective

  Input Parameters:
+ dm - the DM object
. count - The minimum size
- dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)

  Output Parameter:
. array - the work array; the pointer is set to NULL on return

  Level: developer

.seealso DMGetWorkArray(), DMDestroy(), DMCreate()
@*/
PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
{
  DMWorkLink *p,link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(mem,4);
  /* Walk the checked-out list (workout) looking for the link whose buffer
     matches the caller's pointer; unlink it and push it back on the free list */
  for (p=&dm->workout; (link=*p); p=&link->next) {
    if (link->mem == *(void**)mem) {
      *p           = link->next;
      link->next   = dm->workin;
      dm->workin   = link;
      *(void**)mem = NULL;   /* defend against accidental reuse by the caller */
      PetscFunctionReturn(0);
    }
  }
  /* The pointer was not obtained from DMGetWorkArray() on this DM (or was restored twice) */
  SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
  PetscFunctionReturn(0);
}
1117 
1118 #undef __FUNCT__
1119 #define __FUNCT__ "DMSetNullSpaceConstructor"
1120 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1121 {
1122   PetscFunctionBegin;
1123   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1124   if (field >= 10) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1125   dm->nullspaceConstructors[field] = nullsp;
1126   PetscFunctionReturn(0);
1127 }
1128 
#undef __FUNCT__
#define __FUNCT__ "DMCreateFieldIS"
/*@C
  DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ numFields  - The number of fields (or NULL if not requested)
. fieldNames - The name for each field (or NULL if not requested)
- fields     - The global indices for each field (or NULL if not requested)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
  PetscFree().

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
@*/
PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
{
  PetscSection   section, sectionGlobal;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* All outputs are optional: validate and zero only the ones requested */
  if (numFields) {
    PetscValidPointer(numFields,2);
    *numFields = 0;
  }
  if (fieldNames) {
    PetscValidPointer(fieldNames,3);
    *fieldNames = NULL;
  }
  if (fields) {
    PetscValidPointer(fields,4);
    *fields = NULL;
  }
  ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
  if (section) {
    /* Section-based path: derive the index sets directly from the local/global sections */
    PetscInt *fieldSizes, **fieldIndices;
    PetscInt nF, f, pStart, pEnd, p;

    ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
    ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
    ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt*,&fieldIndices);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
    for (f = 0; f < nF; ++f) {
      fieldSizes[f] = 0;
    }
    /* Pass 1: count the locally-owned (gdof > 0), unconstrained dofs per field */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof;

          ierr           = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr           = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          fieldSizes[f] += fdof-fcdof;
        }
      }
    }
    for (f = 0; f < nF; ++f) {
      ierr          = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
      fieldSizes[f] = 0;   /* reset so pass 2 can reuse fieldSizes as write cursors */
    }
    /* Pass 2: fill each field's index array with consecutive global offsets.
       Note goff advances across the per-point fields, so the fields of one
       point occupy contiguous, ordered slices of its global block */
    for (p = pStart; p < pEnd; ++p) {
      PetscInt gdof, goff;

      ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
      if (gdof > 0) {
        ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
        for (f = 0; f < nF; ++f) {
          PetscInt fdof, fcdof, fc;

          ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
          ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
          for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
            fieldIndices[f][fieldSizes[f]] = goff++;
          }
        }
      }
    }
    if (numFields) *numFields = nF;
    if (fieldNames) {
      ierr = PetscMalloc(nF * sizeof(char*), fieldNames);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        const char *fieldName;

        ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
        ierr = PetscStrallocpy(fieldName, (char**) &(*fieldNames)[f]);CHKERRQ(ierr);
      }
    }
    if (fields) {
      ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
      for (f = 0; f < nF; ++f) {
        /* PETSC_OWN_POINTER: the IS takes ownership of fieldIndices[f], so it
           must not be freed below */
        ierr = ISCreateGeneral(PetscObjectComm((PetscObject)dm), fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
      }
    }
    ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
  } else if (dm->ops->createfieldis) {
    /* No default section: fall back to the type-specific implementation, if any */
    ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1241 
1242 
1243 #undef __FUNCT__
1244 #define __FUNCT__ "DMCreateFieldDecomposition"
1245 /*@C
1246   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1247                           corresponding to different fields: each IS contains the global indices of the dofs of the
1248                           corresponding field. The optional list of DMs define the DM for each subproblem.
1249                           Generalizes DMCreateFieldIS().
1250 
1251   Not collective
1252 
1253   Input Parameter:
1254 . dm - the DM object
1255 
1256   Output Parameters:
1257 + len       - The number of subproblems in the field decomposition (or NULL if not requested)
1258 . namelist  - The name for each field (or NULL if not requested)
1259 . islist    - The global indices for each field (or NULL if not requested)
1260 - dmlist    - The DMs for each field subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)
1261 
1262   Level: intermediate
1263 
1264   Notes:
1265   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1266   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1267   and all of the arrays should be freed with PetscFree().
1268 
1269 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1270 @*/
1271 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1272 {
1273   PetscErrorCode ierr;
1274 
1275   PetscFunctionBegin;
1276   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1277   if (len) {
1278     PetscValidPointer(len,2);
1279     *len = 0;
1280   }
1281   if (namelist) {
1282     PetscValidPointer(namelist,3);
1283     *namelist = 0;
1284   }
1285   if (islist) {
1286     PetscValidPointer(islist,4);
1287     *islist = 0;
1288   }
1289   if (dmlist) {
1290     PetscValidPointer(dmlist,5);
1291     *dmlist = 0;
1292   }
1293   /*
1294    Is it a good idea to apply the following check across all impls?
1295    Perhaps some impls can have a well-defined decomposition before DMSetUp?
1296    This, however, follows the general principle that accessors are not well-behaved until the object is set up.
1297    */
1298   if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
1299   if (!dm->ops->createfielddecomposition) {
1300     PetscSection section;
1301     PetscInt     numFields, f;
1302 
1303     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1304     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1305     if (section && numFields && dm->ops->createsubdm) {
1306       *len = numFields;
1307       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1308       for (f = 0; f < numFields; ++f) {
1309         const char *fieldName;
1310 
1311         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1312         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1313         ierr = PetscStrallocpy(fieldName, (char**) &(*namelist)[f]);CHKERRQ(ierr);
1314       }
1315     } else {
1316       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1317       /* By default there are no DMs associated with subproblems. */
1318       if (dmlist) *dmlist = NULL;
1319     }
1320   } else {
1321     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1322   }
1323   PetscFunctionReturn(0);
1324 }
1325 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm - the DM object
. numFields - number of fields in this subproblem
- fields    - array of length numFields with the field numbers defining the subproblem

  Output Parameters:
+ is - The global indices for the subproblem (or NULL if not requested)
- subdm - The DM for the subproblem (or NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  /* both outputs are optional; validate only when supplied */
  if (is) PetscValidPointer(is,4);
  if (subdm) PetscValidPointer(subdm,5);
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1361 
1362 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDomainDecomposition"
/*@C
  DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
                          corresponding to restrictions to pairs nested subdomains: each IS contains the global
                          indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
                          define a nonoverlapping covering, while outer subdomains can overlap.
                          The optional list of DMs define the DM for each subproblem.

  Not collective

  Input Parameter:
. dm - the DM object

  Output Parameters:
+ len         - The number of subproblems in the domain decomposition (or NULL if not requested)
. namelist    - The name for each subdomain (or NULL if not requested)
. innerislist - The global indices for each inner subdomain (or NULL, if not requested)
. outerislist - The global indices for each outer subdomain (or NULL, if not requested)
- dmlist      - The DMs for each subdomain subproblem (or NULL, if not requested; if NULL is returned, no DMs are defined)

  Level: intermediate

  Notes:
  The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
  PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
  and all of the arrays should be freed with PetscFree().

  If the DM type provides no decomposition, the outputs are left at 0/NULL and no error is raised.

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
@*/
PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
{
  PetscErrorCode      ierr;
  DMSubDomainHookLink link;
  PetscInt            i,l;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* all outputs are optional; validate and zero only what was requested */
  if (len)           {PetscValidPointer(len,2);            *len         = 0;}
  if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = NULL;}
  if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = NULL;}
  if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = NULL;}
  if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = NULL;}
  /*
   Is it a good idea to apply the following check across all impls?
   Perhaps some impls can have a well-defined decomposition before DMSetUp?
   This, however, follows the general principle that accessors are not well-behaved until the object is set up.
   */
  if (!dm->setupcalled) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONGSTATE, "Decomposition defined only after DMSetUp");
  if (dm->ops->createdomaindecomposition) {
    /* the callback reports the subdomain count in l; forwarded to *len at the end */
    ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
    /* copy subdomain hooks and context over to the subdomain DMs */
    if (dmlist) {
      for (i = 0; i < l; i++) {
        for (link=dm->subdomainhook; link; link=link->next) {
          if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
        }
        /* subdomain DMs share the parent's user context pointer */
        (*dmlist)[i]->ctx = dm->ctx;
      }
    }
    if (len) *len = l;
  }
  PetscFunctionReturn(0);
}
1427 
1428 
1429 #undef __FUNCT__
1430 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1431 /*@C
1432   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1433 
1434   Not collective
1435 
1436   Input Parameters:
1437 + dm - the DM object
1438 . n  - the number of subdomain scatters
1439 - subdms - the local subdomains
1440 
1441   Output Parameters:
1442 + n     - the number of scatters returned
1443 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1444 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1445 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1446 
1447   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1448   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1449   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1450   solution and residual data.
1451 
1452   Level: developer
1453 
1454 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1455 @*/
1456 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1457 {
1458   PetscErrorCode ierr;
1459 
1460   PetscFunctionBegin;
1461   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1462   PetscValidPointer(subdms,3);
1463   if (dm->ops->createddscatters) {
1464     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1465   } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1466   PetscFunctionReturn(0);
1467 }
1468 
1469 #undef __FUNCT__
1470 #define __FUNCT__ "DMRefine"
1471 /*@
1472   DMRefine - Refines a DM object
1473 
1474   Collective on DM
1475 
1476   Input Parameter:
1477 + dm   - the DM object
1478 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1479 
1480   Output Parameter:
1481 . dmf - the refined DM, or NULL
1482 
1483   Note: If no refinement was done, the return value is NULL
1484 
1485   Level: developer
1486 
1487 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1488 @*/
1489 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1490 {
1491   PetscErrorCode   ierr;
1492   DMRefineHookLink link;
1493 
1494   PetscFunctionBegin;
1495   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1496   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1497   if (*dmf) {
1498     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1499 
1500     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1501 
1502     (*dmf)->ctx       = dm->ctx;
1503     (*dmf)->leveldown = dm->leveldown;
1504     (*dmf)->levelup   = dm->levelup + 1;
1505 
1506     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1507     for (link=dm->refinehook; link; link=link->next) {
1508       if (link->refinehook) {
1509         ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);
1510       }
1511     }
1512   }
1513   PetscFunctionReturn(0);
1514 }
1515 
1516 #undef __FUNCT__
1517 #define __FUNCT__ "DMRefineHookAdd"
1518 /*@C
1519    DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid
1520 
1521    Logically Collective
1522 
1523    Input Arguments:
1524 +  coarse - nonlinear solver context on which to run a hook when restricting to a coarser level
1525 .  refinehook - function to run when setting up a coarser level
1526 .  interphook - function to run to update data on finer levels (once per SNESSolve())
1527 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
1528 
1529    Calling sequence of refinehook:
1530 $    refinehook(DM coarse,DM fine,void *ctx);
1531 
1532 +  coarse - coarse level DM
1533 .  fine - fine level DM to interpolate problem to
1534 -  ctx - optional user-defined function context
1535 
1536    Calling sequence for interphook:
1537 $    interphook(DM coarse,Mat interp,DM fine,void *ctx)
1538 
1539 +  coarse - coarse level DM
1540 .  interp - matrix interpolating a coarse-level solution to the finer grid
1541 .  fine - fine level DM to update
1542 -  ctx - optional user-defined function context
1543 
1544    Level: advanced
1545 
1546    Notes:
1547    This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing
1548 
1549    If this function is called multiple times, the hooks will be run in the order they are added.
1550 
1551    This function is currently not available from Fortran.
1552 
1553 .seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1554 @*/
1555 PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
1556 {
1557   PetscErrorCode   ierr;
1558   DMRefineHookLink link,*p;
1559 
1560   PetscFunctionBegin;
1561   PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
1562   for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1563   ierr             = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
1564   link->refinehook = refinehook;
1565   link->interphook = interphook;
1566   link->ctx        = ctx;
1567   link->next       = NULL;
1568   *p               = link;
1569   PetscFunctionReturn(0);
1570 }
1571 
1572 #undef __FUNCT__
1573 #define __FUNCT__ "DMInterpolate"
1574 /*@
1575    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1576 
1577    Collective if any hooks are
1578 
1579    Input Arguments:
1580 +  coarse - coarser DM to use as a base
1581 .  restrct - interpolation matrix, apply using MatInterpolate()
1582 -  fine - finer DM to update
1583 
1584    Level: developer
1585 
1586 .seealso: DMRefineHookAdd(), MatInterpolate()
1587 @*/
1588 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1589 {
1590   PetscErrorCode   ierr;
1591   DMRefineHookLink link;
1592 
1593   PetscFunctionBegin;
1594   for (link=fine->refinehook; link; link=link->next) {
1595     if (link->interphook) {
1596       ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);
1597     }
1598   }
1599   PetscFunctionReturn(0);
1600 }
1601 
1602 #undef __FUNCT__
1603 #define __FUNCT__ "DMGetRefineLevel"
1604 /*@
1605     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1606 
1607     Not Collective
1608 
1609     Input Parameter:
1610 .   dm - the DM object
1611 
1612     Output Parameter:
1613 .   level - number of refinements
1614 
1615     Level: developer
1616 
1617 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1618 
1619 @*/
1620 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1621 {
1622   PetscFunctionBegin;
1623   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1624   *level = dm->levelup;
1625   PetscFunctionReturn(0);
1626 }
1627 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@C
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* append a new link at the tail so hooks run in registration order */
  ierr            = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook   = endhook;
  link->ctx       = ctx;
  link->next      = NULL;
  *p              = link;
  PetscFunctionReturn(0);
}
1677 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* run user begin-hooks (registered via DMGlobalToLocalHookAdd()) before any communication */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {
      ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);
    }
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: broadcast owned (global) values into the local vector.
       Only insertion is supported here, hence the ADD_VALUES rejection. */
    PetscScalar *lArray, *gArray;

    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    /* communication completes in DMGlobalToLocalEnd(); arrays are re-fetched there */
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* type-dispatch path: map the *_ALL_VALUES modes down to the plain modes the
       implementations understand */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1725 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd"
/*@
    DMGlobalToLocalEnd - Ends updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF                 sf;
  PetscErrorCode          ierr;
  PetscScalar             *lArray, *gArray;  /* used only on the SF path below */
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    /* SF-based path: finish the broadcast started in DMGlobalToLocalBegin() */
    if (mode == ADD_VALUES) SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);

    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* type-dispatch path: map the *_ALL_VALUES modes down to the plain modes */
    ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  /* run user end-hooks after all communication has completed */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  PetscFunctionReturn(0);
}
1771 
1772 #undef __FUNCT__
1773 #define __FUNCT__ "DMLocalToGlobalBegin"
1774 /*@
1775     DMLocalToGlobalBegin - updates global vectors from local vectors
1776 
1777     Neighbor-wise Collective on DM
1778 
1779     Input Parameters:
1780 +   dm - the DM object
1781 .   l - the local vector
1782 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1783            base point.
-   g - the global vector
1785 
1786     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1787            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1788            global array to the final global array with VecAXPY().
1789 
1790     Level: beginner
1791 
1792 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1793 
1794 @*/
1795 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1796 {
1797   PetscSF        sf;
1798   PetscErrorCode ierr;
1799 
1800   PetscFunctionBegin;
1801   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1802   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1803   if (sf) {
1804     MPI_Op      op;
1805     PetscScalar *lArray, *gArray;
1806 
1807     switch (mode) {
1808     case INSERT_VALUES:
1809     case INSERT_ALL_VALUES:
1810       op = MPIU_REPLACE; break;
1811     case ADD_VALUES:
1812     case ADD_ALL_VALUES:
1813       op = MPI_SUM; break;
1814     default:
1815       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1816     }
1817     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1818     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1819     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1820     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1821     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1822   } else {
1823     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1824   }
1825   PetscFunctionReturn(0);
1826 }
1827 
1828 #undef __FUNCT__
1829 #define __FUNCT__ "DMLocalToGlobalEnd"
1830 /*@
1831     DMLocalToGlobalEnd - updates global vectors from local vectors
1832 
1833     Neighbor-wise Collective on DM
1834 
1835     Input Parameters:
1836 +   dm - the DM object
1837 .   l - the local vector
1838 .   mode - INSERT_VALUES or ADD_VALUES
1839 -   g - the global vector
1840 
1841 
1842     Level: beginner
1843 
.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToGlobalBegin(), DMGlobalToLocalEnd()
1845 
1846 @*/
1847 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1848 {
1849   PetscSF        sf;
1850   PetscErrorCode ierr;
1851 
1852   PetscFunctionBegin;
1853   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1854   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1855   if (sf) {
1856     MPI_Op      op;
1857     PetscScalar *lArray, *gArray;
1858 
1859     switch (mode) {
1860     case INSERT_VALUES:
1861     case INSERT_ALL_VALUES:
1862       op = MPIU_REPLACE; break;
1863     case ADD_VALUES:
1864     case ADD_ALL_VALUES:
1865       op = MPI_SUM; break;
1866     default:
1867       SETERRQ1(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1868     }
1869     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1870     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1871     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1872     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1873     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1874   } else {
1875     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1876   }
1877   PetscFunctionReturn(0);
1878 }
1879 
1880 #undef __FUNCT__
1881 #define __FUNCT__ "DMLocalToLocalBegin"
1882 /*@
1883    DMLocalToLocalBegin - Maps from a local vector (including ghost points
1884    that contain irrelevant values) to another local vector where the ghost
1885    points in the second are set correctly. Must be followed by DMLocalToLocalEnd().
1886 
1887    Neighbor-wise Collective on DM and Vec
1888 
1889    Input Parameters:
1890 +  dm - the DM object
1891 .  g - the original local vector
1892 -  mode - one of INSERT_VALUES or ADD_VALUES
1893 
1894    Output Parameter:
1895 .  l  - the local vector with correct ghost values
1896 
1897    Level: intermediate
1898 
1899    Notes:
1900    The local vectors used here need not be the same as those
1901    obtained from DMCreateLocalVector(), BUT they
1902    must have the same parallel data layout; they could, for example, be
1903    obtained with VecDuplicate() from the DM originating vectors.
1904 
1905 .keywords: DM, local-to-local, begin
1906 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalEnd(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1907 
1908 @*/
1909 PetscErrorCode  DMLocalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
1910 {
1911   PetscErrorCode          ierr;
1912 
1913   PetscFunctionBegin;
1914   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1915   ierr = (*dm->ops->localtolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1916   PetscFunctionReturn(0);
1917 }
1918 
1919 #undef __FUNCT__
1920 #define __FUNCT__ "DMLocalToLocalEnd"
1921 /*@
1922    DMLocalToLocalEnd - Maps from a local vector (including ghost points
1923    that contain irrelevant values) to another local vector where the ghost
1924    points in the second are set correctly. Must be preceded by DMLocalToLocalBegin().
1925 
1926    Neighbor-wise Collective on DM and Vec
1927 
1928    Input Parameters:
+  dm - the DM object
1930 .  g - the original local vector
1931 -  mode - one of INSERT_VALUES or ADD_VALUES
1932 
1933    Output Parameter:
1934 .  l  - the local vector with correct ghost values
1935 
1936    Level: intermediate
1937 
1938    Notes:
1939    The local vectors used here need not be the same as those
1940    obtained from DMCreateLocalVector(), BUT they
1941    must have the same parallel data layout; they could, for example, be
1942    obtained with VecDuplicate() from the DM originating vectors.
1943 
1944 .keywords: DM, local-to-local, end
1945 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateLocalVector(), DMCreateGlobalVector(), DMCreateInterpolation(), DMLocalToLocalBegin(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1946 
1947 @*/
1948 PetscErrorCode  DMLocalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1949 {
1950   PetscErrorCode          ierr;
1951 
1952   PetscFunctionBegin;
1953   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1954   ierr = (*dm->ops->localtolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1955   PetscFunctionReturn(0);
1956 }
1957 
1958 
1959 #undef __FUNCT__
1960 #define __FUNCT__ "DMCoarsen"
1961 /*@
1962     DMCoarsen - Coarsens a DM object
1963 
1964     Collective on DM
1965 
1966     Input Parameter:
1967 +   dm - the DM object
1968 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1969 
1970     Output Parameter:
1971 .   dmc - the coarsened DM
1972 
1973     Level: developer
1974 
1975 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1976 
1977 @*/
1978 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1979 {
1980   PetscErrorCode    ierr;
1981   DMCoarsenHookLink link;
1982 
1983   PetscFunctionBegin;
1984   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1985   ierr                      = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1986   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1987   ierr                      = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1988   (*dmc)->ctx               = dm->ctx;
1989   (*dmc)->levelup           = dm->levelup;
1990   (*dmc)->leveldown         = dm->leveldown + 1;
1991   ierr                      = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1992   for (link=dm->coarsenhook; link; link=link->next) {
1993     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1994   }
1995   PetscFunctionReturn(0);
1996 }
1997 
1998 #undef __FUNCT__
1999 #define __FUNCT__ "DMCoarsenHookAdd"
2000 /*@C
2001    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
2002 
2003    Logically Collective
2004 
2005    Input Arguments:
2006 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
2007 .  coarsenhook - function to run when setting up a coarser level
2008 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
2009 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2010 
2011    Calling sequence of coarsenhook:
2012 $    coarsenhook(DM fine,DM coarse,void *ctx);
2013 
2014 +  fine - fine level DM
2015 .  coarse - coarse level DM to restrict problem to
2016 -  ctx - optional user-defined function context
2017 
2018    Calling sequence for restricthook:
2019 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
2020 
2021 +  fine - fine level DM
2022 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
2023 .  rscale - scaling vector for restriction
2024 .  inject - matrix restricting by injection
2025 .  coarse - coarse level DM to update
2026 -  ctx - optional user-defined function context
2027 
2028    Level: advanced
2029 
2030    Notes:
2031    This function is only needed if auxiliary data needs to be set up on coarse grids.
2032 
2033    If this function is called multiple times, the hooks will be run in the order they are added.
2034 
2035    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2036    extract the finest level information from its context (instead of from the SNES).
2037 
2038    This function is currently not available from Fortran.
2039 
2040 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2041 @*/
2042 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
2043 {
2044   PetscErrorCode    ierr;
2045   DMCoarsenHookLink link,*p;
2046 
2047   PetscFunctionBegin;
2048   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
2049   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2050   ierr               = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
2051   link->coarsenhook  = coarsenhook;
2052   link->restricthook = restricthook;
2053   link->ctx          = ctx;
2054   link->next         = NULL;
2055   *p                 = link;
2056   PetscFunctionReturn(0);
2057 }
2058 
2059 #undef __FUNCT__
2060 #define __FUNCT__ "DMRestrict"
2061 /*@
2062    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
2063 
2064    Collective if any hooks are
2065 
2066    Input Arguments:
+  fine - finer DM to use as a base
.  restrct - restriction matrix, apply using MatRestrict()
.  rscale - scaling vector for restriction
.  inject - injection matrix, also use MatRestrict()
-  coarse - coarser DM to update
2071 
2072    Level: developer
2073 
2074 .seealso: DMCoarsenHookAdd(), MatRestrict()
2075 @*/
2076 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
2077 {
2078   PetscErrorCode    ierr;
2079   DMCoarsenHookLink link;
2080 
2081   PetscFunctionBegin;
2082   for (link=fine->coarsenhook; link; link=link->next) {
2083     if (link->restricthook) {
2084       ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);
2085     }
2086   }
2087   PetscFunctionReturn(0);
2088 }
2089 
2090 #undef __FUNCT__
2091 #define __FUNCT__ "DMSubDomainHookAdd"
2092 /*@C
2093    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
2094 
2095    Logically Collective
2096 
2097    Input Arguments:
2098 +  global - global DM
2099 .  ddhook - function to run to pass data to the decomposition DM upon its creation
2100 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2101 -  ctx - [optional] user-defined context for provide data for the hooks (may be NULL)
2102 
2103 
2104    Calling sequence for ddhook:
2105 $    ddhook(DM global,DM block,void *ctx)
2106 
2107 +  global - global DM
2108 .  block  - block DM
2109 -  ctx - optional user-defined function context
2110 
2111    Calling sequence for restricthook:
2112 $    restricthook(DM global,VecScatter out,VecScatter in,DM block,void *ctx)
2113 
2114 +  global - global DM
2115 .  out    - scatter to the outer (with ghost and overlap points) block vector
2116 .  in     - scatter to block vector values only owned locally
2117 .  block  - block DM
2118 -  ctx - optional user-defined function context
2119 
2120    Level: advanced
2121 
2122    Notes:
2123    This function is only needed if auxiliary data needs to be set up on subdomain DMs.
2124 
2125    If this function is called multiple times, the hooks will be run in the order they are added.
2126 
2127    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2128    extract the global information from its context (instead of from the SNES).
2129 
2130    This function is currently not available from Fortran.
2131 
2132 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2133 @*/
2134 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2135 {
2136   PetscErrorCode      ierr;
2137   DMSubDomainHookLink link,*p;
2138 
2139   PetscFunctionBegin;
2140   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2141   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2142   ierr               = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2143   link->restricthook = restricthook;
2144   link->ddhook       = ddhook;
2145   link->ctx          = ctx;
2146   link->next         = NULL;
2147   *p                 = link;
2148   PetscFunctionReturn(0);
2149 }
2150 
2151 #undef __FUNCT__
2152 #define __FUNCT__ "DMSubDomainRestrict"
2153 /*@
2154    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2155 
2156    Collective if any hooks are
2157 
2158    Input Arguments:
2159 +  fine - finer DM to use as a base
2160 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2161 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2162 -  coarse - coarer DM to update
2163 
2164    Level: developer
2165 
2166 .seealso: DMCoarsenHookAdd(), MatRestrict()
2167 @*/
2168 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2169 {
2170   PetscErrorCode      ierr;
2171   DMSubDomainHookLink link;
2172 
2173   PetscFunctionBegin;
2174   for (link=global->subdomainhook; link; link=link->next) {
2175     if (link->restricthook) {
2176       ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);
2177     }
2178   }
2179   PetscFunctionReturn(0);
2180 }
2181 
2182 #undef __FUNCT__
2183 #define __FUNCT__ "DMGetCoarsenLevel"
2184 /*@
    DMGetCoarsenLevel - Gets the number of coarsenings that have generated this DM.
2186 
2187     Not Collective
2188 
2189     Input Parameter:
2190 .   dm - the DM object
2191 
2192     Output Parameter:
2193 .   level - number of coarsenings
2194 
2195     Level: developer
2196 
2197 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2198 
2199 @*/
2200 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2201 {
2202   PetscFunctionBegin;
2203   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2204   *level = dm->leveldown;
2205   PetscFunctionReturn(0);
2206 }
2207 
2208 
2209 
2210 #undef __FUNCT__
2211 #define __FUNCT__ "DMRefineHierarchy"
2212 /*@C
2213     DMRefineHierarchy - Refines a DM object, all levels at once
2214 
2215     Collective on DM
2216 
2217     Input Parameter:
2218 +   dm - the DM object
2219 -   nlevels - the number of levels of refinement
2220 
2221     Output Parameter:
2222 .   dmf - the refined DM hierarchy
2223 
2224     Level: developer
2225 
2226 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2227 
2228 @*/
2229 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2230 {
2231   PetscErrorCode ierr;
2232 
2233   PetscFunctionBegin;
2234   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2235   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2236   if (nlevels == 0) PetscFunctionReturn(0);
2237   if (dm->ops->refinehierarchy) {
2238     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2239   } else if (dm->ops->refine) {
2240     PetscInt i;
2241 
2242     ierr = DMRefine(dm,PetscObjectComm((PetscObject)dm),&dmf[0]);CHKERRQ(ierr);
2243     for (i=1; i<nlevels; i++) {
2244       ierr = DMRefine(dmf[i-1],PetscObjectComm((PetscObject)dm),&dmf[i]);CHKERRQ(ierr);
2245     }
2246   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2247   PetscFunctionReturn(0);
2248 }
2249 
2250 #undef __FUNCT__
2251 #define __FUNCT__ "DMCoarsenHierarchy"
2252 /*@C
2253     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2254 
2255     Collective on DM
2256 
2257     Input Parameter:
2258 +   dm - the DM object
2259 -   nlevels - the number of levels of coarsening
2260 
2261     Output Parameter:
2262 .   dmc - the coarsened DM hierarchy
2263 
2264     Level: developer
2265 
2266 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2267 
2268 @*/
2269 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2270 {
2271   PetscErrorCode ierr;
2272 
2273   PetscFunctionBegin;
2274   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2275   if (nlevels < 0) SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2276   if (nlevels == 0) PetscFunctionReturn(0);
2277   PetscValidPointer(dmc,3);
2278   if (dm->ops->coarsenhierarchy) {
2279     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2280   } else if (dm->ops->coarsen) {
2281     PetscInt i;
2282 
2283     ierr = DMCoarsen(dm,PetscObjectComm((PetscObject)dm),&dmc[0]);CHKERRQ(ierr);
2284     for (i=1; i<nlevels; i++) {
2285       ierr = DMCoarsen(dmc[i-1],PetscObjectComm((PetscObject)dm),&dmc[i]);CHKERRQ(ierr);
2286     }
2287   } else SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2288   PetscFunctionReturn(0);
2289 }
2290 
2291 #undef __FUNCT__
2292 #define __FUNCT__ "DMCreateAggregates"
2293 /*@
2294    DMCreateAggregates - Gets the aggregates that map between
2295    grids associated with two DMs.
2296 
2297    Collective on DM
2298 
2299    Input Parameters:
2300 +  dmc - the coarse grid DM
2301 -  dmf - the fine grid DM
2302 
2303    Output Parameters:
2304 .  rest - the restriction matrix (transpose of the projection matrix)
2305 
2306    Level: intermediate
2307 
2308 .keywords: interpolation, restriction, multigrid
2309 
2310 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2311 @*/
PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
  /* Dispatch to the coarse DM's implementation.
     NOTE(review): getaggregates is invoked without a NULL check, unlike the
     SETERRQ(PETSC_ERR_SUP,...) pattern used elsewhere in this file — confirm
     whether all DM types registered here implement it. */
  ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2322 
2323 #undef __FUNCT__
2324 #define __FUNCT__ "DMSetApplicationContextDestroy"
2325 /*@C
2326     DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed
2327 
2328     Not Collective
2329 
2330     Input Parameters:
2331 +   dm - the DM object
2332 -   destroy - the destroy function
2333 
2334     Level: intermediate
2335 
2336 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2337 
2338 @*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Store the callback; it receives the address of the context (void**) when the DM is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2346 
2347 #undef __FUNCT__
2348 #define __FUNCT__ "DMSetApplicationContext"
2349 /*@
2350     DMSetApplicationContext - Set a user context into a DM object
2351 
2352     Not Collective
2353 
2354     Input Parameters:
2355 +   dm - the DM object
2356 -   ctx - the user context
2357 
2358     Level: intermediate
2359 
2360 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2361 
2362 @*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM stores but does not own ctx unless a destroy callback is set via
     DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2370 
2371 #undef __FUNCT__
2372 #define __FUNCT__ "DMGetApplicationContext"
2373 /*@
2374     DMGetApplicationContext - Gets a user context from a DM object
2375 
2376     Not Collective
2377 
2378     Input Parameter:
2379 .   dm - the DM object
2380 
2381     Output Parameter:
2382 .   ctx - the user context
2383 
2384     Level: intermediate
2385 
2386 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2387 
2388 @*/
PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* ctx is really the address of the caller's context pointer (void**); it is
     written through, so callers must pass &theirctx */
  *(void**)ctx = dm->ctx;
  PetscFunctionReturn(0);
}
2396 
2397 #undef __FUNCT__
2398 #define __FUNCT__ "DMSetVariableBounds"
2399 /*@C
    DMSetVariableBounds - sets a function to compute the lower and upper bound vectors for SNESVI.
2401 
2402     Logically Collective on DM
2403 
2404     Input Parameter:
2405 +   dm - the DM object
2406 -   f - the function that computes variable bounds used by SNESVI (use NULL to cancel a previous function that was set)
2407 
2408     Level: intermediate
2409 
2410 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2411          DMSetJacobian()
2412 
2413 @*/
2414 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2415 {
2416   PetscFunctionBegin;
2417   dm->ops->computevariablebounds = f;
2418   PetscFunctionReturn(0);
2419 }
2420 
2421 #undef __FUNCT__
2422 #define __FUNCT__ "DMHasVariableBounds"
2423 /*@
2424     DMHasVariableBounds - does the DM object have a variable bounds function?
2425 
2426     Not Collective
2427 
2428     Input Parameter:
.   dm - the DM object
2430 
2431     Output Parameter:
2432 .   flg - PETSC_TRUE if the variable bounds function exists
2433 
2434     Level: developer
2435 
2436 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2437 
2438 @*/
2439 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2440 {
2441   PetscFunctionBegin;
2442   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2443   PetscFunctionReturn(0);
2444 }
2445 
2446 #undef __FUNCT__
2447 #define __FUNCT__ "DMComputeVariableBounds"
2448 /*@C
2449     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2450 
2451     Logically Collective on DM
2452 
2453     Input Parameters:
+   dm - the DM object
2455 -   x  - current solution at which the bounds are computed
2456 
2457     Output parameters:
2458 +   xl - lower bound
2459 -   xu - upper bound
2460 
2461     Level: intermediate
2462 
2463 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2464 
2465 @*/
2466 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2467 {
2468   PetscErrorCode ierr;
2469 
2470   PetscFunctionBegin;
2471   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2472   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2473   if (dm->ops->computevariablebounds) {
2474     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2475   } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2476   PetscFunctionReturn(0);
2477 }
2478 
2479 #undef __FUNCT__
2480 #define __FUNCT__ "DMHasColoring"
2481 /*@
2482     DMHasColoring - does the DM object have a method of providing a coloring?
2483 
2484     Not Collective
2485 
2486     Input Parameter:
2487 .   dm - the DM object
2488 
2489     Output Parameter:
2490 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2491 
2492     Level: developer
2493 
2494 .seealso DMHasFunction(), DMCreateColoring()
2495 
2496 @*/
2497 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2498 {
2499   PetscFunctionBegin;
2500   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2501   PetscFunctionReturn(0);
2502 }
2503 
2504 #undef  __FUNCT__
2505 #define __FUNCT__ "DMSetVec"
2506 /*@C
2507     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2508 
2509     Collective on DM
2510 
2511     Input Parameter:
2512 +   dm - the DM object
2513 -   x - location to compute residual and Jacobian, if NULL is passed to those routines; will be NULL for linear problems.
2514 
2515     Level: developer
2516 
2517 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2518 
2519 @*/
2520 PetscErrorCode  DMSetVec(DM dm,Vec x)
2521 {
2522   PetscErrorCode ierr;
2523 
2524   PetscFunctionBegin;
2525   if (x) {
2526     if (!dm->x) {
2527       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2528     }
2529     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2530   } else if (dm->x) {
2531     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2532   }
2533   PetscFunctionReturn(0);
2534 }
2535 
/* Registry of DM constructors, populated via DMRegister()/DMRegisterAll() */
PetscFunctionList DMList              = NULL;
/* Set once DMRegisterAll() has run, so default types are registered only once */
PetscBool         DMRegisterAllCalled = PETSC_FALSE;
2538 
2539 #undef __FUNCT__
2540 #define __FUNCT__ "DMSetType"
2541 /*@C
2542   DMSetType - Builds a DM, for a particular DM implementation.
2543 
2544   Collective on DM
2545 
2546   Input Parameters:
2547 + dm     - The DM object
2548 - method - The name of the DM type
2549 
2550   Options Database Key:
2551 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2552 
2553   Notes:
2554   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2555 
2556   Level: intermediate
2557 
2558 .keywords: DM, set, type
2559 .seealso: DMGetType(), DMCreate()
2560 @*/
2561 PetscErrorCode  DMSetType(DM dm, DMType method)
2562 {
2563   PetscErrorCode (*r)(DM);
2564   PetscBool      match;
2565   PetscErrorCode ierr;
2566 
2567   PetscFunctionBegin;
2568   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2569   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2570   if (match) PetscFunctionReturn(0);
2571 
2572   if (!DMRegisterAllCalled) {ierr = DMRegisterAll();CHKERRQ(ierr);}
2573   ierr = PetscFunctionListFind(DMList,method,&r);CHKERRQ(ierr);
2574   if (!r) SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2575 
2576   if (dm->ops->destroy) {
2577     ierr             = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2578     dm->ops->destroy = NULL;
2579   }
2580   ierr = (*r)(dm);CHKERRQ(ierr);
2581   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2582   PetscFunctionReturn(0);
2583 }
2584 
2585 #undef __FUNCT__
2586 #define __FUNCT__ "DMGetType"
2587 /*@C
2588   DMGetType - Gets the DM type name (as a string) from the DM.
2589 
2590   Not Collective
2591 
2592   Input Parameter:
2593 . dm  - The DM
2594 
2595   Output Parameter:
2596 . type - The DM type name
2597 
2598   Level: intermediate
2599 
2600 .keywords: DM, get, type, name
2601 .seealso: DMSetType(), DMCreate()
2602 @*/
2603 PetscErrorCode  DMGetType(DM dm, DMType *type)
2604 {
2605   PetscErrorCode ierr;
2606 
2607   PetscFunctionBegin;
2608   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2609   PetscValidCharPointer(type,2);
2610   if (!DMRegisterAllCalled) {
2611     ierr = DMRegisterAll();CHKERRQ(ierr);
2612   }
2613   *type = ((PetscObject)dm)->type_name;
2614   PetscFunctionReturn(0);
2615 }
2616 
2617 #undef __FUNCT__
2618 #define __FUNCT__ "DMConvert"
2619 /*@C
2620   DMConvert - Converts a DM to another DM, either of the same or different type.
2621 
2622   Collective on DM
2623 
2624   Input Parameters:
2625 + dm - the DM
2626 - newtype - new DM type (use "same" for the same type)
2627 
2628   Output Parameter:
2629 . M - pointer to new DM
2630 
2631   Notes:
2632   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2633   the MPI communicator of the generated DM is always the same as the communicator
2634   of the input DM.
2635 
2636   Level: intermediate
2637 
2638 .seealso: DMCreate()
2639 @*/
PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
{
  DM             B;
  char           convname[256];
  PetscBool      sametype, issame;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidPointer(M,3);
  /* NOTE(review): sametype and issame are computed but never consulted below,
     so a same-type fast path appears to be missing — confirm intended behavior */
  ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
  ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
  {
    PetscErrorCode (*conv)(DM, DMType, DM*) = NULL;

    /*
       Order of precedence:
       1) See if a specialized converter is known to the current DM.
       2) See if a specialized converter is known to the desired DM class.
       3) See if a good general converter is registered for the desired class
       4) See if a good general converter is known for the current matrix.
       5) Use a really basic converter.
    */

    /* 1) See if a specialized converter is known to the current DM and the desired class */
    /* Build the composed-function name "DMConvert_<fromtype>_<totype>_C" */
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)dm,convname,&conv);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 2)  See if a specialized converter is known to the desired DM class. */
    /* A temporary DM of the target type is created just to query its composed functions */
    ierr = DMCreate(PetscObjectComm((PetscObject)dm), &B);CHKERRQ(ierr);
    ierr = DMSetType(B, newtype);CHKERRQ(ierr);
    ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
    ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
    ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
    ierr = PetscObjectQueryFunction((PetscObject)B,convname,&conv);CHKERRQ(ierr);
    if (conv) {
      ierr = DMDestroy(&B);CHKERRQ(ierr);
      goto foundconv;
    }

#if 0
    /* 3) See if a good general converter is registered for the desired class */
    conv = B->ops->convertfrom;
    ierr = DMDestroy(&B);CHKERRQ(ierr);
    if (conv) goto foundconv;

    /* 4) See if a good general converter is known for the current matrix */
    if (dm->ops->convert) {
      conv = dm->ops->convert;
    }
    if (conv) goto foundconv;
#endif

    /* 5) Use a really basic converter. */
    /* No basic converter exists yet; fail with an unsupported-operation error.
       NOTE(review): when this path is taken after step 2, B is leaked — confirm. */
    SETERRQ2(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);

foundconv:
    /* Run the chosen converter under the DM_Convert logging event */
    ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
    ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
    ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
  }
  ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2712 
2713 /*--------------------------------------------------------------------------------------------------------------------*/
2714 
2715 #undef __FUNCT__
2716 #define __FUNCT__ "DMRegister"
2717 /*@C
2718   DMRegister -  Adds a new DM component implementation
2719 
2720   Not Collective
2721 
2722   Input Parameters:
2723 + name        - The name of a new user-defined creation routine
2724 - create_func - The creation routine itself
2725 
2726   Notes:
2727   DMRegister() may be called multiple times to add several user-defined DMs
2728 
2729 
2730   Sample usage:
2731 .vb
2732     DMRegister("my_da", MyDMCreate);
2733 .ve
2734 
2735   Then, your DM type can be chosen with the procedural interface via
2736 .vb
2737     DMCreate(MPI_Comm, DM *);
2738     DMSetType(DM,"my_da");
2739 .ve
2740    or at runtime via the option
2741 .vb
2742     -da_type my_da
2743 .ve
2744 
2745   Level: advanced
2746 
2747 .keywords: DM, register
2748 .seealso: DMRegisterAll(), DMRegisterDestroy()
2749 
2750 @*/
PetscErrorCode  DMRegister(const char sname[],PetscErrorCode (*function)(DM))
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Record the constructor under 'sname' in the global DM registry (DMList);
     DMSetType() later looks the name up there to instantiate the type. */
  ierr = PetscFunctionListAdd(&DMList,sname,function);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2759 
2760 #undef __FUNCT__
2761 #define __FUNCT__ "DMLoad"
2762 /*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().
2764 
2765   Collective on PetscViewer
2766 
2767   Input Parameters:
2768 + newdm - the newly loaded DM, this needs to have been created with DMCreate() or
2769            some related function before a call to DMLoad().
2770 - viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
2771            HDF5 file viewer, obtained from PetscViewerHDF5Open()
2772 
2773    Level: intermediate
2774 
2775   Notes:
2776    The type is determined by the data in the file, any type set into the DM before this call is ignored.
2777 
2778   Notes for advanced users:
2779   Most users should not need to know the details of the binary storage
2780   format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary DM storage
  format is
2783 .vb
2784      has not yet been determined
2785 .ve
2786 
2787 .seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
2788 @*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* NOTE(review): the man page above mentions HDF5 viewers, but only binary
     viewers are accepted here -- confirm whether HDF5 support is pending. */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File layout read here: a class id sentinel, then the DM type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(PetscObjectComm((PetscObject)newdm),PETSC_ERR_ARG_WRONG,"Not DM next in file");
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the remainder of the read to the type-specific implementation, if any */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2811 
2812 /******************************** FEM Support **********************************/
2813 
2814 #undef __FUNCT__
2815 #define __FUNCT__ "DMPrintCellVector"
2816 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[])
2817 {
2818   PetscInt       f;
2819   PetscErrorCode ierr;
2820 
2821   PetscFunctionBegin;
2822   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2823   for (f = 0; f < len; ++f) {
2824     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2825   }
2826   PetscFunctionReturn(0);
2827 }
2828 
2829 #undef __FUNCT__
2830 #define __FUNCT__ "DMPrintCellMatrix"
2831 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[])
2832 {
2833   PetscInt       f, g;
2834   PetscErrorCode ierr;
2835 
2836   PetscFunctionBegin;
2837   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2838   for (f = 0; f < rows; ++f) {
2839     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2840     for (g = 0; g < cols; ++g) {
2841       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2842     }
2843     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2844   }
2845   PetscFunctionReturn(0);
2846 }
2847 
2848 #undef __FUNCT__
2849 #define __FUNCT__ "DMPrintLocalVec"
2850 PetscErrorCode DMPrintLocalVec(DM dm, const char name[], PetscReal tol, Vec X)
2851 {
2852   PetscMPIInt    rank, numProcs;
2853   PetscInt       p;
2854   PetscErrorCode ierr;
2855 
2856   PetscFunctionBegin;
2857   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject) dm), &rank);CHKERRQ(ierr);
2858   ierr = MPI_Comm_size(PetscObjectComm((PetscObject) dm), &numProcs);CHKERRQ(ierr);
2859   ierr = PetscPrintf(PetscObjectComm((PetscObject) dm), "%s:\n", name);CHKERRQ(ierr);
2860   for (p = 0; p < numProcs; ++p) {
2861     if (p == rank) {
2862       Vec x;
2863 
2864       ierr = VecDuplicate(X, &x);CHKERRQ(ierr);
2865       ierr = VecCopy(X, x);CHKERRQ(ierr);
2866       ierr = VecChop(x, tol);CHKERRQ(ierr);
2867       ierr = VecView(x, PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2868       ierr = VecDestroy(&x);CHKERRQ(ierr);
2869       ierr = PetscViewerFlush(PETSC_VIEWER_STDOUT_SELF);CHKERRQ(ierr);
2870     }
2871     ierr = PetscBarrier((PetscObject) dm);CHKERRQ(ierr);
2872   }
2873   PetscFunctionReturn(0);
2874 }
2875 
2876 #undef __FUNCT__
2877 #define __FUNCT__ "DMGetDefaultSection"
2878 /*@
2879   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2880 
2881   Input Parameter:
2882 . dm - The DM
2883 
2884   Output Parameter:
2885 . section - The PetscSection
2886 
2887   Level: intermediate
2888 
2889   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2890 
2891 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2892 @*/
PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Borrowed reference: no refcount taken, may be NULL if never set */
  *section = dm->defaultSection;
  PetscFunctionReturn(0);
}
2901 
2902 #undef __FUNCT__
2903 #define __FUNCT__ "DMSetDefaultSection"
2904 /*@
2905   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2906 
2907   Input Parameters:
2908 + dm - The DM
2909 - section - The PetscSection
2910 
2911   Level: intermediate
2912 
2913   Note: Any existing Section will be destroyed
2914 
2915 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2916 @*/
PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section)
{
  PetscInt       numFields;
  PetscInt       f;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* Reference before destroy so passing the currently-set section is safe */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
  dm->defaultSection = section;
  /* Mirror the section's field count and names onto the DM's field objects */
  ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
  if (numFields) {
    ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
    for (f = 0; f < numFields; ++f) {
      const char *name;

      ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
      ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
    }
  }
  /* The global section will be rebuilt in the next call to DMGetDefaultGlobalSection(). */
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
2943 
2944 #undef __FUNCT__
2945 #define __FUNCT__ "DMGetDefaultGlobalSection"
2946 /*@
2947   DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.
2948 
2949   Collective on DM
2950 
2951   Input Parameter:
2952 . dm - The DM
2953 
2954   Output Parameter:
2955 . section - The PetscSection
2956 
2957   Level: intermediate
2958 
2959   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2960 
2961 .seealso: DMSetDefaultSection(), DMGetDefaultSection()
2962 @*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily build the global section from the local section and the point SF,
     and refresh the DM's value layout to match it */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&dm->map);CHKERRQ(ierr);
    ierr = PetscSectionGetValueLayout(PetscObjectComm((PetscObject)dm),dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2979 
2980 #undef __FUNCT__
2981 #define __FUNCT__ "DMSetDefaultGlobalSection"
2982 /*@
2983   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2984 
2985   Input Parameters:
2986 + dm - The DM
2987 - section - The PetscSection, or NULL
2988 
2989   Level: intermediate
2990 
2991   Note: Any existing Section will be destroyed
2992 
2993 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2994 @*/
PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  if (section) PetscValidHeaderSpecific(section,PETSC_SECTION_CLASSID,2);
  /* NOTE(review): section may be NULL here (to clear the global section);
     confirm PetscObjectReference() tolerates NULL in this PETSc version. */
  ierr = PetscObjectReference((PetscObject)section);CHKERRQ(ierr);
  ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
  dm->defaultGlobalSection = section;
  PetscFunctionReturn(0);
}
3007 
3008 #undef __FUNCT__
3009 #define __FUNCT__ "DMGetDefaultSF"
3010 /*@
3011   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
3012   it is created from the default PetscSection layouts in the DM.
3013 
3014   Input Parameter:
3015 . dm - The DM
3016 
3017   Output Parameter:
3018 . sf - The PetscSF
3019 
3020   Level: intermediate
3021 
3022   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3023 
3024 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
3025 @*/
PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf)
{
  PetscInt       nroots;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* nroots < 0 means the SF's graph has never been set */
  ierr = PetscSFGetGraph(dm->defaultSF, &nroots, NULL, NULL, NULL);CHKERRQ(ierr);
  if (nroots < 0) {
    PetscSection section, gSection;

    /* Lazily build the dof SF from the local/global sections when available;
       with no local section there is nothing to build, so return NULL */
    ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
    if (section) {
      ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
      ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
    } else {
      *sf = NULL;
      PetscFunctionReturn(0);
    }
  }
  /* Borrowed reference: caller must not destroy it */
  *sf = dm->defaultSF;
  PetscFunctionReturn(0);
}
3050 
3051 #undef __FUNCT__
3052 #define __FUNCT__ "DMSetDefaultSF"
3053 /*@
3054   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
3055 
3056   Input Parameters:
3057 + dm - The DM
3058 - sf - The PetscSF
3059 
3060   Level: intermediate
3061 
3062   Note: Any previous SF is destroyed
3063 
3064 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
3065 @*/
PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
  /* NOTE(review): unlike DMSetPointSF(), no reference is taken on sf, so the
     DM assumes ownership of the caller's reference -- confirm callers rely on
     this ownership transfer before changing it. */
  ierr          = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
  dm->defaultSF = sf;
  PetscFunctionReturn(0);
}
3077 
3078 #undef __FUNCT__
3079 #define __FUNCT__ "DMCreateDefaultSF"
3080 /*@C
3081   DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
3082   describing the data layout.
3083 
3084   Input Parameters:
3085 + dm - The DM
3086 . localSection - PetscSection describing the local data layout
3087 - globalSection - PetscSection describing the global data layout
3088 
3089   Level: intermediate
3090 
3091 .seealso: DMGetDefaultSF(), DMSetDefaultSF()
3092 @*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm       comm;
  PetscLayout    layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt       pStart, pEnd, p, nroots, nleaves = 0, l;
  PetscMPIInt    size, rank;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectGetComm((PetscObject)dm,&comm);CHKERRQ(ierr);
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Build a layout of the owned (constrained) dofs so that a global offset can
     be mapped back to its owning rank via the range table */
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count the leaves. A negative global dof encodes -(gdof+1) for
     a point owned by another process; either way constrained dofs are excluded */
  for (p = pStart; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr     = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr     = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: fill local dof indices and their (rank, remote index) targets */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt       dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Record the unconstrained local dof numbers, skipping constraint indices */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true offset, then binary-search the
         layout ranges for the owning rank */
      for (d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2);
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the leaf maps to this rank's own root range */
      for (d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of 'local' and 'remote' (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3167 
3168 #undef __FUNCT__
3169 #define __FUNCT__ "DMGetPointSF"
3170 /*@
3171   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3172 
3173   Input Parameter:
3174 . dm - The DM
3175 
3176   Output Parameter:
3177 . sf - The PetscSF
3178 
3179   Level: intermediate
3180 
3181   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3182 
3183 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3184 @*/
PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(sf, 2);
  /* Borrowed reference: no refcount taken, may be NULL if never set */
  *sf = dm->sf;
  PetscFunctionReturn(0);
}
3193 
3194 #undef __FUNCT__
3195 #define __FUNCT__ "DMSetPointSF"
3196 /*@
3197   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3198 
3199   Input Parameters:
3200 + dm - The DM
3201 - sf - The PetscSF
3202 
3203   Level: intermediate
3204 
3205 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3206 @*/
3207 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf)
3208 {
3209   PetscErrorCode ierr;
3210 
3211   PetscFunctionBegin;
3212   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3213   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3214   ierr   = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3215   ierr   = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3216   dm->sf = sf;
3217   PetscFunctionReturn(0);
3218 }
3219 
3220 #undef __FUNCT__
3221 #define __FUNCT__ "DMGetNumFields"
/* DMGetNumFields - returns the number of field objects attached to the DM. */
PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(numFields, 2);
  *numFields = dm->numFields;
  PetscFunctionReturn(0);
}
3230 
3231 #undef __FUNCT__
3232 #define __FUNCT__ "DMSetNumFields"
3233 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3234 {
3235   PetscInt       f;
3236   PetscErrorCode ierr;
3237 
3238   PetscFunctionBegin;
3239   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3240   for (f = 0; f < dm->numFields; ++f) {
3241     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3242   }
3243   ierr          = PetscFree(dm->fields);CHKERRQ(ierr);
3244   dm->numFields = numFields;
3245   ierr          = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3246   for (f = 0; f < dm->numFields; ++f) {
3247     ierr = PetscContainerCreate(PetscObjectComm((PetscObject)dm), (PetscContainer*) &dm->fields[f]);CHKERRQ(ierr);
3248   }
3249   PetscFunctionReturn(0);
3250 }
3251 
3252 #undef __FUNCT__
3253 #define __FUNCT__ "DMGetField"
3254 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3255 {
3256   PetscFunctionBegin;
3257   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3258   PetscValidPointer(field, 2);
3259   if (!dm->fields) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3260   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(PetscObjectComm((PetscObject)dm), PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3261   *field = dm->fields[f];
3262   PetscFunctionReturn(0);
3263 }
3264 
3265 #undef __FUNCT__
3266 #define __FUNCT__ "DMRestrictHook_Coordinates"
/* Coarsen hook: when a DM with coordinates is coarsened, injects the fine-grid
   coordinates into the coarse DM if the coarse DM has none of its own. */
PetscErrorCode DMRestrictHook_Coordinates(DM dm,DM dmc,void *ctx)
{
  DM dm_coord,dmc_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords;
  VecScatter scat;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(dmc,&dmc_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dmc,&ccoords);CHKERRQ(ierr);
  /* Only act when the fine DM has coordinates and the coarse DM lacks them */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(dmc_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateInjection(dmc_coord,dm_coord,&scat);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat,coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(dmc,ccoords);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat);CHKERRQ(ierr);
    /* DMSetCoordinates took its own reference, so drop ours */
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3289 
3290 #undef __FUNCT__
3291 #define __FUNCT__ "DMSubDomainHook_Coordinates"
/* Subdomain hook: scatters a DM's coordinates (both global and local forms)
   onto a newly created subdomain DM that has no coordinates yet. */
static PetscErrorCode DMSubDomainHook_Coordinates(DM dm,DM subdm,void *ctx)
{
  DM dm_coord,subdm_coord;
  PetscErrorCode ierr;
  Vec coords,ccoords,clcoords;
  VecScatter *scat_i,*scat_g;
  PetscFunctionBegin;
  ierr = DMGetCoordinateDM(dm,&dm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinateDM(subdm,&subdm_coord);CHKERRQ(ierr);
  ierr = DMGetCoordinates(dm,&coords);CHKERRQ(ierr);
  ierr = DMGetCoordinates(subdm,&ccoords);CHKERRQ(ierr);
  /* Only act when the parent has coordinates and the subdomain lacks them */
  if (coords && !ccoords) {
    ierr = DMCreateGlobalVector(subdm_coord,&ccoords);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(subdm_coord,&clcoords);CHKERRQ(ierr);
    /* scat_i fills the subdomain's global vector, scat_g its local vector */
    ierr = DMCreateDomainDecompositionScatters(dm_coord,1,&subdm_coord,NULL,&scat_i,&scat_g);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterBegin(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_i[0],coords,ccoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = VecScatterEnd(scat_g[0],coords,clcoords,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = DMSetCoordinates(subdm,ccoords);CHKERRQ(ierr);
    ierr = DMSetCoordinatesLocal(subdm,clcoords);CHKERRQ(ierr);
    /* The setters above take their own references; clean up ours and the
       scatter arrays allocated by DMCreateDomainDecompositionScatters */
    ierr = VecScatterDestroy(&scat_i[0]);CHKERRQ(ierr);
    ierr = VecScatterDestroy(&scat_g[0]);CHKERRQ(ierr);
    ierr = VecDestroy(&ccoords);CHKERRQ(ierr);
    ierr = VecDestroy(&clcoords);CHKERRQ(ierr);
    ierr = PetscFree(scat_i);CHKERRQ(ierr);
    ierr = PetscFree(scat_g);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
3322 
3323 #undef __FUNCT__
3324 #define __FUNCT__ "DMSetCoordinates"
3325 /*@
3326   DMSetCoordinates - Sets into the DM a global vector that holds the coordinates
3327 
3328   Collective on DM
3329 
3330   Input Parameters:
3331 + dm - the DM
3332 - c - coordinate vector
3333 
3334   Note:
3335   The coordinates do include those for ghost points, which are in the local vector
3336 
3337   Level: intermediate
3338 
3339 .keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3341 @*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently-set vector is safe */
  ierr            = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr            = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local form; DMGetCoordinatesLocal() will rebuild it */
  ierr            = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  /* Ensure coordinates are propagated automatically to coarsened DMs and subdomains */
  ierr            = DMCoarsenHookAdd(dm,DMRestrictHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  ierr            = DMSubDomainHookAdd(dm,DMSubDomainHook_Coordinates,NULL,NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3357 
3358 #undef __FUNCT__
3359 #define __FUNCT__ "DMSetCoordinatesLocal"
3360 /*@
3361   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3362 
3363   Collective on DM
3364 
3365    Input Parameters:
3366 +  dm - the DM
3367 -  c - coordinate vector
3368 
3369   Note:
3370   The coordinates of ghost points can be set using DMSetCoordinates()
3371   followed by DMGetCoordinatesLocal(). This is intended to enable the
3372   setting of ghost coordinates outside of the domain.
3373 
3374   Level: intermediate
3375 
3376 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3377 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3378 @*/
PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently-set vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);

  dm->coordinatesLocal = c;

  /* Invalidate the cached global form; DMGetCoordinates() will rebuild it.
     NOTE(review): unlike DMSetCoordinates(), no coarsen/subdomain hooks are
     registered here -- confirm whether that is intentional. */
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3394 
3395 #undef __FUNCT__
3396 #define __FUNCT__ "DMGetCoordinates"
3397 /*@
3398   DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.
3399 
3400   Not Collective
3401 
3402   Input Parameter:
3403 . dm - the DM
3404 
3405   Output Parameter:
3406 . c - global coordinate vector
3407 
3408   Note:
3409   This is a borrowed reference, so the user should NOT destroy this vector
3410 
3411   Each process has only the local coordinates (does NOT have the ghost coordinates).
3412 
3413   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3414   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3415 
3416   Level: intermediate
3417 
3418 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3419 .seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
3420 @*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global form from the local form via the coordinate DM */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it; may be NULL if never set */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3440 
3441 #undef __FUNCT__
3442 #define __FUNCT__ "DMGetCoordinatesLocal"
3443 /*@
3444   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3445 
3446   Collective on DM
3447 
3448   Input Parameter:
3449 . dm - the DM
3450 
3451   Output Parameter:
3452 . c - coordinate vector
3453 
3454   Note:
3455   This is a borrowed reference, so the user should NOT destroy this vector
3456 
3457   Each process has the local and ghost coordinates
3458 
3459   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3460   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3461 
3462   Level: intermediate
3463 
3464 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3465 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3466 @*/
PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the local (ghosted) form from the global form via the
     coordinate DM's global-to-local scatter */
  if (!dm->coordinatesLocal && dm->coordinates) {
    DM cdm = NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
    ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
    ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it; may be NULL if never set */
  *c = dm->coordinatesLocal;
  PetscFunctionReturn(0);
}
3486 
3487 #undef __FUNCT__
3488 #define __FUNCT__ "DMGetCoordinateDM"
3489 /*@
3490   DMGetCoordinateDM - Gets the DM that prescribes coordinate layout and scatters between global and local coordinates
3491 
3492   Collective on DM
3493 
3494   Input Parameter:
3495 . dm - the DM
3496 
3497   Output Parameter:
3498 . cdm - coordinate DM
3499 
3500   Level: intermediate
3501 
3502 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3503 .seealso: DMSetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3504 @*/
PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(cdm,2);
  /* Lazily create the coordinate DM via the type-specific hook; errors if the
     DM implementation does not provide one */
  if (!dm->coordinateDM) {
    if (!dm->ops->createcoordinatedm) SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Unable to create coordinates for this DM");
    ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
  }
  /* Borrowed reference: caller must not destroy it */
  *cdm = dm->coordinateDM;
  PetscFunctionReturn(0);
}
3519 
3520 #undef __FUNCT__
3521 #define __FUNCT__ "DMSetCoordinateDM"
3522 /*@
3523   DMSetCoordinateDM - Sets the DM that prescribes coordinate layout and scatters between global and local coordinates
3524 
3525   Logically Collective on DM
3526 
3527   Input Parameters:
3528 + dm - the DM
3529 - cdm - coordinate DM
3530 
3531   Level: intermediate
3532 
3533 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3534 .seealso: DMGetCoordinateDM(), DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3535 @*/
3536 PetscErrorCode DMSetCoordinateDM(DM dm, DM cdm)
3537 {
3538   PetscErrorCode ierr;
3539 
3540   PetscFunctionBegin;
3541   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3542   PetscValidHeaderSpecific(cdm,DM_CLASSID,2);
3543   ierr = DMDestroy(&dm->coordinateDM);CHKERRQ(ierr);
3544   dm->coordinateDM = cdm;
3545   ierr = PetscObjectReference((PetscObject) dm->coordinateDM);CHKERRQ(ierr);
3546   PetscFunctionReturn(0);
3547 }
3548 
3549 #undef __FUNCT__
3550 #define __FUNCT__ "DMLocatePoints"
3551 /*@
3552   DMLocatePoints - Locate the points in v in the mesh and return an IS of the containing cells
3553 
3554   Not collective
3555 
3556   Input Parameters:
3557 + dm - The DM
3558 - v - The Vec of points
3559 
3560   Output Parameter:
3561 . cells - The local cell numbers for cells which contain the points
3562 
3563   Level: developer
3564 
3565 .keywords: point location, mesh
3566 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3567 @*/
PetscErrorCode DMLocatePoints(DM dm, Vec v, IS *cells)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(v,VEC_CLASSID,2);
  PetscValidPointer(cells,3);
  /* Dispatch to the type-specific implementation; errors if none is provided */
  if (dm->ops->locatepoints) {
    ierr = (*dm->ops->locatepoints)(dm,v,cells);CHKERRQ(ierr);
  } else SETERRQ(PetscObjectComm((PetscObject)dm), PETSC_ERR_SUP, "Point location not available for this DM");
  PetscFunctionReturn(0);
}
3581