xref: /petsc/src/dm/interface/dm.c (revision e383bbd07f1c175fdb04eea9fdcaf8d9f9e92c0f)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"     I*/
2 
3 PetscClassId  DM_CLASSID;
4 PetscLogEvent DM_Convert, DM_GlobalToLocal, DM_LocalToGlobal;
5 
6 #undef __FUNCT__
7 #define __FUNCT__ "DMViewFromOptions"
8 /*
9   Processes command line options to determine if/how a dm
10   is to be viewed.
11 
12 */
13 PetscErrorCode  DMViewFromOptions(DM dm,const char optionname[])
14 {
15   PetscErrorCode    ierr;
16   PetscBool         flg;
17   PetscViewer       viewer;
18   PetscViewerFormat format;
19 
20   PetscFunctionBegin;
21   ierr = PetscOptionsGetViewer(((PetscObject)dm)->comm,((PetscObject)dm)->prefix,optionname,&viewer,&format,&flg);CHKERRQ(ierr);
22   if (flg) {
23     ierr = PetscViewerPushFormat(viewer,format);CHKERRQ(ierr);
24     ierr = DMView(dm,viewer);CHKERRQ(ierr);
25     ierr = PetscViewerPopFormat(viewer);CHKERRQ(ierr);
26     ierr = PetscViewerDestroy(&viewer);CHKERRQ(ierr);
27   }
28   PetscFunctionReturn(0);
29 }
30 
31 
32 #undef __FUNCT__
33 #define __FUNCT__ "DMCreate"
34 /*@
35   DMCreate - Creates an empty DM object. The type can then be set with DMSetType().
36 
37    If you never call DMSetType() it will generate an
38    error when you try to use the DM.
39 
40   Collective on MPI_Comm
41 
42   Input Parameter:
43 . comm - The communicator for the DM object
44 
45   Output Parameter:
46 . dm - The DM object
47 
48   Level: beginner
49 
50 .seealso: DMSetType(), DMDA, DMSLICED, DMCOMPOSITE
51 @*/
PetscErrorCode  DMCreate(MPI_Comm comm,DM *dm)
{
  DM             v;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,2);
  *dm = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  /* With static libraries the Vec, Mat and DM packages must be registered explicitly */
  ierr = VecInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* Create the PETSc object header; DMDestroy()/DMView() serve as the generic destroy/view hooks */
  ierr = PetscHeaderCreate(v, _p_DM, struct _DMOps, DM_CLASSID, -1, "DM", "Distribution Manager", "DM", comm, DMDestroy, DMView);CHKERRQ(ierr);
  /* All type-specific operations start out NULL; DMSetType() fills them in later */
  ierr = PetscMemzero(v->ops, sizeof(struct _DMOps));CHKERRQ(ierr);


  v->ltogmap      = PETSC_NULL;
  v->ltogmapb     = PETSC_NULL;
  v->bs           = 1;                   /* default: no exploitable block structure */
  v->coloringtype = IS_COLORING_GLOBAL;
  ierr = PetscSFCreate(comm, &v->sf);CHKERRQ(ierr);
  ierr = PetscSFCreate(comm, &v->defaultSF);CHKERRQ(ierr);
  v->defaultSection       = PETSC_NULL;
  v->defaultGlobalSection = PETSC_NULL;
  {
    PetscInt i;
    /* NOTE(review): the literal 10 presumably matches the declared length of
       nullspaceConstructors in _p_DM -- confirm against dmimpl.h */
    for (i = 0; i < 10; ++i) {
      v->nullspaceConstructors[i] = PETSC_NULL;
    }
  }
  v->numFields = 0;
  v->fields    = PETSC_NULL;

  *dm = v;
  PetscFunctionReturn(0);
}
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMSetVecType"
93 /*@C
94        DMSetVecType - Sets the type of vector created with DMCreateLocalVector() and DMCreateGlobalVector()
95 
96    Logically Collective on DMDA
97 
98    Input Parameters:
99 +  da - initial distributed array
100 -  ctype - the vector type, currently either VECSTANDARD or VECCUSP
101 
102    Options Database:
103 .   -dm_vec_type ctype
104 
105    Level: intermediate
106 
107 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMDestroy(), DMDA, DMDAInterpolationType, VecType
108 @*/
109 PetscErrorCode  DMSetVecType(DM da,VecType ctype)
110 {
111   PetscErrorCode ierr;
112 
113   PetscFunctionBegin;
114   PetscValidHeaderSpecific(da,DM_CLASSID,1);
115   ierr = PetscFree(da->vectype);CHKERRQ(ierr);
116   ierr = PetscStrallocpy(ctype,(char**)&da->vectype);CHKERRQ(ierr);
117   PetscFunctionReturn(0);
118 }
119 
120 #undef __FUNCT__
121 #define __FUNCT__ "VecGetDM"
122 /*@
123   VecGetDM - Gets the DM defining the data layout of the vector
124 
125   Not collective
126 
127   Input Parameter:
128 . v - The Vec
129 
130   Output Parameter:
131 . dm - The DM
132 
133   Level: intermediate
134 
135 .seealso: VecSetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
136 @*/
137 PetscErrorCode VecGetDM(Vec v, DM *dm)
138 {
139   PetscErrorCode ierr;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
143   PetscValidPointer(dm,2);
144   ierr = PetscObjectQuery((PetscObject) v, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
145   PetscFunctionReturn(0);
146 }
147 
148 #undef __FUNCT__
149 #define __FUNCT__ "VecSetDM"
150 /*@
151   VecSetDM - Sets the DM defining the data layout of the vector
152 
153   Not collective
154 
155   Input Parameters:
156 + v - The Vec
157 - dm - The DM
158 
159   Level: intermediate
160 
161 .seealso: VecGetDM(), DMGetLocalVector(), DMGetGlobalVector(), DMSetVecType()
162 @*/
163 PetscErrorCode VecSetDM(Vec v, DM dm)
164 {
165   PetscErrorCode ierr;
166 
167   PetscFunctionBegin;
168   PetscValidHeaderSpecific(v,VEC_CLASSID,1);
169   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
170   ierr = PetscObjectCompose((PetscObject) v, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
171   PetscFunctionReturn(0);
172 }
173 
174 #undef __FUNCT__
175 #define __FUNCT__ "DMSetMatType"
176 /*@C
177        DMSetMatType - Sets the type of matrix created with DMCreateMatrix()
178 
179    Logically Collective on DM
180 
181    Input Parameters:
182 +  dm - the DM context
183 -  ctype - the matrix type
184 
185    Options Database:
186 .   -dm_mat_type ctype
187 
188    Level: intermediate
189 
190 .seealso: DMDACreate1d(), DMDACreate2d(), DMDACreate3d(), DMCreateMatrix(), DMSetMatrixPreallocateOnly(), MatType
191 @*/
192 PetscErrorCode  DMSetMatType(DM dm,MatType ctype)
193 {
194   PetscErrorCode ierr;
195   PetscFunctionBegin;
196   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
197   ierr = PetscFree(dm->mattype);CHKERRQ(ierr);
198   ierr = PetscStrallocpy(ctype,(char**)&dm->mattype);CHKERRQ(ierr);
199   PetscFunctionReturn(0);
200 }
201 
202 #undef __FUNCT__
203 #define __FUNCT__ "MatGetDM"
204 /*@
205   MatGetDM - Gets the DM defining the data layout of the matrix
206 
207   Not collective
208 
209   Input Parameter:
210 . A - The Mat
211 
212   Output Parameter:
213 . dm - The DM
214 
215   Level: intermediate
216 
217 .seealso: MatSetDM(), DMCreateMatrix(), DMSetMatType()
218 @*/
219 PetscErrorCode MatGetDM(Mat A, DM *dm)
220 {
221   PetscErrorCode ierr;
222 
223   PetscFunctionBegin;
224   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
225   PetscValidPointer(dm,2);
226   ierr = PetscObjectQuery((PetscObject) A, "__PETSc_dm", (PetscObject *) dm);CHKERRQ(ierr);
227   PetscFunctionReturn(0);
228 }
229 
230 #undef __FUNCT__
231 #define __FUNCT__ "MatSetDM"
232 /*@
233   MatSetDM - Sets the DM defining the data layout of the matrix
234 
235   Not collective
236 
237   Input Parameters:
238 + A - The Mat
239 - dm - The DM
240 
241   Level: intermediate
242 
243 .seealso: MatGetDM(), DMCreateMatrix(), DMSetMatType()
244 @*/
245 PetscErrorCode MatSetDM(Mat A, DM dm)
246 {
247   PetscErrorCode ierr;
248 
249   PetscFunctionBegin;
250   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
251   if (dm) {PetscValidHeaderSpecific(dm,DM_CLASSID,2);}
252   ierr = PetscObjectCompose((PetscObject) A, "__PETSc_dm", (PetscObject) dm);CHKERRQ(ierr);
253   PetscFunctionReturn(0);
254 }
255 
256 #undef __FUNCT__
257 #define __FUNCT__ "DMSetOptionsPrefix"
258 /*@C
259    DMSetOptionsPrefix - Sets the prefix used for searching for all
260    DMDA options in the database.
261 
262    Logically Collective on DMDA
263 
264    Input Parameter:
265 +  da - the DMDA context
266 -  prefix - the prefix to prepend to all option names
267 
268    Notes:
269    A hyphen (-) must NOT be given at the beginning of the prefix name.
270    The first character of all runtime options is AUTOMATICALLY the hyphen.
271 
272    Level: advanced
273 
274 .keywords: DMDA, set, options, prefix, database
275 
276 .seealso: DMSetFromOptions()
277 @*/
278 PetscErrorCode  DMSetOptionsPrefix(DM dm,const char prefix[])
279 {
280   PetscErrorCode ierr;
281 
282   PetscFunctionBegin;
283   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
284   ierr = PetscObjectSetOptionsPrefix((PetscObject)dm,prefix);CHKERRQ(ierr);
285   PetscFunctionReturn(0);
286 }
287 
288 #undef __FUNCT__
289 #define __FUNCT__ "DMDestroy"
290 /*@
291     DMDestroy - Destroys a vector packer or DMDA.
292 
293     Collective on DM
294 
295     Input Parameter:
296 .   dm - the DM object to destroy
297 
298     Level: developer
299 
300 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
301 
302 @*/
303 PetscErrorCode  DMDestroy(DM *dm)
304 {
305   PetscInt       i, cnt = 0, f;
306   DMNamedVecLink nlink,nnext;
307   PetscErrorCode ierr;
308 
309   PetscFunctionBegin;
310   if (!*dm) PetscFunctionReturn(0);
311   PetscValidHeaderSpecific((*dm),DM_CLASSID,1);
312 
313   /* count all the circular references of DM and its contained Vecs */
314   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
315     if ((*dm)->localin[i])  {cnt++;}
316     if ((*dm)->globalin[i]) {cnt++;}
317   }
318   for (nlink=(*dm)->namedglobal; nlink; nlink=nlink->next) cnt++;
319   if ((*dm)->x) {
320     DM obj;
321     ierr = VecGetDM((*dm)->x, &obj);CHKERRQ(ierr);
322     if (obj == *dm) cnt++;
323   }
324 
325   if (--((PetscObject)(*dm))->refct - cnt > 0) {*dm = 0; PetscFunctionReturn(0);}
326   /*
327      Need this test because the dm references the vectors that
328      reference the dm, so destroying the dm calls destroy on the
329      vectors that cause another destroy on the dm
330   */
331   if (((PetscObject)(*dm))->refct < 0) PetscFunctionReturn(0);
332   ((PetscObject) (*dm))->refct = 0;
333   for (i=0; i<DM_MAX_WORK_VECTORS; i++) {
334     if ((*dm)->localout[i]) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Destroying a DM that has a local vector obtained with DMGetLocalVector()");
335     ierr = VecDestroy(&(*dm)->localin[i]);CHKERRQ(ierr);
336   }
337   for (nlink=(*dm)->namedglobal; nlink; nlink=nnext) { /* Destroy the named vectors */
338     nnext = nlink->next;
339     if (nlink->status != DMVEC_STATUS_IN) SETERRQ1(((PetscObject)*dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"DM still has Vec named '%s' checked out",nlink->name);
340     ierr = PetscFree(nlink->name);CHKERRQ(ierr);
341     ierr = VecDestroy(&nlink->X);CHKERRQ(ierr);
342     ierr = PetscFree(nlink);CHKERRQ(ierr);
343   }
344   (*dm)->namedglobal = PETSC_NULL;
345 
346   /* Destroy the list of hooks */
347   {
348     DMCoarsenHookLink link,next;
349     for (link=(*dm)->coarsenhook; link; link=next) {
350       next = link->next;
351       ierr = PetscFree(link);CHKERRQ(ierr);
352     }
353     (*dm)->coarsenhook = PETSC_NULL;
354   }
355   {
356     DMRefineHookLink link,next;
357     for (link=(*dm)->refinehook; link; link=next) {
358       next = link->next;
359       ierr = PetscFree(link);CHKERRQ(ierr);
360     }
361     (*dm)->refinehook = PETSC_NULL;
362   }
363   {
364     DMSubDomainHookLink link,next;
365     for (link=(*dm)->subdomainhook; link; link=next) {
366       next = link->next;
367       ierr = PetscFree(link);CHKERRQ(ierr);
368     }
369     (*dm)->subdomainhook = PETSC_NULL;
370   }
371   {
372     DMGlobalToLocalHookLink link,next;
373     for (link=(*dm)->gtolhook; link; link=next) {
374       next = link->next;
375       ierr = PetscFree(link);CHKERRQ(ierr);
376     }
377     (*dm)->gtolhook = PETSC_NULL;
378   }
379   /* Destroy the work arrays */
380   {
381     DMWorkLink link,next;
382     if ((*dm)->workout) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Work array still checked out");
383     for (link=(*dm)->workin; link; link=next) {
384       next = link->next;
385       ierr = PetscFree(link->mem);CHKERRQ(ierr);
386       ierr = PetscFree(link);CHKERRQ(ierr);
387     }
388     (*dm)->workin = PETSC_NULL;
389   }
390 
391   ierr = PetscObjectDestroy(&(*dm)->dmksp);CHKERRQ(ierr);
392   ierr = PetscObjectDestroy(&(*dm)->dmsnes);CHKERRQ(ierr);
393   ierr = PetscObjectDestroy(&(*dm)->dmts);CHKERRQ(ierr);
394 
395   if ((*dm)->ctx && (*dm)->ctxdestroy) {
396     ierr = (*(*dm)->ctxdestroy)(&(*dm)->ctx);CHKERRQ(ierr);
397   }
398   ierr = VecDestroy(&(*dm)->x);CHKERRQ(ierr);
399   ierr = MatFDColoringDestroy(&(*dm)->fd);CHKERRQ(ierr);
400   ierr = DMClearGlobalVectors(*dm);CHKERRQ(ierr);
401   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmap);CHKERRQ(ierr);
402   ierr = ISLocalToGlobalMappingDestroy(&(*dm)->ltogmapb);CHKERRQ(ierr);
403   ierr = PetscFree((*dm)->vectype);CHKERRQ(ierr);
404   ierr = PetscFree((*dm)->mattype);CHKERRQ(ierr);
405 
406   ierr = PetscSectionDestroy(&(*dm)->defaultSection);CHKERRQ(ierr);
407   ierr = PetscSectionDestroy(&(*dm)->defaultGlobalSection);CHKERRQ(ierr);
408   ierr = PetscLayoutDestroy(&(*dm)->map);CHKERRQ(ierr);
409   ierr = PetscSFDestroy(&(*dm)->sf);CHKERRQ(ierr);
410   ierr = PetscSFDestroy(&(*dm)->defaultSF);CHKERRQ(ierr);
411 
412   ierr = DMDestroy(&(*dm)->coordinateDM);CHKERRQ(ierr);
413   ierr = VecDestroy(&(*dm)->coordinates);CHKERRQ(ierr);
414   ierr = VecDestroy(&(*dm)->coordinatesLocal);CHKERRQ(ierr);
415 
416   for (f = 0; f < (*dm)->numFields; ++f) {
417     ierr = PetscObjectDestroy(&(*dm)->fields[f]);CHKERRQ(ierr);
418   }
419   ierr = PetscFree((*dm)->fields);CHKERRQ(ierr);
420   /* if memory was published with AMS then destroy it */
421   ierr = PetscObjectDepublish(*dm);CHKERRQ(ierr);
422 
423   ierr = (*(*dm)->ops->destroy)(*dm);CHKERRQ(ierr);
424   /* We do not destroy (*dm)->data here so that we can reference count backend objects */
425   ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
426   PetscFunctionReturn(0);
427 }
428 
429 #undef __FUNCT__
430 #define __FUNCT__ "DMSetUp"
431 /*@
432     DMSetUp - sets up the data structures inside a DM object
433 
434     Collective on DM
435 
436     Input Parameter:
437 .   dm - the DM object to setup
438 
439     Level: developer
440 
441 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
442 
443 @*/
444 PetscErrorCode  DMSetUp(DM dm)
445 {
446   PetscErrorCode ierr;
447 
448   PetscFunctionBegin;
449   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
450   if (dm->setupcalled) PetscFunctionReturn(0);
451   if (dm->ops->setup) {
452     ierr = (*dm->ops->setup)(dm);CHKERRQ(ierr);
453   }
454   dm->setupcalled = PETSC_TRUE;
455   PetscFunctionReturn(0);
456 }
457 
458 #undef __FUNCT__
459 #define __FUNCT__ "DMSetFromOptions"
460 /*@
461     DMSetFromOptions - sets parameters in a DM from the options database
462 
463     Collective on DM
464 
465     Input Parameter:
466 .   dm - the DM object to set options for
467 
468     Options Database:
469 +   -dm_preallocate_only: Only preallocate the matrix for DMCreateMatrix(), but do not fill it with zeros
470 .   -dm_vec_type <type>  type of vector to create inside DM
471 .   -dm_mat_type <type>  type of matrix to create inside DM
472 -   -dm_coloring_type <global or ghosted>
473 
474     Level: developer
475 
476 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
477 
478 @*/
479 PetscErrorCode  DMSetFromOptions(DM dm)
480 {
481   char           typeName[256] = MATAIJ;
482   PetscBool      flg;
483   PetscErrorCode ierr;
484 
485   PetscFunctionBegin;
486   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
487   ierr = PetscObjectOptionsBegin((PetscObject)dm);CHKERRQ(ierr);
488     ierr = PetscOptionsBool("-dm_preallocate_only","only preallocate matrix, but do not set column indices","DMSetMatrixPreallocateOnly",dm->prealloc_only,&dm->prealloc_only,PETSC_NULL);CHKERRQ(ierr);
489     ierr = PetscOptionsList("-dm_vec_type","Vector type used for created vectors","DMSetVecType",VecList,dm->vectype,typeName,256,&flg);CHKERRQ(ierr);
490     if (flg) {
491       ierr = DMSetVecType(dm,typeName);CHKERRQ(ierr);
492     }
493     ierr = PetscOptionsList("-dm_mat_type","Matrix type used for created matrices","DMSetMatType",MatList,dm->mattype?dm->mattype:typeName,typeName,sizeof(typeName),&flg);CHKERRQ(ierr);
494     if (flg) {
495       ierr = DMSetMatType(dm,typeName);CHKERRQ(ierr);
496     }
497     ierr = PetscOptionsEnum("-dm_is_coloring_type","Global or local coloring of Jacobian","ISColoringType",ISColoringTypes,(PetscEnum)dm->coloringtype,(PetscEnum*)&dm->coloringtype,PETSC_NULL);CHKERRQ(ierr);
498     if (dm->ops->setfromoptions) {
499       ierr = (*dm->ops->setfromoptions)(dm);CHKERRQ(ierr);
500     }
501     /* process any options handlers added with PetscObjectAddOptionsHandler() */
502     ierr = PetscObjectProcessOptionsHandlers((PetscObject) dm);CHKERRQ(ierr);
503   ierr = PetscOptionsEnd();CHKERRQ(ierr);
504   ierr = DMViewFromOptions(dm,"-dm_view");CHKERRQ(ierr);
505   PetscFunctionReturn(0);
506 }
507 
508 #undef __FUNCT__
509 #define __FUNCT__ "DMView"
510 /*@C
511     DMView - Views a vector packer or DMDA.
512 
513     Collective on DM
514 
515     Input Parameter:
516 +   dm - the DM object to view
517 -   v - the viewer
518 
519     Level: developer
520 
521 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
522 
523 @*/
524 PetscErrorCode  DMView(DM dm,PetscViewer v)
525 {
526   PetscErrorCode ierr;
527   PetscBool      isbinary;
528 
529   PetscFunctionBegin;
530   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
531   if (!v) {
532     ierr = PetscViewerASCIIGetStdout(((PetscObject)dm)->comm,&v);CHKERRQ(ierr);
533   }
534   ierr = PetscObjectTypeCompare((PetscObject)v,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
535   if (isbinary) {
536     PetscInt         classid = DM_FILE_CLASSID;
537     char             type[256];
538 
539     ierr = PetscViewerBinaryWrite(v,&classid,1,PETSC_INT,PETSC_FALSE);CHKERRQ(ierr);
540     ierr = PetscStrncpy(type,((PetscObject)dm)->type_name,256);CHKERRQ(ierr);
541     ierr = PetscViewerBinaryWrite(v,type,256,PETSC_CHAR,PETSC_FALSE);CHKERRQ(ierr);
542   }
543   if (dm->ops->view) {
544     ierr = (*dm->ops->view)(dm,v);CHKERRQ(ierr);
545   }
546   PetscFunctionReturn(0);
547 }
548 
549 PETSC_EXTERN PetscErrorCode VecView_Complex_Local(Vec, PetscViewer);
550 PETSC_EXTERN PetscErrorCode VecView_Complex(Vec, PetscViewer);
551 
552 #undef __FUNCT__
553 #define __FUNCT__ "DMCreateGlobalVector"
554 /*@
555     DMCreateGlobalVector - Creates a global vector from a DMDA or DMComposite object
556 
557     Collective on DM
558 
559     Input Parameter:
560 .   dm - the DM object
561 
562     Output Parameter:
563 .   vec - the global vector
564 
565     Level: beginner
566 
567 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
568 
569 @*/
PetscErrorCode  DMCreateGlobalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* PetscSection path: size the vector directly from the (global) section */
    PetscSection gSection;
    PetscInt     localSize, blockSize = -1, pStart, pEnd, p;

    /* NOTE(review): gSection is not used below; presumably this call is needed
       for its side effect of building the global section -- confirm */
    ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Block-size heuristic: the common unconstrained dof count over all points
       with dof > 0, falling back to 1 as soon as two points disagree.
       NOTE(review): blockSize stays -1 when no point has dof > 0, which would
       make the modulo test below use -1 -- confirm that case cannot occur */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof, cdof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      ierr = PetscSectionGetConstraintDof(dm->defaultSection, p, &cdof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof-cdof;
      if ((dof > 0) && (dof-cdof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetConstrainedStorageSize(dm->defaultGlobalSection, &localSize);CHKERRQ(ierr);
    if (localSize%blockSize) SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONG, "Mismatch between blocksize %d and local storage size %d", blockSize, localSize);
    ierr = VecCreate(((PetscObject) dm)->comm, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    /* ierr = VecSetType(*vec, dm->vectype);CHKERRQ(ierr); */
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    /* Attach the DM so VecGetDM() works, and for the circular-reference count in DMDestroy() */
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    /* ierr = VecSetLocalToGlobalMapping(*vec, dm->ltogmap);CHKERRQ(ierr); */
    /* ierr = VecSetLocalToGlobalMappingBlock(*vec, dm->ltogmapb);CHKERRQ(ierr); */
    /* ierr = VecSetOperation(*vec, VECOP_DUPLICATE, (void(*)(void)) VecDuplicate_MPI_DM);CHKERRQ(ierr); */
    /* NOTE(review): the view operation is overridden unconditionally here, even
       for real scalars -- confirm VecView_Complex handles both builds */
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex);CHKERRQ(ierr);
  } else {
    /* No section set: delegate entirely to the type-specific implementation */
    ierr = (*dm->ops->createglobalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
610 
611 #undef __FUNCT__
612 #define __FUNCT__ "DMCreateLocalVector"
613 /*@
614     DMCreateLocalVector - Creates a local vector from a DMDA or DMComposite object
615 
616     Not Collective
617 
618     Input Parameter:
619 .   dm - the DM object
620 
621     Output Parameter:
622 .   vec - the local vector
623 
624     Level: beginner
625 
626 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
627 
628 @*/
PetscErrorCode  DMCreateLocalVector(DM dm,Vec *vec)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  if (dm->defaultSection) {
    /* PetscSection path: a local (sequential) vector sized from the local section */
    PetscInt localSize, blockSize = -1, pStart, pEnd, p;

    ierr = PetscSectionGetChart(dm->defaultSection, &pStart, &pEnd);CHKERRQ(ierr);
    /* Block-size heuristic: the common dof count over all points with dof > 0,
       falling back to 1 as soon as two points disagree.
       NOTE(review): blockSize stays -1 when no point has dof > 0 -- confirm
       VecSetBlockSize tolerates that case */
    for(p = pStart; p < pEnd; ++p) {
      PetscInt dof;

      ierr = PetscSectionGetDof(dm->defaultSection, p, &dof);CHKERRQ(ierr);
      if ((blockSize < 0) && (dof > 0)) blockSize = dof;
      if ((dof > 0) && (dof != blockSize)) {
        blockSize = 1;
        break;
      }
    }
    ierr = PetscSectionGetStorageSize(dm->defaultSection, &localSize);CHKERRQ(ierr);
    /* Local vectors live on PETSC_COMM_SELF, not the DM's communicator */
    ierr = VecCreate(PETSC_COMM_SELF, vec);CHKERRQ(ierr);
    ierr = VecSetSizes(*vec, localSize, localSize);CHKERRQ(ierr);
    ierr = VecSetBlockSize(*vec, blockSize);CHKERRQ(ierr);
    ierr = VecSetFromOptions(*vec);CHKERRQ(ierr);
    /* Attach the DM so VecGetDM() works on the result */
    ierr = VecSetDM(*vec, dm);CHKERRQ(ierr);
    ierr = VecSetOperation(*vec, VECOP_VIEW, (void(*)(void)) VecView_Complex_Local);CHKERRQ(ierr);
  } else {
    /* No section set: delegate entirely to the type-specific implementation */
    ierr = (*dm->ops->createlocalvector)(dm,vec);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
661 
662 #undef __FUNCT__
663 #define __FUNCT__ "DMGetLocalToGlobalMapping"
664 /*@
665    DMGetLocalToGlobalMapping - Accesses the local-to-global mapping in a DM.
666 
667    Collective on DM
668 
669    Input Parameter:
670 .  dm - the DM that provides the mapping
671 
672    Output Parameter:
673 .  ltog - the mapping
674 
675    Level: intermediate
676 
677    Notes:
678    This mapping can then be used by VecSetLocalToGlobalMapping() or
679    MatSetLocalToGlobalMapping().
680 
681 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMappingBlock()
682 @*/
683 PetscErrorCode  DMGetLocalToGlobalMapping(DM dm,ISLocalToGlobalMapping *ltog)
684 {
685   PetscErrorCode ierr;
686 
687   PetscFunctionBegin;
688   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
689   PetscValidPointer(ltog,2);
690   if (!dm->ltogmap) {
691     PetscSection section, sectionGlobal;
692 
693     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
694     if (section) {
695       PetscInt      *ltog;
696       PetscInt       pStart, pEnd, size, p, l;
697 
698       ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
699       ierr = PetscSectionGetChart(section, &pStart, &pEnd);CHKERRQ(ierr);
700       ierr = PetscSectionGetStorageSize(section, &size);CHKERRQ(ierr);
701       ierr = PetscMalloc(size * sizeof(PetscInt), &ltog);CHKERRQ(ierr); /* We want the local+overlap size */
702       for (p = pStart, l = 0; p < pEnd; ++p) {
703         PetscInt dof, off, c;
704 
705         /* Should probably use constrained dofs */
706         ierr = PetscSectionGetDof(section, p, &dof);CHKERRQ(ierr);
707         ierr = PetscSectionGetOffset(sectionGlobal, p, &off);CHKERRQ(ierr);
708         for (c = 0; c < dof; ++c, ++l) {
709           ltog[l] = off+c;
710         }
711       }
712       ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF, size, ltog, PETSC_OWN_POINTER, &dm->ltogmap);CHKERRQ(ierr);
713       ierr = PetscLogObjectParent(dm, dm->ltogmap);CHKERRQ(ierr);
714     } else {
715       if (!dm->ops->createlocaltoglobalmapping) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMapping");
716       ierr = (*dm->ops->createlocaltoglobalmapping)(dm);CHKERRQ(ierr);
717     }
718   }
719   *ltog = dm->ltogmap;
720   PetscFunctionReturn(0);
721 }
722 
723 #undef __FUNCT__
724 #define __FUNCT__ "DMGetLocalToGlobalMappingBlock"
725 /*@
726    DMGetLocalToGlobalMappingBlock - Accesses the blocked local-to-global mapping in a DM.
727 
728    Collective on DM
729 
730    Input Parameter:
731 .  da - the distributed array that provides the mapping
732 
733    Output Parameter:
734 .  ltog - the block mapping
735 
736    Level: intermediate
737 
738    Notes:
739    This mapping can then be used by VecSetLocalToGlobalMappingBlock() or
740    MatSetLocalToGlobalMappingBlock().
741 
742 .seealso: DMCreateLocalVector(), DMGetLocalToGlobalMapping(), DMGetBlockSize(), VecSetBlockSize(), MatSetBlockSize()
743 @*/
744 PetscErrorCode  DMGetLocalToGlobalMappingBlock(DM dm,ISLocalToGlobalMapping *ltog)
745 {
746   PetscErrorCode ierr;
747 
748   PetscFunctionBegin;
749   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
750   PetscValidPointer(ltog,2);
751   if (!dm->ltogmapb) {
752     PetscInt bs;
753     ierr = DMGetBlockSize(dm,&bs);CHKERRQ(ierr);
754     if (bs > 1) {
755       if (!dm->ops->createlocaltoglobalmappingblock) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DM can not create LocalToGlobalMappingBlock");
756       ierr = (*dm->ops->createlocaltoglobalmappingblock)(dm);CHKERRQ(ierr);
757     } else {
758       ierr = DMGetLocalToGlobalMapping(dm,&dm->ltogmapb);CHKERRQ(ierr);
759       ierr = PetscObjectReference((PetscObject)dm->ltogmapb);CHKERRQ(ierr);
760     }
761   }
762   *ltog = dm->ltogmapb;
763   PetscFunctionReturn(0);
764 }
765 
766 #undef __FUNCT__
767 #define __FUNCT__ "DMGetBlockSize"
768 /*@
769    DMGetBlockSize - Gets the inherent block size associated with a DM
770 
771    Not Collective
772 
773    Input Parameter:
774 .  dm - the DM with block structure
775 
776    Output Parameter:
777 .  bs - the block size, 1 implies no exploitable block structure
778 
779    Level: intermediate
780 
781 .seealso: ISCreateBlock(), VecSetBlockSize(), MatSetBlockSize(), DMGetLocalToGlobalMappingBlock()
782 @*/
PetscErrorCode  DMGetBlockSize(DM dm,PetscInt *bs)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(bs,2);
  /* NOTE(review): DMCreate() initializes bs to 1, so this guard only fires if a
     type implementation resets bs below 1 -- confirm intended semantics */
  if (dm->bs < 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"DM does not have enough information to provide a block size yet");
  *bs = dm->bs;
  PetscFunctionReturn(0);
}
792 
793 #undef __FUNCT__
794 #define __FUNCT__ "DMCreateInterpolation"
795 /*@
796     DMCreateInterpolation - Gets interpolation matrix between two DMDA or DMComposite objects
797 
798     Collective on DM
799 
800     Input Parameter:
801 +   dm1 - the DM object
802 -   dm2 - the second, finer DM object
803 
804     Output Parameter:
805 +  mat - the interpolation
806 -  vec - the scaling (optional)
807 
808     Level: developer
809 
810     Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
811         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the interpolation.
812 
813         For DMDA objects you can use this interpolation (more precisely the interpolation from the DMGetCoordinateDM()) to interpolate the mesh coordinate vectors
814         EXCEPT in the periodic case where it does not make sense since the coordinate vectors are not periodic.
815 
816 
817 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMRefine(), DMCoarsen()
818 
819 @*/
820 PetscErrorCode  DMCreateInterpolation(DM dm1,DM dm2,Mat *mat,Vec *vec)
821 {
822   PetscErrorCode ierr;
823 
824   PetscFunctionBegin;
825   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
826   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
827   ierr = (*dm1->ops->createinterpolation)(dm1,dm2,mat,vec);CHKERRQ(ierr);
828   PetscFunctionReturn(0);
829 }
830 
831 #undef __FUNCT__
832 #define __FUNCT__ "DMCreateInjection"
833 /*@
834     DMCreateInjection - Gets injection matrix between two DMDA or DMComposite objects
835 
836     Collective on DM
837 
838     Input Parameter:
839 +   dm1 - the DM object
840 -   dm2 - the second, finer DM object
841 
842     Output Parameter:
843 .   ctx - the injection
844 
845     Level: developer
846 
847    Notes:  For DMDA objects this only works for "uniform refinement", that is the refined mesh was obtained DMRefine() or the coarse mesh was obtained by
848         DMCoarsen(). The coordinates set into the DMDA are completely ignored in computing the injection.
849 
850 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateColoring(), DMCreateMatrix(), DMCreateInterpolation()
851 
852 @*/
853 PetscErrorCode  DMCreateInjection(DM dm1,DM dm2,VecScatter *ctx)
854 {
855   PetscErrorCode ierr;
856 
857   PetscFunctionBegin;
858   PetscValidHeaderSpecific(dm1,DM_CLASSID,1);
859   PetscValidHeaderSpecific(dm2,DM_CLASSID,2);
860   ierr = (*dm1->ops->getinjection)(dm1,dm2,ctx);CHKERRQ(ierr);
861   PetscFunctionReturn(0);
862 }
863 
864 #undef __FUNCT__
865 #define __FUNCT__ "DMCreateColoring"
866 /*@C
867     DMCreateColoring - Gets coloring for a DMDA or DMComposite
868 
869     Collective on DM
870 
871     Input Parameter:
872 +   dm - the DM object
873 .   ctype - IS_COLORING_GHOSTED or IS_COLORING_GLOBAL
874 -   mtype - either MATAIJ or MATBAIJ
875 
876     Output Parameter:
877 .   coloring - the coloring
878 
879     Level: developer
880 
881 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateMatrix()
882 
883 @*/
884 PetscErrorCode  DMCreateColoring(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
885 {
886   PetscErrorCode ierr;
887 
888   PetscFunctionBegin;
889   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
890   if (!dm->ops->getcoloring) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No coloring for this type of DM yet");
891   ierr = (*dm->ops->getcoloring)(dm,ctype,mtype,coloring);CHKERRQ(ierr);
892   PetscFunctionReturn(0);
893 }
894 
895 #undef __FUNCT__
896 #define __FUNCT__ "DMCreateMatrix"
897 /*@C
898     DMCreateMatrix - Gets empty Jacobian for a DMDA or DMComposite
899 
900     Collective on DM
901 
902     Input Parameter:
903 +   dm - the DM object
904 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ, MATSEQBAIJ, MATMPIBAIJ, or
905             any type which inherits from one of these (such as MATAIJ)
906 
907     Output Parameter:
908 .   mat - the empty Jacobian
909 
910     Level: beginner
911 
912     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
913        do not need to do it yourself.
914 
915        By default it also sets the nonzero structure and puts in the zero entries. To prevent setting
916        the nonzero pattern call DMDASetMatPreallocateOnly()
917 
918        For structured grid problems, when you call MatView() on this matrix it is displayed using the global natural ordering, NOT in the ordering used
919        internally by PETSc.
920 
921        For structured grid problems, in general it is easiest to use MatSetValuesStencil() or MatSetValuesLocal() to put values into the matrix because MatSetValues() requires
922        the indices for the global numbering for DMDAs which is complicated.
923 
924 .seealso DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
925 
926 @*/
927 PetscErrorCode  DMCreateMatrix(DM dm,MatType mtype,Mat *mat)
928 {
929   PetscErrorCode ierr;
930 
931   PetscFunctionBegin;
932   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
933 #ifndef PETSC_USE_DYNAMIC_LIBRARIES
934   ierr = MatInitializePackage(PETSC_NULL);CHKERRQ(ierr);
935 #endif
936   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
937   PetscValidPointer(mat,3);
938   if (dm->mattype) {
939     ierr = (*dm->ops->creatematrix)(dm,dm->mattype,mat);CHKERRQ(ierr);
940   } else {
941     ierr = (*dm->ops->creatematrix)(dm,mtype,mat);CHKERRQ(ierr);
942   }
943   PetscFunctionReturn(0);
944 }
945 
#undef __FUNCT__
#define __FUNCT__ "DMSetMatrixPreallocateOnly"
/*@
  DMSetMatrixPreallocateOnly - When DMCreateMatrix() is called the matrix will be properly
    preallocated but the nonzero structure and zero values will not be set.

  Logically Collective on DM

  Input Parameters:
+ dm - the DM
- only - PETSC_TRUE if only want preallocation

  Level: developer

.seealso DMCreateMatrix()
@*/
PetscErrorCode DMSetMatrixPreallocateOnly(DM dm, PetscBool only)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored on the DM for later consultation by its DMCreateMatrix() implementation */
  dm->prealloc_only = only;
  PetscFunctionReturn(0);
}
968 
969 #undef __FUNCT__
970 #define __FUNCT__ "DMGetWorkArray"
971 /*@C
972   DMGetWorkArray - Gets a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
973 
974   Not Collective
975 
976   Input Parameters:
977 + dm - the DM object
978 . count - The minium size
979 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
980 
981   Output Parameter:
982 . array - the work array
983 
984   Level: developer
985 
986 .seealso DMDestroy(), DMCreate()
987 @*/
988 PetscErrorCode DMGetWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
989 {
990   PetscErrorCode ierr;
991   DMWorkLink link;
992   size_t size;
993 
994   PetscFunctionBegin;
995   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
996   PetscValidPointer(mem,4);
997   if (dm->workin) {
998     link = dm->workin;
999     dm->workin = dm->workin->next;
1000   } else {
1001     ierr = PetscNewLog(dm,struct _DMWorkLink,&link);CHKERRQ(ierr);
1002   }
1003   ierr = PetscDataTypeGetSize(dtype,&size);CHKERRQ(ierr);
1004   if (size*count > link->bytes) {
1005     ierr = PetscFree(link->mem);CHKERRQ(ierr);
1006     ierr = PetscMalloc(size*count,&link->mem);CHKERRQ(ierr);
1007     link->bytes = size*count;
1008   }
1009   link->next = dm->workout;
1010   dm->workout = link;
1011   *(void**)mem = link->mem;
1012   PetscFunctionReturn(0);
1013 }
1014 
1015 #undef __FUNCT__
1016 #define __FUNCT__ "DMRestoreWorkArray"
1017 /*@C
1018   DMRestoreWorkArray - Restores a work array guaranteed to be at least the input size, restore with DMRestoreWorkArray()
1019 
1020   Not Collective
1021 
1022   Input Parameters:
1023 + dm - the DM object
1024 . count - The minium size
1025 - dtype - data type (PETSC_REAL, PETSC_SCALAR, PETSC_INT)
1026 
1027   Output Parameter:
1028 . array - the work array
1029 
1030   Level: developer
1031 
1032 .seealso DMDestroy(), DMCreate()
1033 @*/
1034 PetscErrorCode DMRestoreWorkArray(DM dm,PetscInt count,PetscDataType dtype,void *mem)
1035 {
1036   DMWorkLink *p,link;
1037 
1038   PetscFunctionBegin;
1039   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1040   PetscValidPointer(mem,4);
1041   for (p=&dm->workout; (link=*p); p=&link->next) {
1042     if (link->mem == *(void**)mem) {
1043       *p = link->next;
1044       link->next = dm->workin;
1045       dm->workin = link;
1046       *(void**)mem = PETSC_NULL;
1047       PetscFunctionReturn(0);
1048     }
1049   }
1050   SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Array was not checked out");
1051   PetscFunctionReturn(0);
1052 }
1053 
1054 #undef __FUNCT__
1055 #define __FUNCT__ "DMSetNullSpaceConstructor"
1056 PetscErrorCode DMSetNullSpaceConstructor(DM dm, PetscInt field, PetscErrorCode (*nullsp)(DM dm, PetscInt field, MatNullSpace *nullSpace))
1057 {
1058   PetscFunctionBegin;
1059   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
1060   if (field >= 10) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Cannot handle %d >= 10 fields", field);
1061   dm->nullspaceConstructors[field] = nullsp;
1062   PetscFunctionReturn(0);
1063 }
1064 
1065 #undef __FUNCT__
1066 #define __FUNCT__ "DMCreateFieldIS"
1067 /*@C
1068   DMCreateFieldIS - Creates a set of IS objects with the global indices of dofs for each field
1069 
1070   Not collective
1071 
1072   Input Parameter:
1073 . dm - the DM object
1074 
1075   Output Parameters:
1076 + numFields  - The number of fields (or PETSC_NULL if not requested)
1077 . fieldNames - The name for each field (or PETSC_NULL if not requested)
1078 - fields     - The global indices for each field (or PETSC_NULL if not requested)
1079 
1080   Level: intermediate
1081 
1082   Notes:
1083   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1084   PetscFree(), every entry of fields should be destroyed with ISDestroy(), and both arrays should be freed with
1085   PetscFree().
1086 
1087 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix()
1088 @*/
1089 PetscErrorCode DMCreateFieldIS(DM dm, PetscInt *numFields, char ***fieldNames, IS **fields)
1090 {
1091   PetscSection   section, sectionGlobal;
1092   PetscErrorCode ierr;
1093 
1094   PetscFunctionBegin;
1095   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1096   if (numFields) {
1097     PetscValidPointer(numFields,2);
1098     *numFields = 0;
1099   }
1100   if (fieldNames) {
1101     PetscValidPointer(fieldNames,3);
1102     *fieldNames = PETSC_NULL;
1103   }
1104   if (fields) {
1105     PetscValidPointer(fields,4);
1106     *fields = PETSC_NULL;
1107   }
1108   ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1109   if (section) {
1110     PetscInt *fieldSizes, **fieldIndices;
1111     PetscInt  nF, f, pStart, pEnd, p;
1112 
1113     ierr = DMGetDefaultGlobalSection(dm, &sectionGlobal);CHKERRQ(ierr);
1114     ierr = PetscSectionGetNumFields(section, &nF);CHKERRQ(ierr);
1115     ierr = PetscMalloc2(nF,PetscInt,&fieldSizes,nF,PetscInt *,&fieldIndices);CHKERRQ(ierr);
1116     ierr = PetscSectionGetChart(sectionGlobal, &pStart, &pEnd);CHKERRQ(ierr);
1117     for (f = 0; f < nF; ++f) {
1118       fieldSizes[f] = 0;
1119     }
1120     for (p = pStart; p < pEnd; ++p) {
1121       PetscInt gdof;
1122 
1123       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1124       if (gdof > 0) {
1125         for (f = 0; f < nF; ++f) {
1126           PetscInt fdof, fcdof;
1127 
1128           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1129           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1130           fieldSizes[f] += fdof-fcdof;
1131         }
1132       }
1133     }
1134     for (f = 0; f < nF; ++f) {
1135       ierr = PetscMalloc(fieldSizes[f] * sizeof(PetscInt), &fieldIndices[f]);CHKERRQ(ierr);
1136       fieldSizes[f] = 0;
1137     }
1138     for (p = pStart; p < pEnd; ++p) {
1139       PetscInt gdof, goff;
1140 
1141       ierr = PetscSectionGetDof(sectionGlobal, p, &gdof);CHKERRQ(ierr);
1142       if (gdof > 0) {
1143         ierr = PetscSectionGetOffset(sectionGlobal, p, &goff);CHKERRQ(ierr);
1144         for (f = 0; f < nF; ++f) {
1145           PetscInt fdof, fcdof, fc;
1146 
1147           ierr = PetscSectionGetFieldDof(section, p, f, &fdof);CHKERRQ(ierr);
1148           ierr = PetscSectionGetFieldConstraintDof(section, p, f, &fcdof);CHKERRQ(ierr);
1149           for (fc = 0; fc < fdof-fcdof; ++fc, ++fieldSizes[f]) {
1150             fieldIndices[f][fieldSizes[f]] = goff++;
1151           }
1152         }
1153       }
1154     }
1155     if (numFields) {*numFields = nF;}
1156     if (fieldNames) {
1157       ierr = PetscMalloc(nF * sizeof(char *), fieldNames);CHKERRQ(ierr);
1158       for (f = 0; f < nF; ++f) {
1159         const char *fieldName;
1160 
1161         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1162         ierr = PetscStrallocpy(fieldName, (char **) &(*fieldNames)[f]);CHKERRQ(ierr);
1163       }
1164     }
1165     if (fields) {
1166       ierr = PetscMalloc(nF * sizeof(IS), fields);CHKERRQ(ierr);
1167       for (f = 0; f < nF; ++f) {
1168         ierr = ISCreateGeneral(((PetscObject) dm)->comm, fieldSizes[f], fieldIndices[f], PETSC_OWN_POINTER, &(*fields)[f]);CHKERRQ(ierr);
1169       }
1170     }
1171     ierr = PetscFree2(fieldSizes,fieldIndices);CHKERRQ(ierr);
1172   } else {
1173     if (dm->ops->createfieldis) {ierr = (*dm->ops->createfieldis)(dm, numFields, fieldNames, fields);CHKERRQ(ierr);}
1174   }
1175   PetscFunctionReturn(0);
1176 }
1177 
1178 #undef __FUNCT__
1179 #define __FUNCT__ "DMCreateFieldDecompositionDM"
1180 /*@C
1181   DMCreateFieldDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into fields.
1182 
1183   Not Collective
1184 
1185   Input Parameters:
1186 + dm   - the DM object
1187 - name - the name of the field decomposition
1188 
1189   Output Parameter:
1190 . ddm  - the field decomposition DM (PETSC_NULL, if no such decomposition is known)
1191 
1192   Level: advanced
1193 
1194 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1195 @*/
1196 PetscErrorCode DMCreateFieldDecompositionDM(DM dm, const char* name, DM *ddm)
1197 {
1198   PetscErrorCode ierr;
1199 
1200   PetscFunctionBegin;
1201   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1202   PetscValidCharPointer(name,2);
1203   PetscValidPointer(ddm,3);
1204   *ddm = PETSC_NULL;
1205   if (dm->ops->createfielddecompositiondm) {
1206     ierr = (*dm->ops->createfielddecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1207   }
1208   PetscFunctionReturn(0);
1209 }
1210 
1211 #undef __FUNCT__
1212 #define __FUNCT__ "DMCreateFieldDecomposition"
1213 /*@C
1214   DMCreateFieldDecomposition - Returns a list of IS objects defining a decomposition of a problem into subproblems
1215                           corresponding to different fields: each IS contains the global indices of the dofs of the
1216                           corresponding field. The optional list of DMs define the DM for each subproblem.
1217                           Generalizes DMCreateFieldIS().
1218 
1219   Not collective
1220 
1221   Input Parameter:
1222 . dm - the DM object
1223 
1224   Output Parameters:
1225 + len       - The number of subproblems in the field decomposition (or PETSC_NULL if not requested)
1226 . namelist  - The name for each field (or PETSC_NULL if not requested)
1227 . islist    - The global indices for each field (or PETSC_NULL if not requested)
1228 - dmlist    - The DMs for each field subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1229 
1230   Level: intermediate
1231 
1232   Notes:
1233   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1234   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1235   and all of the arrays should be freed with PetscFree().
1236 
1237 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1238 @*/
1239 PetscErrorCode DMCreateFieldDecomposition(DM dm, PetscInt *len, char ***namelist, IS **islist, DM **dmlist)
1240 {
1241   PetscErrorCode ierr;
1242 
1243   PetscFunctionBegin;
1244   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1245   if (len)      {PetscValidPointer(len,2);      *len      = 0;}
1246   if (namelist) {PetscValidPointer(namelist,3); *namelist = 0;}
1247   if (islist)   {PetscValidPointer(islist,4);   *islist   = 0;}
1248   if (dmlist)   {PetscValidPointer(dmlist,5);   *dmlist   = 0;}
1249   if (!dm->ops->createfielddecomposition) {
1250     PetscSection section;
1251     PetscInt     numFields, f;
1252 
1253     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
1254     if (section) {ierr = PetscSectionGetNumFields(section, &numFields);CHKERRQ(ierr);}
1255     if (section && numFields && dm->ops->createsubdm) {
1256       *len = numFields;
1257       ierr = PetscMalloc3(numFields,char*,namelist,numFields,IS,islist,numFields,DM,dmlist);CHKERRQ(ierr);
1258       for (f = 0; f < numFields; ++f) {
1259         const char *fieldName;
1260 
1261         ierr = DMCreateSubDM(dm, 1, &f, &(*islist)[f], &(*dmlist)[f]);CHKERRQ(ierr);
1262         ierr = PetscSectionGetFieldName(section, f, &fieldName);CHKERRQ(ierr);
1263         ierr = PetscStrallocpy(fieldName, (char **) &(*namelist)[f]);CHKERRQ(ierr);
1264       }
1265     } else {
1266       ierr = DMCreateFieldIS(dm, len, namelist, islist);CHKERRQ(ierr);
1267       /* By default there are no DMs associated with subproblems. */
1268       if (dmlist) *dmlist = PETSC_NULL;
1269     }
1270   }
1271   else {
1272     ierr = (*dm->ops->createfielddecomposition)(dm,len,namelist,islist,dmlist);CHKERRQ(ierr);
1273   }
1274   PetscFunctionReturn(0);
1275 }
1276 
#undef __FUNCT__
#define __FUNCT__ "DMCreateSubDM"
/*@C
  DMCreateSubDM - Returns an IS and DM encapsulating a subproblem defined by the fields passed in.
                  The fields are defined by DMCreateFieldIS().

  Not collective

  Input Parameters:
+ dm        - the DM object
. numFields - the number of fields in this subproblem
- fields    - the field numbers defining the subproblem

  Output Parameters:
+ is    - The global indices for the subproblem (or PETSC_NULL if not requested)
- subdm - The DM for the subproblem (or PETSC_NULL if not requested)

  Level: intermediate

.seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
@*/
PetscErrorCode DMCreateSubDM(DM dm, PetscInt numFields, PetscInt fields[], IS *is, DM *subdm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(fields,3);
  if (is) {PetscValidPointer(is,4);}
  if (subdm) {PetscValidPointer(subdm,5);}
  if (dm->ops->createsubdm) {
    ierr = (*dm->ops->createsubdm)(dm, numFields, fields, is, subdm);CHKERRQ(ierr);
  } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateSubDM implementation defined");
  PetscFunctionReturn(0);
}
1312 
1313 #undef __FUNCT__
1314 #define __FUNCT__ "DMCreateDomainDecompositionDM"
1315 /*@C
1316   DMCreateDomainDecompositionDM - creates a DM that encapsulates a decomposition of the original DM into subdomains.
1317 
1318   Not Collective
1319 
1320   Input Parameters:
1321 + dm   - the DM object
1322 - name - the name of the subdomain decomposition
1323 
1324   Output Parameter:
1325 . ddm  - the subdomain decomposition DM (PETSC_NULL, if no such decomposition is known)
1326 
1327   Level: advanced
1328 
1329 .seealso DMDestroy(), DMCreate(), DMCreateFieldDecomposition(), DMCreateDomainDecompositionDM()
1330 @*/
1331 PetscErrorCode DMCreateDomainDecompositionDM(DM dm, const char* name, DM *ddm)
1332 {
1333   PetscErrorCode ierr;
1334 
1335   PetscFunctionBegin;
1336   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1337   PetscValidCharPointer(name,2);
1338   PetscValidPointer(ddm,3);
1339   *ddm = PETSC_NULL;
1340   if (dm->ops->createdomaindecompositiondm) {
1341     ierr = (*dm->ops->createdomaindecompositiondm)(dm,name,ddm);CHKERRQ(ierr);
1342   }
1343   PetscFunctionReturn(0);
1344 }
1345 
1346 #undef __FUNCT__
1347 #define __FUNCT__ "DMCreateDomainDecomposition"
1348 /*@C
1349   DMCreateDomainDecomposition - Returns lists of IS objects defining a decomposition of a problem into subproblems
1350                           corresponding to restrictions to pairs nested subdomains: each IS contains the global
1351                           indices of the dofs of the corresponding subdomains.  The inner subdomains conceptually
1352                           define a nonoverlapping covering, while outer subdomains can overlap.
1353                           The optional list of DMs define the DM for each subproblem.
1354 
1355   Not collective
1356 
1357   Input Parameter:
1358 . dm - the DM object
1359 
1360   Output Parameters:
1361 + len         - The number of subproblems in the domain decomposition (or PETSC_NULL if not requested)
1362 . namelist    - The name for each subdomain (or PETSC_NULL if not requested)
1363 . innerislist - The global indices for each inner subdomain (or PETSC_NULL, if not requested)
1364 . outerislist - The global indices for each outer subdomain (or PETSC_NULL, if not requested)
1365 - dmlist      - The DMs for each subdomain subproblem (or PETSC_NULL, if not requested; if PETSC_NULL is returned, no DMs are defined)
1366 
1367   Level: intermediate
1368 
1369   Notes:
1370   The user is responsible for freeing all requested arrays. In particular, every entry of names should be freed with
1371   PetscFree(), every entry of is should be destroyed with ISDestroy(), every entry of dm should be destroyed with DMDestroy(),
1372   and all of the arrays should be freed with PetscFree().
1373 
1374 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateDomainDecompositionDM(), DMCreateFieldDecomposition()
1375 @*/
1376 PetscErrorCode DMCreateDomainDecomposition(DM dm, PetscInt *len, char ***namelist, IS **innerislist, IS **outerislist, DM **dmlist)
1377 {
1378   PetscErrorCode ierr;
1379   DMSubDomainHookLink link;
1380   PetscInt            i,l;
1381 
1382   PetscFunctionBegin;
1383   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1384   if (len)           {PetscValidPointer(len,2);            *len         = PETSC_NULL;}
1385   if (namelist)      {PetscValidPointer(namelist,3);       *namelist    = PETSC_NULL;}
1386   if (innerislist)   {PetscValidPointer(innerislist,4);    *innerislist = PETSC_NULL;}
1387   if (outerislist)   {PetscValidPointer(outerislist,5);    *outerislist = PETSC_NULL;}
1388   if (dmlist)        {PetscValidPointer(dmlist,6);         *dmlist      = PETSC_NULL;}
1389   if (dm->ops->createdomaindecomposition) {
1390     ierr = (*dm->ops->createdomaindecomposition)(dm,&l,namelist,innerislist,outerislist,dmlist);CHKERRQ(ierr);
1391   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"DMCreateDomainDecomposition not supported for this DM type!");CHKERRQ(ierr);
1392   if (len) *len = l;
1393   for (i = 0; i < l; i++) {
1394     for (link=dm->subdomainhook; link; link=link->next) {
1395       if (link->ddhook) {ierr = (*link->ddhook)(dm,(*dmlist)[i],link->ctx);CHKERRQ(ierr);}
1396     }
1397     (*dmlist)[i]->ctx = dm->ctx;
1398   }
1399   PetscFunctionReturn(0);
1400 }
1401 
1402 
1403 #undef __FUNCT__
1404 #define __FUNCT__ "DMCreateDomainDecompositionScatters"
1405 /*@C
1406   DMCreateDomainDecompositionScatters - Returns scatters to the subdomain vectors from the global vector
1407 
1408   Not collective
1409 
1410   Input Parameters:
1411 + dm - the DM object
1412 . n  - the number of subdomain scatters
1413 - subdms - the local subdomains
1414 
1415   Output Parameters:
1416 + n     - the number of scatters returned
1417 . iscat - scatter from global vector to nonoverlapping global vector entries on subdomain
1418 . oscat - scatter from global vector to overlapping global vector entries on subdomain
1419 - gscat - scatter from global vector to local vector on subdomain (fills in ghosts)
1420 
1421   Notes: This is an alternative to the iis and ois arguments in DMCreateDomainDecomposition that allow for the solution
1422   of general nonlinear problems with overlapping subdomain methods.  While merely having index sets that enable subsets
1423   of the residual equations to be created is fine for linear problems, nonlinear problems require local assembly of
1424   solution and residual data.
1425 
1426   Level: developer
1427 
1428 .seealso DMDestroy(), DMView(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMCreateFieldIS()
1429 @*/
1430 PetscErrorCode DMCreateDomainDecompositionScatters(DM dm,PetscInt n,DM *subdms,VecScatter **iscat,VecScatter **oscat,VecScatter **gscat)
1431 {
1432   PetscErrorCode ierr;
1433 
1434   PetscFunctionBegin;
1435   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1436   PetscValidPointer(subdms,3);
1437   PetscValidPointer(iscat,4);
1438   PetscValidPointer(oscat,5);
1439   PetscValidPointer(gscat,6);
1440   if (dm->ops->createddscatters) {
1441     ierr = (*dm->ops->createddscatters)(dm,n,subdms,iscat,oscat,gscat);CHKERRQ(ierr);
1442   } else SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "This type has no DMCreateDomainDecompositionLocalScatter implementation defined");
1443   PetscFunctionReturn(0);
1444 }
1445 
1446 #undef __FUNCT__
1447 #define __FUNCT__ "DMRefine"
1448 /*@
1449   DMRefine - Refines a DM object
1450 
1451   Collective on DM
1452 
1453   Input Parameter:
1454 + dm   - the DM object
1455 - comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1456 
1457   Output Parameter:
1458 . dmf - the refined DM, or PETSC_NULL
1459 
1460   Note: If no refinement was done, the return value is PETSC_NULL
1461 
1462   Level: developer
1463 
1464 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1465 @*/
1466 PetscErrorCode  DMRefine(DM dm,MPI_Comm comm,DM *dmf)
1467 {
1468   PetscErrorCode ierr;
1469   DMRefineHookLink link;
1470 
1471   PetscFunctionBegin;
1472   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1473   ierr = (*dm->ops->refine)(dm,comm,dmf);CHKERRQ(ierr);
1474   if (*dmf) {
1475     (*dmf)->ops->creatematrix = dm->ops->creatematrix;
1476     ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmf);CHKERRQ(ierr);
1477     (*dmf)->ctx       = dm->ctx;
1478     (*dmf)->leveldown = dm->leveldown;
1479     (*dmf)->levelup   = dm->levelup + 1;
1480     ierr = DMSetMatType(*dmf,dm->mattype);CHKERRQ(ierr);
1481     for (link=dm->refinehook; link; link=link->next) {
1482       if (link->refinehook) {ierr = (*link->refinehook)(dm,*dmf,link->ctx);CHKERRQ(ierr);}
1483     }
1484   }
1485   PetscFunctionReturn(0);
1486 }
1487 
#undef __FUNCT__
#define __FUNCT__ "DMRefineHookAdd"
/*@
   DMRefineHookAdd - adds a callback to be run when interpolating a nonlinear problem to a finer grid

   Logically Collective

   Input Arguments:
+  coarse - coarse DM on which to run a hook when it is refined or when data is interpolated to a finer level
.  refinehook - function to run when setting up a finer level
.  interphook - function to run to update data on finer levels (once per SNESSolve())
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence of refinehook:
$    refinehook(DM coarse,DM fine,void *ctx);

+  coarse - coarse level DM
.  fine - fine level DM to interpolate problem to
-  ctx - optional user-defined function context

   Calling sequence for interphook:
$    interphook(DM coarse,Mat interp,DM fine,void *ctx)

+  coarse - coarse level DM
.  interp - matrix interpolating a coarse-level solution to the finer grid
.  fine - fine level DM to update
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be passed to fine grids while grid sequencing

   If this function is called multiple times, the hooks will be run in the order they are added.

.seealso: DMCoarsenHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMRefineHookAdd(DM coarse,PetscErrorCode (*refinehook)(DM,DM,void*),PetscErrorCode (*interphook)(DM,Mat,DM,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMRefineHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  for (p=&coarse->refinehook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order (see DMRefine()/DMInterpolate()) */
  ierr = PetscMalloc(sizeof(struct _DMRefineHookLink),&link);CHKERRQ(ierr);
  link->refinehook = refinehook;
  link->interphook = interphook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1541 
1542 #undef __FUNCT__
1543 #define __FUNCT__ "DMInterpolate"
1544 /*@
1545    DMInterpolate - interpolates user-defined problem data to a finer DM by running hooks registered by DMRefineHookAdd()
1546 
1547    Collective if any hooks are
1548 
1549    Input Arguments:
1550 +  coarse - coarser DM to use as a base
1551 .  restrct - interpolation matrix, apply using MatInterpolate()
1552 -  fine - finer DM to update
1553 
1554    Level: developer
1555 
1556 .seealso: DMRefineHookAdd(), MatInterpolate()
1557 @*/
1558 PetscErrorCode DMInterpolate(DM coarse,Mat interp,DM fine)
1559 {
1560   PetscErrorCode ierr;
1561   DMRefineHookLink link;
1562 
1563   PetscFunctionBegin;
1564   for (link=fine->refinehook; link; link=link->next) {
1565     if (link->interphook) {ierr = (*link->interphook)(coarse,interp,fine,link->ctx);CHKERRQ(ierr);}
1566   }
1567   PetscFunctionReturn(0);
1568 }
1569 
1570 #undef __FUNCT__
1571 #define __FUNCT__ "DMGetRefineLevel"
1572 /*@
1573     DMGetRefineLevel - Get's the number of refinements that have generated this DM.
1574 
1575     Not Collective
1576 
1577     Input Parameter:
1578 .   dm - the DM object
1579 
1580     Output Parameter:
1581 .   level - number of refinements
1582 
1583     Level: developer
1584 
1585 .seealso DMCoarsen(), DMGetCoarsenLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1586 
1587 @*/
1588 PetscErrorCode  DMGetRefineLevel(DM dm,PetscInt *level)
1589 {
1590   PetscFunctionBegin;
1591   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1592   *level = dm->levelup;
1593   PetscFunctionReturn(0);
1594 }
1595 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalHookAdd"
/*@
   DMGlobalToLocalHookAdd - adds a callback to be run when global to local is called

   Logically Collective

   Input Arguments:
+  dm - the DM
.  beginhook - function to run at the beginning of DMGlobalToLocalBegin()
.  endhook - function to run after DMGlobalToLocalEnd() has completed
-  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)

   Calling sequence for beginhook:
$    beginhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context


   Calling sequence for endhook:
$    endhook(DM dm,Vec g,InsertMode mode,Vec l,void *ctx)

+  dm - global DM
.  g - global vector
.  mode - mode
.  l - local vector
-  ctx - optional user-defined function context

   Level: advanced

   Notes:
   This function is only needed if auxiliary data needs to be set up on coarse grids.

   If this function is called multiple times, the hooks will be run in the order they are added.

   In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
   extract the finest level information from its context (instead of from the SNES).

.seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
@*/
PetscErrorCode DMGlobalToLocalHookAdd(DM dm,PetscErrorCode (*beginhook)(DM,Vec,InsertMode,Vec,void*),PetscErrorCode (*endhook)(DM,Vec,InsertMode,Vec,void*),void *ctx)
{
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link,*p;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  for (p=&dm->gtolhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
  /* Append a new link so hooks run in registration order */
  ierr = PetscMalloc(sizeof(struct _DMGlobalToLocalHookLink),&link);CHKERRQ(ierr);
  link->beginhook = beginhook;
  link->endhook = endhook;
  link->ctx = ctx;
  link->next = PETSC_NULL;
  *p = link;
  PetscFunctionReturn(0);
}
1653 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin"
/*@
    DMGlobalToLocalBegin - Begins updating local vectors from global vector

    Neighbor-wise Collective on DM

    Input Parameters:
+   dm - the DM object
.   g - the global vector
.   mode - INSERT_VALUES or ADD_VALUES
-   l - the local vector


    Level: beginner

.seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()

@*/
PetscErrorCode  DMGlobalToLocalBegin(DM dm,Vec g,InsertMode mode,Vec l)
{
  PetscSF        sf;
  PetscErrorCode ierr;
  DMGlobalToLocalHookLink link;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Run user begin-hooks registered with DMGlobalToLocalHookAdd() before communication starts */
  for (link=dm->gtolhook; link; link=link->next) {
    if (link->beginhook) {ierr = (*link->beginhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
  }
  ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
  if (sf) {
    PetscScalar *lArray, *gArray;

    /* ADD_VALUES is rejected on the PetscSF path; only insertion is handled here */
    if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
    ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
    ierr = PetscSFBcastBegin(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
    ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
  } else {
    /* Type implementations take only INSERT_VALUES/ADD_VALUES; map the *_ALL_VALUES modes down */
    ierr = (*dm->ops->globaltolocalbegin)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
1699 
1700 #undef __FUNCT__
1701 #define __FUNCT__ "DMGlobalToLocalEnd"
1702 /*@
1703     DMGlobalToLocalEnd - Ends updating local vectors from global vector
1704 
1705     Neighbor-wise Collective on DM
1706 
1707     Input Parameters:
1708 +   dm - the DM object
1709 .   g - the global vector
1710 .   mode - INSERT_VALUES or ADD_VALUES
1711 -   l - the local vector
1712 
1713 
1714     Level: beginner
1715 
1716 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMLocalToGlobalBegin()
1717 
1718 @*/
1719 PetscErrorCode  DMGlobalToLocalEnd(DM dm,Vec g,InsertMode mode,Vec l)
1720 {
1721   PetscSF        sf;
1722   PetscErrorCode ierr;
1723   PetscScalar    *lArray, *gArray;
1724   DMGlobalToLocalHookLink link;
1725 
1726   PetscFunctionBegin;
1727   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1728   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1729   if (sf) {
1730   if (mode == ADD_VALUES) SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1731 
1732     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1733     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1734     ierr = PetscSFBcastEnd(sf, MPIU_SCALAR, gArray, lArray);CHKERRQ(ierr);
1735     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1736     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1737   } else {
1738     ierr = (*dm->ops->globaltolocalend)(dm,g,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),l);CHKERRQ(ierr);
1739   }
1740   for (link=dm->gtolhook; link; link=link->next) {
1741     if (link->endhook) {ierr = (*link->endhook)(dm,g,mode,l,link->ctx);CHKERRQ(ierr);}
1742   }
1743   PetscFunctionReturn(0);
1744 }
1745 
1746 #undef __FUNCT__
1747 #define __FUNCT__ "DMLocalToGlobalBegin"
1748 /*@
1749     DMLocalToGlobalBegin - updates global vectors from local vectors
1750 
1751     Neighbor-wise Collective on DM
1752 
1753     Input Parameters:
1754 +   dm - the DM object
1755 .   l - the local vector
1756 .   mode - if INSERT_VALUES then no parallel communication is used, if ADD_VALUES then all ghost points from the same base point accumulate into that
1757            base point.
1758 - - the global vector
1759 
1760     Notes: In the ADD_VALUES case you normally would zero the receiving vector before beginning this operation. If you would like to simply add the non-ghosted values in the local
1761            array into the global array you need to either (1) zero the ghosted locations and use ADD_VALUES or (2) use INSERT_VALUES into a work global array and then add the work
1762            global array to the final global array with VecAXPY().
1763 
1764     Level: beginner
1765 
1766 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalBegin()
1767 
1768 @*/
1769 PetscErrorCode  DMLocalToGlobalBegin(DM dm,Vec l,InsertMode mode,Vec g)
1770 {
1771   PetscSF        sf;
1772   PetscErrorCode ierr;
1773 
1774   PetscFunctionBegin;
1775   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1776   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1777   if (sf) {
1778     MPI_Op       op;
1779     PetscScalar *lArray, *gArray;
1780 
1781     switch(mode) {
1782     case INSERT_VALUES:
1783     case INSERT_ALL_VALUES:
1784 #if defined(PETSC_HAVE_MPI_REPLACE)
1785       op = MPI_REPLACE; break;
1786 #else
1787       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1788 #endif
1789     case ADD_VALUES:
1790     case ADD_ALL_VALUES:
1791       op = MPI_SUM; break;
1792   default:
1793     SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1794     }
1795     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1796     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1797     ierr = PetscSFReduceBegin(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1798     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1799     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1800   } else {
1801     ierr = (*dm->ops->localtoglobalbegin)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1802   }
1803   PetscFunctionReturn(0);
1804 }
1805 
1806 #undef __FUNCT__
1807 #define __FUNCT__ "DMLocalToGlobalEnd"
1808 /*@
1809     DMLocalToGlobalEnd - updates global vectors from local vectors
1810 
1811     Neighbor-wise Collective on DM
1812 
1813     Input Parameters:
1814 +   dm - the DM object
1815 .   l - the local vector
1816 .   mode - INSERT_VALUES or ADD_VALUES
1817 -   g - the global vector
1818 
1819 
1820     Level: beginner
1821 
1822 .seealso DMCoarsen(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMGlobalToLocalEnd(), DMGlobalToLocalEnd()
1823 
1824 @*/
1825 PetscErrorCode  DMLocalToGlobalEnd(DM dm,Vec l,InsertMode mode,Vec g)
1826 {
1827   PetscSF        sf;
1828   PetscErrorCode ierr;
1829 
1830   PetscFunctionBegin;
1831   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1832   ierr = DMGetDefaultSF(dm, &sf);CHKERRQ(ierr);
1833   if (sf) {
1834     MPI_Op       op;
1835     PetscScalar *lArray, *gArray;
1836 
1837     switch(mode) {
1838     case INSERT_VALUES:
1839     case INSERT_ALL_VALUES:
1840 #if defined(PETSC_HAVE_MPI_REPLACE)
1841       op = MPI_REPLACE; break;
1842 #else
1843       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No support for INSERT_VALUES without an MPI-2 implementation");
1844 #endif
1845     case ADD_VALUES:
1846     case ADD_ALL_VALUES:
1847       op = MPI_SUM; break;
1848     default:
1849       SETERRQ1(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Invalid insertion mode %D", mode);
1850     }
1851     ierr = VecGetArray(l, &lArray);CHKERRQ(ierr);
1852     ierr = VecGetArray(g, &gArray);CHKERRQ(ierr);
1853     ierr = PetscSFReduceEnd(sf, MPIU_SCALAR, lArray, gArray, op);CHKERRQ(ierr);
1854     ierr = VecRestoreArray(l, &lArray);CHKERRQ(ierr);
1855     ierr = VecRestoreArray(g, &gArray);CHKERRQ(ierr);
1856   } else {
1857     ierr = (*dm->ops->localtoglobalend)(dm,l,mode == INSERT_ALL_VALUES ? INSERT_VALUES : (mode == ADD_ALL_VALUES ? ADD_VALUES : mode),g);CHKERRQ(ierr);
1858   }
1859   PetscFunctionReturn(0);
1860 }
1861 
1862 #undef __FUNCT__
1863 #define __FUNCT__ "DMCoarsen"
1864 /*@
1865     DMCoarsen - Coarsens a DM object
1866 
1867     Collective on DM
1868 
1869     Input Parameter:
1870 +   dm - the DM object
1871 -   comm - the communicator to contain the new DM object (or MPI_COMM_NULL)
1872 
1873     Output Parameter:
1874 .   dmc - the coarsened DM
1875 
1876     Level: developer
1877 
1878 .seealso DMRefine(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
1879 
1880 @*/
1881 PetscErrorCode  DMCoarsen(DM dm, MPI_Comm comm, DM *dmc)
1882 {
1883   PetscErrorCode ierr;
1884   DMCoarsenHookLink link;
1885 
1886   PetscFunctionBegin;
1887   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1888   ierr = (*dm->ops->coarsen)(dm, comm, dmc);CHKERRQ(ierr);
1889   (*dmc)->ops->creatematrix = dm->ops->creatematrix;
1890   ierr = PetscObjectCopyFortranFunctionPointers((PetscObject)dm,(PetscObject)*dmc);CHKERRQ(ierr);
1891   (*dmc)->ctx       = dm->ctx;
1892   (*dmc)->levelup   = dm->levelup;
1893   (*dmc)->leveldown = dm->leveldown + 1;
1894   ierr = DMSetMatType(*dmc,dm->mattype);CHKERRQ(ierr);
1895   for (link=dm->coarsenhook; link; link=link->next) {
1896     if (link->coarsenhook) {ierr = (*link->coarsenhook)(dm,*dmc,link->ctx);CHKERRQ(ierr);}
1897   }
1898   PetscFunctionReturn(0);
1899 }
1900 
1901 #undef __FUNCT__
1902 #define __FUNCT__ "DMCoarsenHookAdd"
1903 /*@
1904    DMCoarsenHookAdd - adds a callback to be run when restricting a nonlinear problem to the coarse grid
1905 
1906    Logically Collective
1907 
1908    Input Arguments:
1909 +  fine - nonlinear solver context on which to run a hook when restricting to a coarser level
1910 .  coarsenhook - function to run when setting up a coarser level
1911 .  restricthook - function to run to update data on coarser levels (once per SNESSolve())
1912 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
1913 
1914    Calling sequence of coarsenhook:
1915 $    coarsenhook(DM fine,DM coarse,void *ctx);
1916 
1917 +  fine - fine level DM
1918 .  coarse - coarse level DM to restrict problem to
1919 -  ctx - optional user-defined function context
1920 
1921    Calling sequence for restricthook:
1922 $    restricthook(DM fine,Mat mrestrict,Vec rscale,Mat inject,DM coarse,void *ctx)
1923 
1924 +  fine - fine level DM
1925 .  mrestrict - matrix restricting a fine-level solution to the coarse grid
1926 .  rscale - scaling vector for restriction
1927 .  inject - matrix restricting by injection
1928 .  coarse - coarse level DM to update
1929 -  ctx - optional user-defined function context
1930 
1931    Level: advanced
1932 
1933    Notes:
1934    This function is only needed if auxiliary data needs to be set up on coarse grids.
1935 
1936    If this function is called multiple times, the hooks will be run in the order they are added.
1937 
1938    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
1939    extract the finest level information from its context (instead of from the SNES).
1940 
1941 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
1942 @*/
1943 PetscErrorCode DMCoarsenHookAdd(DM fine,PetscErrorCode (*coarsenhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,Mat,Vec,Mat,DM,void*),void *ctx)
1944 {
1945   PetscErrorCode ierr;
1946   DMCoarsenHookLink link,*p;
1947 
1948   PetscFunctionBegin;
1949   PetscValidHeaderSpecific(fine,DM_CLASSID,1);
1950   for (p=&fine->coarsenhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
1951   ierr = PetscMalloc(sizeof(struct _DMCoarsenHookLink),&link);CHKERRQ(ierr);
1952   link->coarsenhook = coarsenhook;
1953   link->restricthook = restricthook;
1954   link->ctx = ctx;
1955   link->next = PETSC_NULL;
1956   *p = link;
1957   PetscFunctionReturn(0);
1958 }
1959 
1960 #undef __FUNCT__
1961 #define __FUNCT__ "DMRestrict"
1962 /*@
1963    DMRestrict - restricts user-defined problem data to a coarser DM by running hooks registered by DMCoarsenHookAdd()
1964 
1965    Collective if any hooks are
1966 
1967    Input Arguments:
1968 +  fine - finer DM to use as a base
1969 .  restrct - restriction matrix, apply using MatRestrict()
1970 .  inject - injection matrix, also use MatRestrict()
1971 -  coarse - coarer DM to update
1972 
1973    Level: developer
1974 
1975 .seealso: DMCoarsenHookAdd(), MatRestrict()
1976 @*/
1977 PetscErrorCode DMRestrict(DM fine,Mat restrct,Vec rscale,Mat inject,DM coarse)
1978 {
1979   PetscErrorCode ierr;
1980   DMCoarsenHookLink link;
1981 
1982   PetscFunctionBegin;
1983   for (link=fine->coarsenhook; link; link=link->next) {
1984     if (link->restricthook) {ierr = (*link->restricthook)(fine,restrct,rscale,inject,coarse,link->ctx);CHKERRQ(ierr);}
1985   }
1986   PetscFunctionReturn(0);
1987 }
1988 
1989 #undef __FUNCT__
1990 #define __FUNCT__ "DMSubDomainHookAdd"
1991 /*@
1992    DMSubDomainHookAdd - adds a callback to be run when restricting a problem to the coarse grid
1993 
1994    Logically Collective
1995 
1996    Input Arguments:
1997 +  global - global DM
1998 .
1999 .  restricthook - function to run to update data on block solve (at the beginning of the block solve)
2000 -  ctx - [optional] user-defined context for provide data for the hooks (may be PETSC_NULL)
2001 
2002    Calling sequence for restricthook:
2003 $    restricthook(DM fine,VecScatter out,VecScatter in,DM coarse,void *ctx)
2004 
2005 +  global - global DM
2006 .  out    - scatter to the outer (with ghost and overlap points) block vector
2007 .  in     - scatter to block vector values only owned locally
2008 .  block  - block DM (may just be a shell if the global DM is passed in correctly)
2009 -  ctx - optional user-defined function context
2010 
2011    Level: advanced
2012 
2013    Notes:
2014    This function is only needed if auxiliary data needs to be set up on coarse grids.
2015 
2016    If this function is called multiple times, the hooks will be run in the order they are added.
2017 
2018    In order to compose with nonlinear preconditioning without duplicating storage, the hook should be implemented to
2019    extract the finest level information from its context (instead of from the SNES).
2020 
2021 .seealso: DMRefineHookAdd(), SNESFASGetInterpolation(), SNESFASGetInjection(), PetscObjectCompose(), PetscContainerCreate()
2022 @*/
2023 PetscErrorCode DMSubDomainHookAdd(DM global,PetscErrorCode (*ddhook)(DM,DM,void*),PetscErrorCode (*restricthook)(DM,VecScatter,VecScatter,DM,void*),void *ctx)
2024 {
2025   PetscErrorCode ierr;
2026   DMSubDomainHookLink link,*p;
2027 
2028   PetscFunctionBegin;
2029   PetscValidHeaderSpecific(global,DM_CLASSID,1);
2030   for (p=&global->subdomainhook; *p; p=&(*p)->next) {} /* Scan to the end of the current list of hooks */
2031   ierr = PetscMalloc(sizeof(struct _DMSubDomainHookLink),&link);CHKERRQ(ierr);
2032   link->restricthook = restricthook;
2033   link->ddhook = ddhook;
2034   link->ctx = ctx;
2035   link->next = PETSC_NULL;
2036   *p = link;
2037   PetscFunctionReturn(0);
2038 }
2039 
2040 #undef __FUNCT__
2041 #define __FUNCT__ "DMSubDomainRestrict"
2042 /*@
2043    DMSubDomainRestrict - restricts user-defined problem data to a block DM by running hooks registered by DMSubDomainHookAdd()
2044 
2045    Collective if any hooks are
2046 
2047    Input Arguments:
2048 +  fine - finer DM to use as a base
2049 .  oscatter - scatter from domain global vector filling subdomain global vector with overlap
2050 .  gscatter - scatter from domain global vector filling subdomain local vector with ghosts
2051 -  coarse - coarer DM to update
2052 
2053    Level: developer
2054 
2055 .seealso: DMCoarsenHookAdd(), MatRestrict()
2056 @*/
2057 PetscErrorCode DMSubDomainRestrict(DM global,VecScatter oscatter,VecScatter gscatter,DM subdm)
2058 {
2059   PetscErrorCode ierr;
2060   DMSubDomainHookLink link;
2061 
2062   PetscFunctionBegin;
2063   for (link=global->subdomainhook; link; link=link->next) {
2064     if (link->restricthook) {ierr = (*link->restricthook)(global,oscatter,gscatter,subdm,link->ctx);CHKERRQ(ierr);}
2065   }
2066   PetscFunctionReturn(0);
2067 }
2068 
2069 #undef __FUNCT__
2070 #define __FUNCT__ "DMGetCoarsenLevel"
2071 /*@
2072     DMGetCoarsenLevel - Get's the number of coarsenings that have generated this DM.
2073 
2074     Not Collective
2075 
2076     Input Parameter:
2077 .   dm - the DM object
2078 
2079     Output Parameter:
2080 .   level - number of coarsenings
2081 
2082     Level: developer
2083 
2084 .seealso DMCoarsen(), DMGetRefineLevel(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2085 
2086 @*/
2087 PetscErrorCode  DMGetCoarsenLevel(DM dm,PetscInt *level)
2088 {
2089   PetscFunctionBegin;
2090   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2091   *level = dm->leveldown;
2092   PetscFunctionReturn(0);
2093 }
2094 
2095 
2096 
2097 #undef __FUNCT__
2098 #define __FUNCT__ "DMRefineHierarchy"
2099 /*@C
2100     DMRefineHierarchy - Refines a DM object, all levels at once
2101 
2102     Collective on DM
2103 
2104     Input Parameter:
2105 +   dm - the DM object
2106 -   nlevels - the number of levels of refinement
2107 
2108     Output Parameter:
2109 .   dmf - the refined DM hierarchy
2110 
2111     Level: developer
2112 
2113 .seealso DMCoarsenHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2114 
2115 @*/
2116 PetscErrorCode  DMRefineHierarchy(DM dm,PetscInt nlevels,DM dmf[])
2117 {
2118   PetscErrorCode ierr;
2119 
2120   PetscFunctionBegin;
2121   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2122   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2123   if (nlevels == 0) PetscFunctionReturn(0);
2124   if (dm->ops->refinehierarchy) {
2125     ierr = (*dm->ops->refinehierarchy)(dm,nlevels,dmf);CHKERRQ(ierr);
2126   } else if (dm->ops->refine) {
2127     PetscInt i;
2128 
2129     ierr = DMRefine(dm,((PetscObject)dm)->comm,&dmf[0]);CHKERRQ(ierr);
2130     for (i=1; i<nlevels; i++) {
2131       ierr = DMRefine(dmf[i-1],((PetscObject)dm)->comm,&dmf[i]);CHKERRQ(ierr);
2132     }
2133   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No RefineHierarchy for this DM yet");
2134   PetscFunctionReturn(0);
2135 }
2136 
2137 #undef __FUNCT__
2138 #define __FUNCT__ "DMCoarsenHierarchy"
2139 /*@C
2140     DMCoarsenHierarchy - Coarsens a DM object, all levels at once
2141 
2142     Collective on DM
2143 
2144     Input Parameter:
2145 +   dm - the DM object
2146 -   nlevels - the number of levels of coarsening
2147 
2148     Output Parameter:
2149 .   dmc - the coarsened DM hierarchy
2150 
2151     Level: developer
2152 
2153 .seealso DMRefineHierarchy(), DMDestroy(), DMView(), DMCreateGlobalVector(), DMCreateInterpolation()
2154 
2155 @*/
2156 PetscErrorCode  DMCoarsenHierarchy(DM dm, PetscInt nlevels, DM dmc[])
2157 {
2158   PetscErrorCode ierr;
2159 
2160   PetscFunctionBegin;
2161   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2162   if (nlevels < 0) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"nlevels cannot be negative");
2163   if (nlevels == 0) PetscFunctionReturn(0);
2164   PetscValidPointer(dmc,3);
2165   if (dm->ops->coarsenhierarchy) {
2166     ierr = (*dm->ops->coarsenhierarchy)(dm, nlevels, dmc);CHKERRQ(ierr);
2167   } else if (dm->ops->coarsen) {
2168     PetscInt i;
2169 
2170     ierr = DMCoarsen(dm,((PetscObject)dm)->comm,&dmc[0]);CHKERRQ(ierr);
2171     for (i=1; i<nlevels; i++) {
2172       ierr = DMCoarsen(dmc[i-1],((PetscObject)dm)->comm,&dmc[i]);CHKERRQ(ierr);
2173     }
2174   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"No CoarsenHierarchy for this DM yet");
2175   PetscFunctionReturn(0);
2176 }
2177 
2178 #undef __FUNCT__
2179 #define __FUNCT__ "DMCreateAggregates"
2180 /*@
2181    DMCreateAggregates - Gets the aggregates that map between
2182    grids associated with two DMs.
2183 
2184    Collective on DM
2185 
2186    Input Parameters:
2187 +  dmc - the coarse grid DM
2188 -  dmf - the fine grid DM
2189 
2190    Output Parameters:
2191 .  rest - the restriction matrix (transpose of the projection matrix)
2192 
2193    Level: intermediate
2194 
2195 .keywords: interpolation, restriction, multigrid
2196 
2197 .seealso: DMRefine(), DMCreateInjection(), DMCreateInterpolation()
2198 @*/
2199 PetscErrorCode  DMCreateAggregates(DM dmc, DM dmf, Mat *rest)
2200 {
2201   PetscErrorCode ierr;
2202 
2203   PetscFunctionBegin;
2204   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
2205   PetscValidHeaderSpecific(dmf,DM_CLASSID,2);
2206   ierr = (*dmc->ops->getaggregates)(dmc, dmf, rest);CHKERRQ(ierr);
2207   PetscFunctionReturn(0);
2208 }
2209 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContextDestroy"
/*@C
    DMSetApplicationContextDestroy - Sets a user function that will be called to destroy the application context when the DM is destroyed

    Not Collective

    Input Parameters:
+   dm - the DM object
-   destroy - the destroy function

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContextDestroy(DM dm,PetscErrorCode (*destroy)(void**))
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* Stored only; invoked on dm->ctx when the DM itself is destroyed */
  dm->ctxdestroy = destroy;
  PetscFunctionReturn(0);
}
2233 
#undef __FUNCT__
#define __FUNCT__ "DMSetApplicationContext"
/*@
    DMSetApplicationContext - Set a user context into a DM object

    Not Collective

    Input Parameters:
+   dm - the DM object
-   ctx - the user context

    Level: intermediate

.seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()

@*/
PetscErrorCode  DMSetApplicationContext(DM dm,void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* The DM does not copy or take ownership of ctx unless a destroy routine is set
     with DMSetApplicationContextDestroy() */
  dm->ctx = ctx;
  PetscFunctionReturn(0);
}
2257 
2258 #undef __FUNCT__
2259 #define __FUNCT__ "DMGetApplicationContext"
2260 /*@
2261     DMGetApplicationContext - Gets a user context from a DM object
2262 
2263     Not Collective
2264 
2265     Input Parameter:
2266 .   dm - the DM object
2267 
2268     Output Parameter:
2269 .   ctx - the user context
2270 
2271     Level: intermediate
2272 
2273 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2274 
2275 @*/
2276 PetscErrorCode  DMGetApplicationContext(DM dm,void *ctx)
2277 {
2278   PetscFunctionBegin;
2279   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2280   *(void**)ctx = dm->ctx;
2281   PetscFunctionReturn(0);
2282 }
2283 
2284 #undef __FUNCT__
2285 #define __FUNCT__ "DMSetVariableBounds"
2286 /*@C
2287     DMSetVariableBounds - sets a function to compute the the lower and upper bound vectors for SNESVI.
2288 
2289     Logically Collective on DM
2290 
2291     Input Parameter:
2292 +   dm - the DM object
2293 -   f - the function that computes variable bounds used by SNESVI (use PETSC_NULL to cancel a previous function that was set)
2294 
2295     Level: intermediate
2296 
2297 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext(),
2298          DMSetJacobian()
2299 
2300 @*/
2301 PetscErrorCode  DMSetVariableBounds(DM dm,PetscErrorCode (*f)(DM,Vec,Vec))
2302 {
2303   PetscFunctionBegin;
2304   dm->ops->computevariablebounds = f;
2305   PetscFunctionReturn(0);
2306 }
2307 
2308 #undef __FUNCT__
2309 #define __FUNCT__ "DMHasVariableBounds"
2310 /*@
2311     DMHasVariableBounds - does the DM object have a variable bounds function?
2312 
2313     Not Collective
2314 
2315     Input Parameter:
2316 .   dm - the DM object to destroy
2317 
2318     Output Parameter:
2319 .   flg - PETSC_TRUE if the variable bounds function exists
2320 
2321     Level: developer
2322 
2323 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2324 
2325 @*/
2326 PetscErrorCode  DMHasVariableBounds(DM dm,PetscBool  *flg)
2327 {
2328   PetscFunctionBegin;
2329   *flg =  (dm->ops->computevariablebounds) ? PETSC_TRUE : PETSC_FALSE;
2330   PetscFunctionReturn(0);
2331 }
2332 
2333 #undef __FUNCT__
2334 #define __FUNCT__ "DMComputeVariableBounds"
2335 /*@C
2336     DMComputeVariableBounds - compute variable bounds used by SNESVI.
2337 
2338     Logically Collective on DM
2339 
2340     Input Parameters:
2341 +   dm - the DM object to destroy
2342 -   x  - current solution at which the bounds are computed
2343 
2344     Output parameters:
2345 +   xl - lower bound
2346 -   xu - upper bound
2347 
2348     Level: intermediate
2349 
2350 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2351 
2352 @*/
2353 PetscErrorCode  DMComputeVariableBounds(DM dm, Vec xl, Vec xu)
2354 {
2355   PetscErrorCode ierr;
2356   PetscFunctionBegin;
2357   PetscValidHeaderSpecific(xl,VEC_CLASSID,2);
2358   PetscValidHeaderSpecific(xu,VEC_CLASSID,2);
2359   if (dm->ops->computevariablebounds) {
2360     ierr = (*dm->ops->computevariablebounds)(dm, xl,xu);CHKERRQ(ierr);
2361   }
2362   else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_ARG_WRONGSTATE, "This DM is incapable of computing variable bounds.");
2363   PetscFunctionReturn(0);
2364 }
2365 
2366 #undef __FUNCT__
2367 #define __FUNCT__ "DMHasColoring"
2368 /*@
2369     DMHasColoring - does the DM object have a method of providing a coloring?
2370 
2371     Not Collective
2372 
2373     Input Parameter:
2374 .   dm - the DM object
2375 
2376     Output Parameter:
2377 .   flg - PETSC_TRUE if the DM has facilities for DMCreateColoring().
2378 
2379     Level: developer
2380 
2381 .seealso DMHasFunction(), DMCreateColoring()
2382 
2383 @*/
2384 PetscErrorCode  DMHasColoring(DM dm,PetscBool  *flg)
2385 {
2386   PetscFunctionBegin;
2387   *flg =  (dm->ops->getcoloring) ? PETSC_TRUE : PETSC_FALSE;
2388   PetscFunctionReturn(0);
2389 }
2390 
2391 #undef  __FUNCT__
2392 #define __FUNCT__ "DMSetVec"
2393 /*@C
2394     DMSetVec - set the vector at which to compute residual, Jacobian and VI bounds, if the problem is nonlinear.
2395 
2396     Collective on DM
2397 
2398     Input Parameter:
2399 +   dm - the DM object
2400 -   x - location to compute residual and Jacobian, if PETSC_NULL is passed to those routines; will be PETSC_NULL for linear problems.
2401 
2402     Level: developer
2403 
2404 .seealso DMView(), DMCreateGlobalVector(), DMCreateInterpolation(), DMCreateColoring(), DMCreateMatrix(), DMGetApplicationContext()
2405 
2406 @*/
2407 PetscErrorCode  DMSetVec(DM dm,Vec x)
2408 {
2409   PetscErrorCode ierr;
2410   PetscFunctionBegin;
2411   if (x) {
2412     if (!dm->x) {
2413       ierr = DMCreateGlobalVector(dm,&dm->x);CHKERRQ(ierr);
2414     }
2415     ierr = VecCopy(x,dm->x);CHKERRQ(ierr);
2416   }
2417   else if (dm->x) {
2418     ierr = VecDestroy(&dm->x);CHKERRQ(ierr);
2419   }
2420   PetscFunctionReturn(0);
2421 }
2422 
/* Registry of available DM types, populated by DMRegister()/DMRegisterAll() */
PetscFList DMList                       = PETSC_NULL;
/* Guards against registering the built-in DM types more than once */
PetscBool  DMRegisterAllCalled          = PETSC_FALSE;
2425 
2426 #undef __FUNCT__
2427 #define __FUNCT__ "DMSetType"
2428 /*@C
2429   DMSetType - Builds a DM, for a particular DM implementation.
2430 
2431   Collective on DM
2432 
2433   Input Parameters:
2434 + dm     - The DM object
2435 - method - The name of the DM type
2436 
2437   Options Database Key:
2438 . -dm_type <type> - Sets the DM type; use -help for a list of available types
2439 
2440   Notes:
2441   See "petsc/include/petscdm.h" for available DM types (for instance, DM1D, DM2D, or DM3D).
2442 
2443   Level: intermediate
2444 
2445 .keywords: DM, set, type
2446 .seealso: DMGetType(), DMCreate()
2447 @*/
2448 PetscErrorCode  DMSetType(DM dm, DMType method)
2449 {
2450   PetscErrorCode (*r)(DM);
2451   PetscBool      match;
2452   PetscErrorCode ierr;
2453 
2454   PetscFunctionBegin;
2455   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2456   ierr = PetscObjectTypeCompare((PetscObject) dm, method, &match);CHKERRQ(ierr);
2457   if (match) PetscFunctionReturn(0);
2458 
2459   if (!DMRegisterAllCalled) {ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);}
2460   ierr = PetscFListFind(DMList, ((PetscObject)dm)->comm, method,PETSC_TRUE,(void (**)(void)) &r);CHKERRQ(ierr);
2461   if (!r) SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_UNKNOWN_TYPE, "Unknown DM type: %s", method);
2462 
2463   if (dm->ops->destroy) {
2464     ierr = (*dm->ops->destroy)(dm);CHKERRQ(ierr);
2465     dm->ops->destroy = PETSC_NULL;
2466   }
2467   ierr = (*r)(dm);CHKERRQ(ierr);
2468   ierr = PetscObjectChangeTypeName((PetscObject)dm,method);CHKERRQ(ierr);
2469   PetscFunctionReturn(0);
2470 }
2471 
2472 #undef __FUNCT__
2473 #define __FUNCT__ "DMGetType"
2474 /*@C
2475   DMGetType - Gets the DM type name (as a string) from the DM.
2476 
2477   Not Collective
2478 
2479   Input Parameter:
2480 . dm  - The DM
2481 
2482   Output Parameter:
2483 . type - The DM type name
2484 
2485   Level: intermediate
2486 
2487 .keywords: DM, get, type, name
2488 .seealso: DMSetType(), DMCreate()
2489 @*/
2490 PetscErrorCode  DMGetType(DM dm, DMType *type)
2491 {
2492   PetscErrorCode ierr;
2493 
2494   PetscFunctionBegin;
2495   PetscValidHeaderSpecific(dm, DM_CLASSID,1);
2496   PetscValidCharPointer(type,2);
2497   if (!DMRegisterAllCalled) {
2498     ierr = DMRegisterAll(PETSC_NULL);CHKERRQ(ierr);
2499   }
2500   *type = ((PetscObject)dm)->type_name;
2501   PetscFunctionReturn(0);
2502 }
2503 
2504 #undef __FUNCT__
2505 #define __FUNCT__ "DMConvert"
2506 /*@C
2507   DMConvert - Converts a DM to another DM, either of the same or different type.
2508 
2509   Collective on DM
2510 
2511   Input Parameters:
2512 + dm - the DM
2513 - newtype - new DM type (use "same" for the same type)
2514 
2515   Output Parameter:
2516 . M - pointer to new DM
2517 
2518   Notes:
2519   Cannot be used to convert a sequential DM to parallel or parallel to sequential,
2520   the MPI communicator of the generated DM is always the same as the communicator
2521   of the input DM.
2522 
2523   Level: intermediate
2524 
2525 .seealso: DMCreate()
2526 @*/
2527 PetscErrorCode DMConvert(DM dm, DMType newtype, DM *M)
2528 {
2529   DM             B;
2530   char           convname[256];
2531   PetscBool      sametype, issame;
2532   PetscErrorCode ierr;
2533 
2534   PetscFunctionBegin;
2535   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
2536   PetscValidType(dm,1);
2537   PetscValidPointer(M,3);
2538   ierr = PetscObjectTypeCompare((PetscObject) dm, newtype, &sametype);CHKERRQ(ierr);
2539   ierr = PetscStrcmp(newtype, "same", &issame);CHKERRQ(ierr);
2540   {
2541     PetscErrorCode (*conv)(DM, DMType, DM *) = PETSC_NULL;
2542 
2543     /*
2544        Order of precedence:
2545        1) See if a specialized converter is known to the current DM.
2546        2) See if a specialized converter is known to the desired DM class.
2547        3) See if a good general converter is registered for the desired class
2548        4) See if a good general converter is known for the current matrix.
2549        5) Use a really basic converter.
2550     */
2551 
2552     /* 1) See if a specialized converter is known to the current DM and the desired class */
2553     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2554     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2555     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2556     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2557     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2558     ierr = PetscObjectQueryFunction((PetscObject)dm,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2559     if (conv) goto foundconv;
2560 
2561     /* 2)  See if a specialized converter is known to the desired DM class. */
2562     ierr = DMCreate(((PetscObject) dm)->comm, &B);CHKERRQ(ierr);
2563     ierr = DMSetType(B, newtype);CHKERRQ(ierr);
2564     ierr = PetscStrcpy(convname,"DMConvert_");CHKERRQ(ierr);
2565     ierr = PetscStrcat(convname,((PetscObject) dm)->type_name);CHKERRQ(ierr);
2566     ierr = PetscStrcat(convname,"_");CHKERRQ(ierr);
2567     ierr = PetscStrcat(convname,newtype);CHKERRQ(ierr);
2568     ierr = PetscStrcat(convname,"_C");CHKERRQ(ierr);
2569     ierr = PetscObjectQueryFunction((PetscObject)B,convname,(void (**)(void))&conv);CHKERRQ(ierr);
2570     if (conv) {
2571       ierr = DMDestroy(&B);CHKERRQ(ierr);
2572       goto foundconv;
2573     }
2574 
2575 #if 0
2576     /* 3) See if a good general converter is registered for the desired class */
2577     conv = B->ops->convertfrom;
2578     ierr = DMDestroy(&B);CHKERRQ(ierr);
2579     if (conv) goto foundconv;
2580 
2581     /* 4) See if a good general converter is known for the current matrix */
2582     if (dm->ops->convert) {
2583       conv = dm->ops->convert;
2584     }
2585     if (conv) goto foundconv;
2586 #endif
2587 
2588     /* 5) Use a really basic converter. */
2589     SETERRQ2(((PetscObject) dm)->comm, PETSC_ERR_SUP, "No conversion possible between DM types %s and %s", ((PetscObject) dm)->type_name, newtype);
2590 
2591     foundconv:
2592     ierr = PetscLogEventBegin(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2593     ierr = (*conv)(dm,newtype,M);CHKERRQ(ierr);
2594     ierr = PetscLogEventEnd(DM_Convert,dm,0,0,0);CHKERRQ(ierr);
2595   }
2596   ierr = PetscObjectStateIncrease((PetscObject) *M);CHKERRQ(ierr);
2597   PetscFunctionReturn(0);
2598 }
2599 
2600 /*--------------------------------------------------------------------------------------------------------------------*/
2601 
2602 #undef __FUNCT__
2603 #define __FUNCT__ "DMRegister"
2604 /*@C
2605   DMRegister - See DMRegisterDynamic()
2606 
2607   Level: advanced
2608 @*/
2609 PetscErrorCode  DMRegister(const char sname[], const char path[], const char name[], PetscErrorCode (*function)(DM))
2610 {
2611   char fullname[PETSC_MAX_PATH_LEN];
2612   PetscErrorCode ierr;
2613 
2614   PetscFunctionBegin;
2615   ierr = PetscStrcpy(fullname, path);CHKERRQ(ierr);
2616   ierr = PetscStrcat(fullname, ":");CHKERRQ(ierr);
2617   ierr = PetscStrcat(fullname, name);CHKERRQ(ierr);
2618   ierr = PetscFListAdd(&DMList, sname, fullname, (void (*)(void)) function);CHKERRQ(ierr);
2619   PetscFunctionReturn(0);
2620 }
2621 
2622 
2623 /*--------------------------------------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMRegisterDestroy"
/*@C
   DMRegisterDestroy - Frees the list of DM methods that were registered by DMRegister()/DMRegisterDynamic().

   Not Collective

   Level: advanced

.keywords: DM, register, destroy
.seealso: DMRegister(), DMRegisterAll(), DMRegisterDynamic()
@*/
PetscErrorCode  DMRegisterDestroy(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscFListDestroy(&DMList);CHKERRQ(ierr);
  /* Reset the flag so DMRegisterAll() can repopulate the list later if needed */
  DMRegisterAllCalled = PETSC_FALSE;
  PetscFunctionReturn(0);
}
2645 
#undef __FUNCT__
#define __FUNCT__ "DMLoad"
/*@C
  DMLoad - Loads a DM that has been stored in binary with DMView().

  Collective on PetscViewer

  Input Parameters:
+ newdm - the newly loaded DM, this needs to have been created with DMCreate() or
           some related function before a call to DMLoad().
- viewer - binary file viewer, obtained from PetscViewerBinaryOpen() or
           HDF5 file viewer, obtained from PetscViewerHDF5Open()

   Level: intermediate

  Notes:
   The type is determined by the data in the file, any type set into the DM before this call is ignored.

  Notes for advanced users:
  Most users should not need to know the details of the binary storage
  format, since DMLoad() and DMView() completely hide these details.
  But for anyone who's interested, the standard binary matrix storage
  format is
.vb
     has not yet been determined
.ve

.seealso: PetscViewerBinaryOpen(), DMView(), MatLoad(), VecLoad()
@*/
PetscErrorCode  DMLoad(DM newdm, PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      isbinary;
  PetscInt       classid;
  char           type[256];

  PetscFunctionBegin;
  PetscValidHeaderSpecific(newdm,DM_CLASSID,1);
  PetscValidHeaderSpecific(viewer,PETSC_VIEWER_CLASSID,2);
  /* Only the binary viewer is handled below, despite the HDF5 mention in the docs */
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr);
  if (!isbinary) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid viewer; open viewer with PetscViewerBinaryOpen()");

  /* File header: a class id marking a DM record, followed by the type name */
  ierr = PetscViewerBinaryRead(viewer,&classid,1,PETSC_INT);CHKERRQ(ierr);
  if (classid != DM_FILE_CLASSID) SETERRQ(((PetscObject)newdm)->comm,PETSC_ERR_ARG_WRONG,"Not DM next in file");
  /* NOTE(review): assumes the stored type name is NUL-terminated within 256 bytes -- confirm against the writer in DMView() */
  ierr = PetscViewerBinaryRead(viewer,type,256,PETSC_CHAR);CHKERRQ(ierr);
  ierr = DMSetType(newdm, type);CHKERRQ(ierr);
  /* Delegate the rest of the file format to the type-specific loader, if one exists */
  if (newdm->ops->load) {
    ierr = (*newdm->ops->load)(newdm,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
2697 
2698 /******************************** FEM Support **********************************/
2699 
2700 #undef __FUNCT__
2701 #define __FUNCT__ "DMPrintCellVector"
2702 PetscErrorCode DMPrintCellVector(PetscInt c, const char name[], PetscInt len, const PetscScalar x[]) {
2703   PetscInt       f;
2704   PetscErrorCode ierr;
2705 
2706   PetscFunctionBegin;
2707   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2708   for (f = 0; f < len; ++f) {
2709     ierr = PetscPrintf(PETSC_COMM_SELF, "  | %G |\n", PetscRealPart(x[f]));CHKERRQ(ierr);
2710   }
2711   PetscFunctionReturn(0);
2712 }
2713 
2714 #undef __FUNCT__
2715 #define __FUNCT__ "DMPrintCellMatrix"
2716 PetscErrorCode DMPrintCellMatrix(PetscInt c, const char name[], PetscInt rows, PetscInt cols, const PetscScalar A[]) {
2717   PetscInt       f, g;
2718   PetscErrorCode ierr;
2719 
2720   PetscFunctionBegin;
2721   ierr = PetscPrintf(PETSC_COMM_SELF, "Cell %D Element %s\n", c, name);CHKERRQ(ierr);
2722   for (f = 0; f < rows; ++f) {
2723     ierr = PetscPrintf(PETSC_COMM_SELF, "  |");CHKERRQ(ierr);
2724     for (g = 0; g < cols; ++g) {
2725       ierr = PetscPrintf(PETSC_COMM_SELF, " % 9.5G", PetscRealPart(A[f*cols+g]));CHKERRQ(ierr);
2726     }
2727     ierr = PetscPrintf(PETSC_COMM_SELF, " |\n");CHKERRQ(ierr);
2728   }
2729   PetscFunctionReturn(0);
2730 }
2731 
2732 #undef __FUNCT__
2733 #define __FUNCT__ "DMGetDefaultSection"
2734 /*@
2735   DMGetDefaultSection - Get the PetscSection encoding the local data layout for the DM.
2736 
2737   Input Parameter:
2738 . dm - The DM
2739 
2740   Output Parameter:
2741 . section - The PetscSection
2742 
2743   Level: intermediate
2744 
2745   Note: This gets a borrowed reference, so the user should not destroy this PetscSection.
2746 
2747 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2748 @*/
2749 PetscErrorCode DMGetDefaultSection(DM dm, PetscSection *section) {
2750   PetscFunctionBegin;
2751   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2752   PetscValidPointer(section, 2);
2753   *section = dm->defaultSection;
2754   PetscFunctionReturn(0);
2755 }
2756 
2757 #undef __FUNCT__
2758 #define __FUNCT__ "DMSetDefaultSection"
2759 /*@
2760   DMSetDefaultSection - Set the PetscSection encoding the local data layout for the DM.
2761 
2762   Input Parameters:
2763 + dm - The DM
2764 - section - The PetscSection
2765 
2766   Level: intermediate
2767 
2768   Note: Any existing Section will be destroyed
2769 
2770 .seealso: DMSetDefaultSection(), DMGetDefaultGlobalSection()
2771 @*/
2772 PetscErrorCode DMSetDefaultSection(DM dm, PetscSection section) {
2773   PetscInt       numFields;
2774   PetscInt       f;
2775   PetscErrorCode ierr;
2776 
2777   PetscFunctionBegin;
2778   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2779   ierr = PetscSectionDestroy(&dm->defaultSection);CHKERRQ(ierr);
2780   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2781   dm->defaultSection = section;
2782   ierr = PetscSectionGetNumFields(dm->defaultSection, &numFields);CHKERRQ(ierr);
2783   if (numFields) {
2784     ierr = DMSetNumFields(dm, numFields);CHKERRQ(ierr);
2785     for (f = 0; f < numFields; ++f) {
2786       const char *name;
2787 
2788       ierr = PetscSectionGetFieldName(dm->defaultSection, f, &name);CHKERRQ(ierr);
2789       ierr = PetscObjectSetName(dm->fields[f], name);CHKERRQ(ierr);
2790     }
2791   }
2792   PetscFunctionReturn(0);
2793 }
2794 
#undef __FUNCT__
#define __FUNCT__ "DMGetDefaultGlobalSection"
/*@
  DMGetDefaultGlobalSection - Get the PetscSection encoding the global data layout for the DM.

  Collective on DM

  Input Parameter:
. dm - The DM

  Output Parameter:
. section - The PetscSection

  Level: intermediate

  Note: This gets a borrowed reference, so the user should not destroy this PetscSection.

.seealso: DMSetDefaultSection(), DMGetDefaultSection()
@*/
PetscErrorCode DMGetDefaultGlobalSection(DM dm, PetscSection *section) {
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  PetscValidPointer(section, 2);
  /* Lazily derive the global section from the local section and the point SF */
  if (!dm->defaultGlobalSection) {
    if (!dm->defaultSection || !dm->sf) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "DM must have a default PetscSection and PetscSF in order to create a global PetscSection");
    ierr = PetscSectionCreateGlobalSection(dm->defaultSection, dm->sf, PETSC_FALSE, &dm->defaultGlobalSection);CHKERRQ(ierr);
    /* Also rebuild dm->map, the PetscLayout of locally owned values */
    ierr = PetscSectionGetValueLayout(((PetscObject)dm)->comm,dm->defaultGlobalSection,&dm->map);CHKERRQ(ierr);
  }
  *section = dm->defaultGlobalSection;
  PetscFunctionReturn(0);
}
2828 
2829 #undef __FUNCT__
2830 #define __FUNCT__ "DMSetDefaultGlobalSection"
2831 /*@
2832   DMSetDefaultGlobalSection - Set the PetscSection encoding the global data layout for the DM.
2833 
2834   Input Parameters:
2835 + dm - The DM
2836 - section - The PetscSection
2837 
2838   Level: intermediate
2839 
2840   Note: Any existing Section will be destroyed
2841 
2842 .seealso: DMGetDefaultGlobalSection(), DMSetDefaultSection()
2843 @*/
2844 PetscErrorCode DMSetDefaultGlobalSection(DM dm, PetscSection section) {
2845   PetscErrorCode ierr;
2846 
2847   PetscFunctionBegin;
2848   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2849   ierr = PetscSectionDestroy(&dm->defaultGlobalSection);CHKERRQ(ierr);
2850   dm->defaultGlobalSection = section;
2851   PetscFunctionReturn(0);
2852 }
2853 
2854 #undef __FUNCT__
2855 #define __FUNCT__ "DMGetDefaultSF"
2856 /*@
2857   DMGetDefaultSF - Get the PetscSF encoding the parallel dof overlap for the DM. If it has not been set,
2858   it is created from the default PetscSection layouts in the DM.
2859 
2860   Input Parameter:
2861 . dm - The DM
2862 
2863   Output Parameter:
2864 . sf - The PetscSF
2865 
2866   Level: intermediate
2867 
2868   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
2869 
2870 .seealso: DMSetDefaultSF(), DMCreateDefaultSF()
2871 @*/
2872 PetscErrorCode DMGetDefaultSF(DM dm, PetscSF *sf) {
2873   PetscInt       nroots;
2874   PetscErrorCode ierr;
2875 
2876   PetscFunctionBegin;
2877   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2878   PetscValidPointer(sf, 2);
2879   ierr = PetscSFGetGraph(dm->defaultSF, &nroots, PETSC_NULL, PETSC_NULL, PETSC_NULL);CHKERRQ(ierr);
2880   if (nroots < 0) {
2881     PetscSection section, gSection;
2882 
2883     ierr = DMGetDefaultSection(dm, &section);CHKERRQ(ierr);
2884     if (section) {
2885       ierr = DMGetDefaultGlobalSection(dm, &gSection);CHKERRQ(ierr);
2886       ierr = DMCreateDefaultSF(dm, section, gSection);CHKERRQ(ierr);
2887     } else {
2888       *sf = PETSC_NULL;
2889       PetscFunctionReturn(0);
2890     }
2891   }
2892   *sf = dm->defaultSF;
2893   PetscFunctionReturn(0);
2894 }
2895 
2896 #undef __FUNCT__
2897 #define __FUNCT__ "DMSetDefaultSF"
2898 /*@
2899   DMSetDefaultSF - Set the PetscSF encoding the parallel dof overlap for the DM
2900 
2901   Input Parameters:
2902 + dm - The DM
2903 - sf - The PetscSF
2904 
2905   Level: intermediate
2906 
2907   Note: Any previous SF is destroyed
2908 
2909 .seealso: DMGetDefaultSF(), DMCreateDefaultSF()
2910 @*/
2911 PetscErrorCode DMSetDefaultSF(DM dm, PetscSF sf) {
2912   PetscErrorCode ierr;
2913 
2914   PetscFunctionBegin;
2915   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
2916   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 2);
2917   ierr = PetscSFDestroy(&dm->defaultSF);CHKERRQ(ierr);
2918   dm->defaultSF = sf;
2919   PetscFunctionReturn(0);
2920 }
2921 
#undef __FUNCT__
#define __FUNCT__ "DMCreateDefaultSF"
/*@C
  DMCreateDefaultSF - Create the PetscSF encoding the parallel dof overlap for the DM based upon the PetscSections
  describing the data layout.

  Input Parameters:
+ dm - The DM
. localSection - PetscSection describing the local data layout
- globalSection - PetscSection describing the global data layout

  Level: intermediate

.seealso: DMGetDefaultSF(), DMSetDefaultSF()
@*/
PetscErrorCode DMCreateDefaultSF(DM dm, PetscSection localSection, PetscSection globalSection)
{
  MPI_Comm        comm = ((PetscObject) dm)->comm;
  PetscLayout     layout;
  const PetscInt *ranges;
  PetscInt       *local;
  PetscSFNode    *remote;
  PetscInt        pStart, pEnd, p, nroots, nleaves, l;
  PetscMPIInt     size, rank;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
  ierr = MPI_Comm_size(comm, &size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm, &rank);CHKERRQ(ierr);
  /* Build a unit-blocksize layout over the locally owned (constrained storage)
     dofs; its ranges array maps a global offset to the owning rank */
  ierr = PetscSectionGetChart(globalSection, &pStart, &pEnd);CHKERRQ(ierr);
  ierr = PetscSectionGetConstrainedStorageSize(globalSection, &nroots);CHKERRQ(ierr);
  ierr = PetscLayoutCreate(comm, &layout);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(layout, 1);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(layout, nroots);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(layout);CHKERRQ(ierr);
  ierr = PetscLayoutGetRanges(layout, &ranges);CHKERRQ(ierr);
  /* First pass: count leaves. A negative global dof count encodes a point owned
     by another process as -(gdof+1); constrained dofs are excluded either way */
  for (p = pStart, nleaves = 0; p < pEnd; ++p) {
    PetscInt gdof, gcdof;

    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    nleaves += gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
  }
  ierr = PetscMalloc(nleaves * sizeof(PetscInt), &local);CHKERRQ(ierr);
  ierr = PetscMalloc(nleaves * sizeof(PetscSFNode), &remote);CHKERRQ(ierr);
  /* Second pass: for each unconstrained dof, record its local index and the
     (rank, offset) pair of the global dof that owns it */
  for (p = pStart, l = 0; p < pEnd; ++p) {
    const PetscInt *cind;
    PetscInt        dof, cdof, off, gdof, gcdof, goff, gsize, d, c;

    ierr = PetscSectionGetDof(localSection, p, &dof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(localSection, p, &off);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(localSection, p, &cdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintIndices(localSection, p, &cind);CHKERRQ(ierr);
    ierr = PetscSectionGetDof(globalSection, p, &gdof);CHKERRQ(ierr);
    ierr = PetscSectionGetConstraintDof(globalSection, p, &gcdof);CHKERRQ(ierr);
    ierr = PetscSectionGetOffset(globalSection, p, &goff);CHKERRQ(ierr);
    if (!gdof) continue; /* Censored point */
    gsize = gdof < 0 ? -(gdof+1)-gcdof : gdof-gcdof;
    if (gsize != dof-cdof) {
      if (gsize != dof) SETERRQ4(comm, PETSC_ERR_ARG_WRONG, "Global dof %d for point %d is neither the constrained size %d, nor the unconstrained %d", gsize, p, dof-cdof, dof);
      cdof = 0; /* Ignore constraints */
    }
    /* Collect local indices of unconstrained dofs; the single cursor c assumes
       cind is sorted ascending -- TODO confirm PetscSection guarantees this */
    for (d = 0, c = 0; d < dof; ++d) {
      if ((c < cdof) && (cind[c] == d)) {++c; continue;}
      local[l+d-c] = off+d;
    }
    if (gdof < 0) {
      /* Remotely owned point: decode the true global offset from -(goff+1) and
         look up the owning rank in the layout ranges */
      for(d = 0; d < gsize; ++d, ++l) {
        PetscInt offset = -(goff+1) + d, r;

        ierr = PetscFindInt(offset,size,ranges,&r);CHKERRQ(ierr);
        if (r < 0) r = -(r+2); /* not an exact range start: convert insertion point to owner rank */
        remote[l].rank  = r;
        remote[l].index = offset - ranges[r];
      }
    } else {
      /* Locally owned point: the leaf points at this rank's own root */
      for(d = 0; d < gsize; ++d, ++l) {
        remote[l].rank  = rank;
        remote[l].index = goff+d - ranges[rank];
      }
    }
  }
  if (l != nleaves) SETERRQ2(comm, PETSC_ERR_PLIB, "Iteration error, l %d != nleaves %d", l, nleaves);
  ierr = PetscLayoutDestroy(&layout);CHKERRQ(ierr);
  /* The SF takes ownership of both arrays (PETSC_OWN_POINTER) */
  ierr = PetscSFSetGraph(dm->defaultSF, nroots, nleaves, local, PETSC_OWN_POINTER, remote, PETSC_OWN_POINTER);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3010 
3011 #undef __FUNCT__
3012 #define __FUNCT__ "DMGetPointSF"
3013 /*@
3014   DMGetPointSF - Get the PetscSF encoding the parallel section point overlap for the DM.
3015 
3016   Input Parameter:
3017 . dm - The DM
3018 
3019   Output Parameter:
3020 . sf - The PetscSF
3021 
3022   Level: intermediate
3023 
3024   Note: This gets a borrowed reference, so the user should not destroy this PetscSF.
3025 
3026 .seealso: DMSetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3027 @*/
3028 PetscErrorCode DMGetPointSF(DM dm, PetscSF *sf) {
3029   PetscFunctionBegin;
3030   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3031   PetscValidPointer(sf, 2);
3032   *sf = dm->sf;
3033   PetscFunctionReturn(0);
3034 }
3035 
3036 #undef __FUNCT__
3037 #define __FUNCT__ "DMSetPointSF"
3038 /*@
3039   DMSetPointSF - Set the PetscSF encoding the parallel section point overlap for the DM.
3040 
3041   Input Parameters:
3042 + dm - The DM
3043 - sf - The PetscSF
3044 
3045   Level: intermediate
3046 
3047 .seealso: DMGetPointSF(), DMGetDefaultSF(), DMSetDefaultSF(), DMCreateDefaultSF()
3048 @*/
3049 PetscErrorCode DMSetPointSF(DM dm, PetscSF sf) {
3050   PetscErrorCode ierr;
3051 
3052   PetscFunctionBegin;
3053   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3054   PetscValidHeaderSpecific(sf, PETSCSF_CLASSID, 1);
3055   ierr = PetscSFDestroy(&dm->sf);CHKERRQ(ierr);
3056   ierr = PetscObjectReference((PetscObject) sf);CHKERRQ(ierr);
3057   dm->sf = sf;
3058   PetscFunctionReturn(0);
3059 }
3060 
3061 #undef __FUNCT__
3062 #define __FUNCT__ "DMGetNumFields"
3063 PetscErrorCode DMGetNumFields(DM dm, PetscInt *numFields)
3064 {
3065   PetscFunctionBegin;
3066   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3067   PetscValidPointer(numFields, 2);
3068   *numFields = dm->numFields;
3069   PetscFunctionReturn(0);
3070 }
3071 
3072 #undef __FUNCT__
3073 #define __FUNCT__ "DMSetNumFields"
3074 PetscErrorCode DMSetNumFields(DM dm, PetscInt numFields)
3075 {
3076   PetscInt       f;
3077   PetscErrorCode ierr;
3078 
3079   PetscFunctionBegin;
3080   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3081   for (f = 0; f < dm->numFields; ++f) {
3082     ierr = PetscObjectDestroy(&dm->fields[f]);CHKERRQ(ierr);
3083   }
3084   ierr = PetscFree(dm->fields);CHKERRQ(ierr);
3085   dm->numFields = numFields;
3086   ierr = PetscMalloc(dm->numFields * sizeof(PetscObject), &dm->fields);CHKERRQ(ierr);
3087   for (f = 0; f < dm->numFields; ++f) {
3088     ierr = PetscContainerCreate(((PetscObject) dm)->comm, (PetscContainer *) &dm->fields[f]);CHKERRQ(ierr);
3089   }
3090   PetscFunctionReturn(0);
3091 }
3092 
3093 #undef __FUNCT__
3094 #define __FUNCT__ "DMGetField"
3095 PetscErrorCode DMGetField(DM dm, PetscInt f, PetscObject *field)
3096 {
3097   PetscFunctionBegin;
3098   PetscValidHeaderSpecific(dm, DM_CLASSID, 1);
3099   PetscValidPointer(field, 2);
3100   if (!dm->fields) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_ARG_WRONGSTATE, "Fields have not been setup in this DM. Call DMSetNumFields()");
3101   if ((f < 0) || (f >= dm->numFields)) SETERRQ3(((PetscObject) dm)->comm, PETSC_ERR_ARG_OUTOFRANGE, "Field %d should be in [%d,%d)", f, 0, dm->numFields);
3102   *field = dm->fields[f];
3103   PetscFunctionReturn(0);
3104 }
3105 
#undef __FUNCT__
#define __FUNCT__ "DMSetCoordinates"
/*@
  DMSetCoordinates - Sets into the DM a global vector that holds the coordinates

  Collective on DM

  Input Parameters:
+ dm - the DM
- c - coordinate vector

  Note:
  The coordinates do include those for ghost points, which are in the local vector

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMSetCoordinates(DM dm, Vec c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(c,VEC_CLASSID,2);
  /* Reference before destroy so passing the currently stored vector is safe */
  ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
  ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
  dm->coordinates = c;
  /* Invalidate the cached local coordinates; DMGetCoordinatesLocal() rebuilds them */
  ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
3138 
3139 #undef __FUNCT__
3140 #define __FUNCT__ "DMSetCoordinatesLocal"
3141 /*@
3142   DMSetCoordinatesLocal - Sets into the DM a local vector that holds the coordinates
3143 
3144   Collective on DM
3145 
3146    Input Parameters:
3147 +  dm - the DM
3148 -  c - coordinate vector
3149 
3150   Note:
3151   The coordinates of ghost points can be set using DMSetCoordinates()
3152   followed by DMGetCoordinatesLocal(). This is intended to enable the
3153   setting of ghost coordinates outside of the domain.
3154 
3155   Level: intermediate
3156 
3157 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3158 .seealso: DMGetCoordinatesLocal(), DMSetCoordinates(), DMGetCoordinates(), DMGetCoordinateDM()
3159 @*/
3160 PetscErrorCode DMSetCoordinatesLocal(DM dm, Vec c)
3161 {
3162   PetscErrorCode ierr;
3163 
3164   PetscFunctionBegin;
3165   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3166   PetscValidHeaderSpecific(c,VEC_CLASSID,2);
3167   ierr = PetscObjectReference((PetscObject) c);CHKERRQ(ierr);
3168   ierr = VecDestroy(&dm->coordinatesLocal);CHKERRQ(ierr);
3169   dm->coordinatesLocal = c;
3170   ierr = VecDestroy(&dm->coordinates);CHKERRQ(ierr);
3171   PetscFunctionReturn(0);
3172 }
3173 
#undef __FUNCT__
#define __FUNCT__ "DMGetCoordinates"
/*@
  DMGetCoordinates - Gets a global vector with the coordinates associated with the DM.

  Not Collective

  Input Parameter:
. dm - the DM

  Output Parameter:
. c - global coordinate vector

  Note:
  This is a borrowed reference, so the user should NOT destroy this vector

  Each process has only the local coordinates (does NOT have the ghost coordinates).

  For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
  and (x_0,y_0,z_0,x_1,y_1,z_1...)

  Level: intermediate

.keywords: distributed array, get, corners, nodes, local indices, coordinates
.seealso: DMSetCoordinates(), DMGetCoordinatesLocal(), DMGetCoordinateDM()
@*/
PetscErrorCode DMGetCoordinates(DM dm, Vec *c)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(c,2);
  /* Lazily assemble the global vector from the cached local coordinates, if
     only the local form has been set */
  if (!dm->coordinates && dm->coordinatesLocal) {
    DM cdm = PETSC_NULL;

    ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
    ierr = DMCreateGlobalVector(cdm, &dm->coordinates);CHKERRQ(ierr);
    ierr = PetscObjectSetName((PetscObject) dm->coordinates, "coordinates");CHKERRQ(ierr);
    ierr = DMLocalToGlobalBegin(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
    ierr = DMLocalToGlobalEnd(cdm, dm->coordinatesLocal, INSERT_VALUES, dm->coordinates);CHKERRQ(ierr);
  }
  /* Borrowed reference; NULL if no coordinates were ever set */
  *c = dm->coordinates;
  PetscFunctionReturn(0);
}
3219 
3220 #undef __FUNCT__
3221 #define __FUNCT__ "DMGetCoordinatesLocal"
3222 /*@
3223   DMGetCoordinatesLocal - Gets a local vector with the coordinates associated with the DM.
3224 
3225   Collective on DM
3226 
3227   Input Parameter:
3228 . dm - the DM
3229 
3230   Output Parameter:
3231 . c - coordinate vector
3232 
3233   Note:
3234   This is a borrowed reference, so the user should NOT destroy this vector
3235 
3236   Each process has the local and ghost coordinates
3237 
3238   For DMDA, in two and three dimensions coordinates are interlaced (x_0,y_0,x_1,y_1,...)
3239   and (x_0,y_0,z_0,x_1,y_1,z_1...)
3240 
3241   Level: intermediate
3242 
3243 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3244 .seealso: DMSetCoordinatesLocal(), DMGetCoordinates(), DMSetCoordinates(), DMGetCoordinateDM()
3245 @*/
3246 PetscErrorCode DMGetCoordinatesLocal(DM dm, Vec *c)
3247 {
3248   PetscErrorCode ierr;
3249 
3250   PetscFunctionBegin;
3251   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3252   PetscValidPointer(c,2);
3253   if (!dm->coordinatesLocal && dm->coordinates) {
3254     DM cdm = PETSC_NULL;
3255 
3256     ierr = DMGetCoordinateDM(dm, &cdm);CHKERRQ(ierr);
3257     ierr = DMCreateLocalVector(cdm, &dm->coordinatesLocal);CHKERRQ(ierr);
3258     ierr = PetscObjectSetName((PetscObject) dm->coordinatesLocal, "coordinates");CHKERRQ(ierr);
3259     ierr = DMGlobalToLocalBegin(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3260     ierr = DMGlobalToLocalEnd(cdm, dm->coordinates, INSERT_VALUES, dm->coordinatesLocal);CHKERRQ(ierr);
3261   }
3262   *c = dm->coordinatesLocal;
3263   PetscFunctionReturn(0);
3264 }
3265 
3266 #undef __FUNCT__
3267 #define __FUNCT__ "DMGetCoordinateDM"
3268 /*@
3269   DMGetCoordinateDM - Gets the DM that scatters between global and local coordinates
3270 
3271   Collective on DM
3272 
3273   Input Parameter:
3274 . dm - the DM
3275 
3276   Output Parameter:
3277 . cdm - coordinate DM
3278 
3279   Level: intermediate
3280 
3281 .keywords: distributed array, get, corners, nodes, local indices, coordinates
3282 .seealso: DMSetCoordinates(), DMSetCoordinatesLocal(), DMGetCoordinates(), DMGetCoordinatesLocal()
3283 @*/
3284 PetscErrorCode DMGetCoordinateDM(DM dm, DM *cdm)
3285 {
3286   PetscErrorCode ierr;
3287 
3288   PetscFunctionBegin;
3289   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
3290   PetscValidPointer(cdm,2);
3291   if (!dm->coordinateDM) {
3292     if (!dm->ops->createcoordinatedm) SETERRQ(((PetscObject) dm)->comm, PETSC_ERR_SUP, "Unable to create coordinates for this DM");
3293     ierr = (*dm->ops->createcoordinatedm)(dm, &dm->coordinateDM);CHKERRQ(ierr);
3294   }
3295   *cdm = dm->coordinateDM;
3296   PetscFunctionReturn(0);
3297 }
3298