xref: /petsc/src/dm/impls/composite/pack.c (revision 47c6ae997ffd1b2afd66b6474dff5950ae8613d1)
1 #define PETSCDM_DLL
2 
3 #include "petscda.h"             /*I      "petscda.h"     I*/
4 #include "private/dmimpl.h"
5 #include "petscmat.h"
6 
7 /*
8    rstart is where an array/subvector starts in the global parallel vector, so arrays
9    rstarts are meaningless (and set to the previous one) except on the processor where the array lives
10 */
11 
typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType;

/* One entry in the singly linked list of packed objects; a link is either a
   redundant array owned by one process or a DM contributing a subvector. */
struct DMCompositeLink {
  DMCompositeLinkType    type;
  struct DMCompositeLink *next;
  PetscInt               n,rstart;      /* rstart is relative to this process */
  PetscInt               grstart;       /* grstart is relative to all processes */

  /* only used for DMCOMPOSITE_DM */
  PetscInt               *grstarts;     /* global row for first unknown of this DM on each process */
  DM                     dm;

  /* only used for DMCOMPOSITE_ARRAY */
  PetscMPIInt            rank;          /* process where array unknowns live */
};

typedef struct {
  PetscInt               n,N,rstart;           /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */
  PetscInt               nghost;               /* number of all local entries include DA ghost points and any shared redundant arrays */
  PetscInt               nDM,nredundant,nmine; /* how many DM's and separate redundant arrays used to build DM (nmine is ones on this process) */
  PetscBool              setup;                /* after this is set, cannot add new links to the DM */
  struct DMCompositeLink *next;                /* head of the linked list of packed objects */

  /* optional user callback that adds coupling nonzero locations to matrices built by DMCompositeGetMatrix() */
  PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt);
} DM_Composite;
37 
38 #undef __FUNCT__
39 #define __FUNCT__ "DMCompositeSetCoupling"
40 /*@C
41     DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the
42       seperate components (DA's and arrays) in a DMto build the correct matrix nonzero structure.
43 
44 
45     Logically Collective on MPI_Comm
46 
47     Input Parameter:
48 +   dm - the composite object
49 -   formcouplelocations - routine to set the nonzero locations in the matrix
50 
51     Level: advanced
52 
53     Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into
54         this routine
55 
56 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
57          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
58          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetContext(),
59          DMCompositeGetContext()
60 
61 @*/
62 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt))
63 {
64   DM_Composite *com = (DM_Composite*)dm->data;
65 
66   PetscFunctionBegin;
67   com->FormCoupleLocations = FormCoupleLocations;
68   PetscFunctionReturn(0);
69 }
70 
71 #undef __FUNCT__
72 #define __FUNCT__ "DMCompositeSetContext"
73 /*@
74     DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they
75       set with DMCompositeSetCoupling()
76 
77 
78     Not Collective
79 
80     Input Parameter:
81 +   dm - the composite object
82 -   ctx - the user supplied context
83 
84     Level: advanced
85 
86     Notes: Use DMCompositeGetContext() to retrieve the context when needed.
87 
88 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
89          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
90          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetCoupling(),
91          DMCompositeGetContext()
92 
93 @*/
94 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetContext(DM dm,void *ctx)
95 {
96   PetscFunctionBegin;
97   dm->ctx = ctx;
98   PetscFunctionReturn(0);
99 }
100 
101 #undef __FUNCT__
102 #define __FUNCT__ "DMCompositeGetContext"
103 /*@
104     DMCompositeGetContext - Access the context set with DMCompositeSetContext()
105 
106 
107     Not Collective
108 
109     Input Parameter:
110 .   dm - the composite object
111 
112     Output Parameter:
113 .    ctx - the user supplied context
114 
115     Level: advanced
116 
117     Notes: Use DMCompositeGetContext() to retrieve the context when needed.
118 
119 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
120          DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
121          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(), DMCompositeSetCoupling(),
122          DMCompositeSetContext()
123 
124 @*/
125 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetContext(DM dm,void **ctx)
126 {
127   PetscFunctionBegin;
128   *ctx = dm->ctx;
129   PetscFunctionReturn(0);
130 }
131 
132 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeCreate"
/*@C
    DMCompositeCreate - Creates a vector packer, used to generate "composite"
      vectors made up of several subvectors.

    Collective on MPI_Comm

    Input Parameter:
.   comm - the processors that will share the global vector

    Output Parameters:
.   packer - the packer object

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
         DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreate(MPI_Comm comm,DM *packer)
{
  PetscErrorCode ierr;
  DM             p;
  DM_Composite   *com;

  PetscFunctionBegin;
  PetscValidPointer(packer,2);
  *packer = PETSC_NULL;
#ifndef PETSC_USE_DYNAMIC_LIBRARIES
  ierr = DMInitializePackage(PETSC_NULL);CHKERRQ(ierr);
#endif

  /* allocate the DM header with composite destroy/view callbacks attached */
  ierr = PetscHeaderCreate(p,_p_DM,struct _DMOps,DM_CLASSID,0,"DM",comm,DMCompositeDestroy,DMCompositeView);CHKERRQ(ierr);
  ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr);
  p->data = com;
  ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr);
  /* list starts empty; links are appended by DMCompositeAddArray()/DMCompositeAddDM() */
  com->n            = 0;
  com->next         = PETSC_NULL;
  com->nredundant   = 0;
  com->nDM          = 0;

  /* wire the generic DM operation table to the composite implementations */
  p->ops->createglobalvector = DMCompositeCreateGlobalVector;
  p->ops->createlocalvector  = DMCompositeCreateLocalVector;
  p->ops->refine             = DMCompositeRefine;
  p->ops->getinterpolation   = DMCompositeGetInterpolation;
  p->ops->getmatrix          = DMCompositeGetMatrix;
  p->ops->getcoloring        = DMCompositeGetColoring;
  p->ops->globaltolocalbegin = DMCompositeGlobalToLocalBegin;
  p->ops->globaltolocalend   = DMCompositeGlobalToLocalEnd;
  p->ops->destroy            = DMCompositeDestroy;

  *packer = p;
  PetscFunctionReturn(0);
}
189 
190 extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);
191 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeDestroy"
/*@C
    DMCompositeDestroy - Destroys a vector packer.

    Collective on DMComposite

    Input Parameter:
.   packer - the packer object

    Level: advanced

.seealso DMCompositeCreate(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),DMCompositeGetEntries()
         DMCompositeGather(), DMCompositeCreateGlobalVector(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeDestroy(DM dm)
{
  PetscErrorCode         ierr;
  struct DMCompositeLink *next, *prev;
  PetscBool              done;
  DM_Composite           *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* reference counting: only proceed when the last reference is being released */
  ierr = DMDestroy_Private((DM)dm,&done);CHKERRQ(ierr);
  if (!done) PetscFunctionReturn(0);

  /* walk the linked list, releasing each link's resources before freeing the link itself */
  next = com->next;
  while (next) {
    prev = next;
    next = next->next;
    if (prev->type == DMCOMPOSITE_DM) {
      ierr = DMDestroy(prev->dm);CHKERRQ(ierr);
    }
    /* grstarts is only allocated for DM links during DMCompositeSetUp(); zeroed otherwise */
    if (prev->grstarts) {
      ierr = PetscFree(prev->grstarts);CHKERRQ(ierr);
    }
    ierr = PetscFree(prev);CHKERRQ(ierr);
  }
  ierr = PetscFree(com);CHKERRQ(ierr);
  /* header must be destroyed last; com was reached through it */
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "DMCompositeView"
239 /*@
240     DMCompositeView - Views a composite DM
241 
242     Collective on DMComposite
243 
244     Input Parameter:
245 +   packer - the DMobject to view
246 -   v - the viewer
247 
248     Level: intermediate
249 
250 .seealso DMCompositeCreate()
251 
252 @*/
253 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeView(DM dm,PetscViewer v)
254 {
255   PetscErrorCode ierr;
256   PetscBool      iascii;
257   DM_Composite   *com = (DM_Composite *)dm->data;
258 
259   PetscFunctionBegin;
260   ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
261   if (iascii) {
262     struct DMCompositeLink *lnk = com->next;
263     PetscInt               i;
264 
265     ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr);
266     ierr = PetscViewerASCIIPrintf(v,"  contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr);
267     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
268     for (i=0; lnk; lnk=lnk->next,i++) {
269       if (lnk->dm) {
270         ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr);
271         ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
272         ierr = DMView(lnk->dm,v);CHKERRQ(ierr);
273         ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
274       } else {
275         ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->n,lnk->rank);CHKERRQ(ierr);
276       }
277     }
278     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
279   }
280   PetscFunctionReturn(0);
281 }
282 
/* --------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMCompositeSetUp"
/* Computes the global layout of the packed vector: total size, this process's
   range, and the local/global starting offset of every link.  Collective.
   After this call no further links may be added. */
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetUp(DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               nprev = 0;
  PetscMPIInt            rank,size;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscLayout            map;

  PetscFunctionBegin;
  if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
  /* use a PetscLayout to get the global size N and this process's starting offset rstart */
  ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);

  /* now set the rstart for each linked array/vector */
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr);
  while (next) {
    next->rstart = nprev;
    /* array links contribute local space only on their owning rank; DM links on every rank */
    if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n;
    next->grstart = com->rstart + next->rstart;
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* grstart is only meaningful on the owning rank; broadcast it so all ranks agree */
      ierr = MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    } else {
      /* every rank needs every other rank's starting row for this DM */
      ierr = PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr);
      ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    }
    next = next->next;
  }
  com->setup = PETSC_TRUE;
  PetscFunctionReturn(0);
}
324 
325 
326 #undef __FUNCT__
327 #define __FUNCT__ "DMCompositeGetAccess_Array"
328 PetscErrorCode DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
329 {
330   PetscErrorCode ierr;
331   PetscScalar    *varray;
332   PetscMPIInt    rank;
333 
334   PetscFunctionBegin;
335   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
336   if (array) {
337     if (rank == mine->rank) {
338       ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
339       *array  = varray + mine->rstart;
340       ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
341     } else {
342       *array = 0;
343     }
344   }
345   PetscFunctionReturn(0);
346 }
347 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_DM"
/* Borrows a global vector from the link's DM and aliases it onto this link's
   slice of gvec via VecPlaceArray(); undone by DMCompositeRestoreAccess_DM(). */
PetscErrorCode DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (global) {
    ierr    = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr);
    ierr    = VecGetArray(vec,&array);CHKERRQ(ierr);
    /* alias before restoring; *global now shares gvec's storage starting at rstart */
    ierr    = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr);
    ierr    = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
364 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_Array"
/* Intentionally a no-op: DMCompositeGetAccess_Array() hands out a raw pointer
   into gvec and retains no state that needs releasing. Kept so array and DM
   links can be restored uniformly by DMCompositeRestoreAccess(). */
PetscErrorCode DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
372 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_DM"
/* Undoes DMCompositeGetAccess_DM(): detaches the aliased storage and returns
   the borrowed global vector to the link's DM. */
PetscErrorCode DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (global) {
    /* must reset before returning the vector, otherwise it would keep pointing into gvec */
    ierr = VecResetArray(*global);CHKERRQ(ierr);
    ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
386 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_Array"
/* Copies this link's slice of gvec into the user's array on the owning rank,
   then broadcasts it so every rank ends up with the same n values. */
PetscErrorCode DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
    ierr    = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
  }
  /* collective: non-owning ranks receive the values here */
  ierr    = MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
405 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_DM"
/* Scatters this link's slice of gvec into the user's local (ghosted) vector by
   temporarily aliasing a borrowed global vector onto the slice. */
PetscErrorCode DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  /* global now shares gvec's storage for this link; no data is copied */
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  /* restore/reset in this order so the borrowed vector is clean before returning it */
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
425 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_Array"
/* Copies the user's array into this link's slice of gvec; only the owning rank
   writes, other ranks are a no-op. */
PetscErrorCode DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
    /* guard against self-copy: pointers from DMCompositeGetAccess() already alias the slice */
    if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
    ierr    = PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
444 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_DM"
/* Gathers the user's local (ghosted) vector into this link's slice of gvec by
   temporarily aliasing a borrowed global vector onto the slice. */
PetscErrorCode DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  /* global now shares gvec's storage for this link; LocalToGlobal writes straight into gvec */
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMLocalToGlobal(mine->dm,local,INSERT_VALUES,global);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
463 
464 /* ----------------------------------------------------------------------------------*/
465 
466 #include <stdarg.h>
467 
468 #undef __FUNCT__
469 #define __FUNCT__ "DMCompositeGetNumberDM"
470 /*@C
471     DMCompositeGetNumberDM - Get's the number of DM objects in the DMComposite
472        representation.
473 
474     Not Collective
475 
476     Input Parameter:
477 .    dm - the packer object
478 
479     Output Parameter:
480 .     nDM - the number of DMs
481 
482     Level: beginner
483 
484 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
485          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
486          DMCompositeRestoreAccess(), DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),
487          DMCompositeGetEntries()
488 
489 @*/
490 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetNumberDM(DM dm,PetscInt *nDM)
491 {
492   DM_Composite *com = (DM_Composite*)dm->data;
493   PetscFunctionBegin;
494   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
495   *nDM = com->nDM;
496   PetscFunctionReturn(0);
497 }
498 

#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess"
/*@C
    DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
         DMCompositeRestoreAccess(), DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),
         DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  /* lazily compute the layout on first use */
  if (!com->setup) {
    ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time; the varargs must match
     the order and kinds of links added to the composite */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
557 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess"
/*@C
    DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors)

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeScatter(),
         DMCompositeRestoreAccess(), DMCompositeGetAccess()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time; varargs must mirror the
     exact sequence passed to DMCompositeGetAccess() */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
612 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter"
/*@C
    DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors)

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeScatter(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscInt               cnt = 3;      /* argument index of the first vararg, for error messages */
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time; varargs must match the
     order and kinds of links added to the composite */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    cnt++;
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
669 
670 #undef __FUNCT__
671 #define __FUNCT__ "DMCompositeGather"
672 /*@C
673     DMCompositeGather - Gathers into a global packed vector from its individual local vectors
674 
675     Collective on DMComposite
676 
677     Input Parameter:
678 +    dm - the packer object
679 .    gvec - the global vector
680 -    ... - the individual sequential objects (arrays or vectors)
681 
682     Level: advanced
683 
684 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
685          DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
686          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
687 
688 @*/
689 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGather(DM dm,Vec gvec,...)
690 {
691   va_list                Argp;
692   PetscErrorCode         ierr;
693   struct DMCompositeLink *next;
694   DM_Composite           *com = (DM_Composite*)dm->data;
695 
696   PetscFunctionBegin;
697   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
698   PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
699   next = com->next;
700   if (!com->setup) {
701     ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
702   }
703 
704   /* loop over packed objects, handling one at at time */
705   va_start(Argp,gvec);
706   while (next) {
707     if (next->type == DMCOMPOSITE_ARRAY) {
708       PetscScalar *array;
709       array = va_arg(Argp, PetscScalar*);
710       ierr  = DMCompositeGather_Array(dm,next,gvec,array);CHKERRQ(ierr);
711     } else if (next->type == DMCOMPOSITE_DM) {
712       Vec vec;
713       vec = va_arg(Argp, Vec);
714       PetscValidHeaderSpecific(vec,VEC_CLASSID,3);
715       ierr = DMCompositeGather_DM(dm,next,gvec,vec);CHKERRQ(ierr);
716     } else {
717       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
718     }
719     next = next->next;
720   }
721   va_end(Argp);
722   PetscFunctionReturn(0);
723 }
724 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddArray"
/*@C
    DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
       be stored in part of the array on process orank.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    orank - the process on which the array entries officially live, this number must be
             the same on all processes.
-    n - the length of the array

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n)
{
  struct DMCompositeLink *mine,*next;
  PetscErrorCode         ierr;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  /* the layout computed by DMCompositeSetUp() would be invalidated by a new link */
  if (com->setup) {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
  }
#if defined(PETSC_USE_DEBUG)
  /* in debug builds verify collectively that every rank passed the same orank */
  {
    PetscMPIInt orankmax;
    ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr);
    if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
  }
#endif

  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  /* create new link */
  ierr                = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  mine->n             = n;
  mine->rank          = orank;
  mine->dm            = PETSC_NULL;
  mine->type          = DMCOMPOSITE_ARRAY;
  mine->next          = PETSC_NULL;
  /* only the owning rank contributes local storage for a redundant array */
  if (rank == mine->rank) {com->n += n;com->nmine++;}

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nredundant++;
  PetscFunctionReturn(0);
}
787 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddDM"
/*@C
    DMCompositeAddDM - adds a DM (includes DA) vector to a DMComposite

    Collective on DMComposite

    Input Parameter:
+    dmc - the packer object
-    dm - the DM object, if the DM is a da you will need to cast it with a (DM)

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddDM(DM dmc,DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               n;
  struct DMCompositeLink *mine,*next;
  Vec                    global;
  DM_Composite           *com = (DM_Composite*)dmc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  next = com->next;
  /* the layout computed by DMCompositeSetUp() would be invalidated by a new link */
  if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DA once you have used the DMComposite");

  /* create new link; the composite takes a reference on dm (released in DMCompositeDestroy()) */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
  /* borrow a global vector just to learn this rank's local size */
  ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr);
  mine->n      = n;
  mine->dm     = dm;
  mine->type   = DMCOMPOSITE_DM;
  mine->next   = PETSC_NULL;
  com->n       += n;

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nDM++;
  com->nmine++;
  PetscFunctionReturn(0);
}
843 
844 extern PetscErrorCode PETSCDM_DLLEXPORT VecView_MPI(Vec,PetscViewer);
845 EXTERN_C_BEGIN
846 #undef __FUNCT__
847 #define __FUNCT__ "VecView_DMComposite"
848 PetscErrorCode PETSCDM_DLLEXPORT VecView_DMComposite(Vec gvec,PetscViewer viewer)
849 {
850   DM                     dm;
851   PetscErrorCode         ierr;
852   struct DMCompositeLink *next;
853   PetscBool              isdraw;
854   DM_Composite           *com = (DM_Composite*)dm->data;
855 
856   PetscFunctionBegin;
857   ierr = PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr);
858   if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
859   com = (DM_Composite*)dm->data;
860   next = com->next;
861 
862   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
863   if (!isdraw) {
864     /* do I really want to call this? */
865     ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr);
866   } else {
867     PetscInt cnt = 0;
868 
869     /* loop over packed objects, handling one at at time */
870     while (next) {
871       if (next->type == DMCOMPOSITE_ARRAY) {
872 	PetscScalar *array;
873 	ierr  = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr);
874 
875 	/*skip it for now */
876       } else if (next->type == DMCOMPOSITE_DM) {
877 	Vec      vec;
878         PetscInt bs;
879 
880 	ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
881 	ierr = VecView(vec,viewer);CHKERRQ(ierr);
882         ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr);
883 	ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
884         ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr);
885         cnt += bs;
886       } else {
887 	SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
888       }
889       next = next->next;
890     }
891     ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr);
892   }
893   PetscFunctionReturn(0);
894 }
895 EXTERN_C_END
896 
897 
898 #undef __FUNCT__
899 #define __FUNCT__ "DMCompositeCreateGlobalVector"
900 /*@C
901     DMCompositeCreateGlobalVector - Creates a vector of the correct size to be gathered into
902         by the packer.
903 
904     Collective on DMComposite
905 
906     Input Parameter:
907 .    dm - the packer object
908 
909     Output Parameters:
910 .   gvec - the global vector
911 
912     Level: advanced
913 
914     Notes: Once this has been created you cannot add additional arrays or vectors to be packed.
915 
916 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
917          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
918          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(),
919          DMCompositeCreateLocalVector()
920 
921 @*/
922 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreateGlobalVector(DM dm,Vec *gvec)
923 {
924   PetscErrorCode         ierr;
925   DM_Composite           *com = (DM_Composite*)dm->data;
926 
927   PetscFunctionBegin;
928   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
929   if (!com->setup) {
930     ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
931   }
932   ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr);
933   ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
934   ierr = VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr);
935   PetscFunctionReturn(0);
936 }
937 
938 #undef __FUNCT__
939 #define __FUNCT__ "DMCompositeCreateLocalVector"
940 /*@C
941     DMCompositeCreateLocalVector - Creates a vector of the correct size to contain all ghost points
942         and redundant arrays.
943 
944     Not Collective
945 
946     Input Parameter:
947 .    dm - the packer object
948 
949     Output Parameters:
950 .   lvec - the local vector
951 
952     Level: advanced
953 
954     Notes: Once this has been created you cannot add additional arrays or vectors to be packed.
955 
956 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
957          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
958          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries(),
959          DMCompositeCreateGlobalVector()
960 
961 @*/
962 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreateLocalVector(DM dm,Vec *lvec)
963 {
964   PetscErrorCode         ierr;
965   DM_Composite           *com = (DM_Composite*)dm->data;
966 
967   PetscFunctionBegin;
968   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
969   if (!com->setup) {
970     ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
971   }
972   ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr);
973   ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
974   PetscFunctionReturn(0);
975 }
976 
977 #undef __FUNCT__
978 #define __FUNCT__ "DMCompositeGetLocalISs"
979 /*@C
980     DMCompositeGetLocalISs - gets an IS for each DM/array in the DMComposite, include ghost points
981 
982     Collective on DMComposite
983 
984     Input Parameter:
985 .    dm - the packer object
986 
987     Output Parameters:
988 .    is - the individual indices for each packed vector/array. Note that this includes
989            all the ghost points that individual ghosted DA's may have. Also each process has an
990            is for EACH redundant array (not just the local redundant arrays).
991 
992     Level: advanced
993 
994     Notes:
995        The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()
996 
997        Use DMCompositeGetGlobalISs() for non-ghosted ISs.
998 
999 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1000          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
1001          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()
1002 
1003 @*/
1004 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalIndices(DM dm,IS *is[])
1005 {
1006   PetscErrorCode         ierr;
1007   PetscInt               i,*idx,n,cnt;
1008   struct DMCompositeLink *next;
1009   Vec                    global,dglobal;
1010   PF                     pf;
1011   PetscScalar            *array;
1012   PetscMPIInt            rank;
1013   DM_Composite           *com = (DM_Composite*)dm->data;
1014 
1015   PetscFunctionBegin;
1016   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1017   ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
1018   next = com->next;
1019   ierr = DMCompositeCreateGlobalVector(dm,&global);CHKERRQ(ierr);
1020   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1021 
1022   /* put 0 to N-1 into the global vector */
1023   ierr = PFCreate(PETSC_COMM_WORLD,1,1,&pf);CHKERRQ(ierr);
1024   ierr = PFSetType(pf,PFIDENTITY,PETSC_NULL);CHKERRQ(ierr);
1025   ierr = PFApplyVec(pf,PETSC_NULL,global);CHKERRQ(ierr);
1026   ierr = PFDestroy(pf);CHKERRQ(ierr);
1027 
1028   /* loop over packed objects, handling one at at time */
1029   cnt = 0;
1030   while (next) {
1031 
1032     if (next->type == DMCOMPOSITE_ARRAY) {
1033 
1034       ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1035       if (rank == next->rank) {
1036         ierr   = VecGetArray(global,&array);CHKERRQ(ierr);
1037         array += next->rstart;
1038         for (i=0; i<next->n; i++) idx[i] = (PetscInt)PetscRealPart(array[i]);
1039         array -= next->rstart;
1040         ierr   = VecRestoreArray(global,&array);CHKERRQ(ierr);
1041       }
1042       ierr = MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1043       ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
1044     } else if (next->type == DMCOMPOSITE_DM) {
1045       Vec local;
1046 
1047       ierr   = DMCreateLocalVector(next->dm,&local);CHKERRQ(ierr);
1048       ierr   = VecGetArray(global,&array);CHKERRQ(ierr);
1049       array += next->rstart;
1050       ierr   = DMGetGlobalVector(next->dm,&dglobal);CHKERRQ(ierr);
1051       ierr   = VecPlaceArray(dglobal,array);CHKERRQ(ierr);
1052       ierr   = DMGlobalToLocalBegin(next->dm,dglobal,INSERT_VALUES,local);CHKERRQ(ierr);
1053       ierr   = DMGlobalToLocalEnd(next->dm,dglobal,INSERT_VALUES,local);CHKERRQ(ierr);
1054       array -= next->rstart;
1055       ierr   = VecRestoreArray(global,&array);CHKERRQ(ierr);
1056       ierr   = VecResetArray(dglobal);CHKERRQ(ierr);
1057       ierr   = DMRestoreGlobalVector(next->dm,&dglobal);CHKERRQ(ierr);
1058 
1059       ierr   = VecGetArray(local,&array);CHKERRQ(ierr);
1060       ierr   = VecGetSize(local,&n);CHKERRQ(ierr);
1061       ierr   = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1062       for (i=0; i<n; i++) idx[i] = (PetscInt)PetscRealPart(array[i]);
1063       ierr    = VecRestoreArray(local,&array);CHKERRQ(ierr);
1064       ierr    = VecDestroy(local);CHKERRQ(ierr);
1065       ierr    = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
1066 
1067     } else SETERRQ(((PetscObject)global)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1068     next = next->next;
1069     cnt++;
1070   }
1071   ierr = VecDestroy(global);CHKERRQ(ierr);
1072   PetscFunctionReturn(0);
1073 }
1074 
1075 #undef __FUNCT__
1076 #define __FUNCT__ "DMCompositeGetGlobalISs"
1077 /*@C
1078     DMCompositeGetGlobalISs - Gets the index sets for each composed object
1079 
1080     Collective on DMComposite
1081 
1082     Input Parameter:
1083 .    dm - the packer object
1084 
1085     Output Parameters:
1086 .    is - the array of index sets
1087 
1088     Level: advanced
1089 
1090     Notes:
1091        The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()
1092 
1093        The number of IS on each process will/may be different when redundant arrays are used
1094 
1095        These could be used to extract a subset of vector entries for a "multi-physics" preconditioner
1096 
1097        Use DMCompositeGetLocalISs() for index sets that include ghost points
1098 
1099 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1100          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
1101          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()
1102 
1103 @*/
1104 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalISs(DM dm,IS *is[])
1105 {
1106   PetscErrorCode         ierr;
1107   PetscInt               cnt = 0,*idx,i;
1108   struct DMCompositeLink *next;
1109   PetscMPIInt            rank;
1110   DM_Composite           *com = (DM_Composite*)dm->data;
1111 
1112   PetscFunctionBegin;
1113   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1114   ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
1115   next = com->next;
1116   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1117 
1118   /* loop over packed objects, handling one at at time */
1119   while (next) {
1120 
1121     if (next->type == DMCOMPOSITE_ARRAY) {
1122 
1123       if (rank == next->rank) {
1124         ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1125         for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
1126         ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
1127         cnt++;
1128       }
1129 
1130     } else if (next->type == DMCOMPOSITE_DM) {
1131       ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1132       for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
1133       ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
1134       cnt++;
1135     } else {
1136       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1137     }
1138     next = next->next;
1139   }
1140   PetscFunctionReturn(0);
1141 }
1142 
1143 /* -------------------------------------------------------------------------------------*/
1144 #undef __FUNCT__
1145 #define __FUNCT__ "DMCompositeGetLocalVectors_Array"
1146 PetscErrorCode DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
1147 {
1148   PetscErrorCode ierr;
1149   PetscFunctionBegin;
1150   if (array) {
1151     ierr = PetscMalloc(mine->n*sizeof(PetscScalar),array);CHKERRQ(ierr);
1152   }
1153   PetscFunctionReturn(0);
1154 }
1155 
1156 #undef __FUNCT__
1157 #define __FUNCT__ "DMCompositeGetLocalVectors_DM"
1158 PetscErrorCode DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
1159 {
1160   PetscErrorCode ierr;
1161   PetscFunctionBegin;
1162   if (local) {
1163     ierr = DMGetLocalVector(mine->dm,local);CHKERRQ(ierr);
1164   }
1165   PetscFunctionReturn(0);
1166 }
1167 
1168 #undef __FUNCT__
1169 #define __FUNCT__ "DMCompositeRestoreLocalVectors_Array"
1170 PetscErrorCode DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
1171 {
1172   PetscErrorCode ierr;
1173   PetscFunctionBegin;
1174   if (array) {
1175     ierr = PetscFree(*array);CHKERRQ(ierr);
1176   }
1177   PetscFunctionReturn(0);
1178 }
1179 
1180 #undef __FUNCT__
1181 #define __FUNCT__ "DMCompositeRestoreLocalVectors_DM"
1182 PetscErrorCode DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
1183 {
1184   PetscErrorCode ierr;
1185   PetscFunctionBegin;
1186   if (local) {
1187     ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr);
1188   }
1189   PetscFunctionReturn(0);
1190 }
1191 
1192 #undef __FUNCT__
1193 #define __FUNCT__ "DMCompositeGetLocalVectors"
1194 /*@C
1195     DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
1196        Use DMCompositeRestoreLocalVectors() to return them.
1197 
1198     Not Collective
1199 
1200     Input Parameter:
1201 .    dm - the packer object
1202 
1203     Output Parameter:
1204 .    ... - the individual sequential objects (arrays or vectors)
1205 
1206     Level: advanced
1207 
1208 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1209          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1210          DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1211 
1212 @*/
1213 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalVectors(DM dm,...)
1214 {
1215   va_list                Argp;
1216   PetscErrorCode         ierr;
1217   struct DMCompositeLink *next;
1218   DM_Composite           *com = (DM_Composite*)dm->data;
1219 
1220   PetscFunctionBegin;
1221   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1222   next = com->next;
1223   /* loop over packed objects, handling one at at time */
1224   va_start(Argp,dm);
1225   while (next) {
1226     if (next->type == DMCOMPOSITE_ARRAY) {
1227       PetscScalar **array;
1228       array = va_arg(Argp, PetscScalar**);
1229       ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
1230     } else if (next->type == DMCOMPOSITE_DM) {
1231       Vec *vec;
1232       vec = va_arg(Argp, Vec*);
1233       ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
1234     } else {
1235       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1236     }
1237     next = next->next;
1238   }
1239   va_end(Argp);
1240   PetscFunctionReturn(0);
1241 }
1242 
1243 #undef __FUNCT__
1244 #define __FUNCT__ "DMCompositeRestoreLocalVectors"
1245 /*@C
1246     DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite.
1247 
1248     Not Collective
1249 
1250     Input Parameter:
1251 .    dm - the packer object
1252 
1253     Output Parameter:
1254 .    ... - the individual sequential objects (arrays or vectors)
1255 
1256     Level: advanced
1257 
1258 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1259          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1260          DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1261 
1262 @*/
1263 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreLocalVectors(DM dm,...)
1264 {
1265   va_list                Argp;
1266   PetscErrorCode         ierr;
1267   struct DMCompositeLink *next;
1268   DM_Composite           *com = (DM_Composite*)dm->data;
1269 
1270   PetscFunctionBegin;
1271   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1272   next = com->next;
1273   /* loop over packed objects, handling one at at time */
1274   va_start(Argp,dm);
1275   while (next) {
1276     if (next->type == DMCOMPOSITE_ARRAY) {
1277       PetscScalar **array;
1278       array = va_arg(Argp, PetscScalar**);
1279       ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
1280     } else if (next->type == DMCOMPOSITE_DM) {
1281       Vec *vec;
1282       vec = va_arg(Argp, Vec*);
1283       ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
1284     } else {
1285       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1286     }
1287     next = next->next;
1288   }
1289   va_end(Argp);
1290   PetscFunctionReturn(0);
1291 }
1292 
1293 /* -------------------------------------------------------------------------------------*/
1294 #undef __FUNCT__
1295 #define __FUNCT__ "DMCompositeGetEntries_Array"
1296 PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n)
1297 {
1298   PetscFunctionBegin;
1299   if (n) *n = mine->n;
1300   PetscFunctionReturn(0);
1301 }
1302 
1303 #undef __FUNCT__
1304 #define __FUNCT__ "DMCompositeGetEntries_DM"
1305 PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm)
1306 {
1307   PetscFunctionBegin;
1308   if (dm) *dm = mine->dm;
1309   PetscFunctionReturn(0);
1310 }
1311 
1312 #undef __FUNCT__
1313 #define __FUNCT__ "DMCompositeGetEntries"
1314 /*@C
1315     DMCompositeGetEntries - Gets the DA, redundant size, etc for each entry in a DMComposite.
1316 
1317     Not Collective
1318 
1319     Input Parameter:
1320 .    dm - the packer object
1321 
1322     Output Parameter:
1323 .    ... - the individual entries, DAs or integer sizes)
1324 
1325     Level: advanced
1326 
1327 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1328          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1329          DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(),  DMCompositeScatter(),
1330          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors()
1331 
1332 @*/
1333 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetEntries(DM dm,...)
1334 {
1335   va_list                Argp;
1336   PetscErrorCode         ierr;
1337   struct DMCompositeLink *next;
1338   DM_Composite           *com = (DM_Composite*)dm->data;
1339 
1340   PetscFunctionBegin;
1341   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1342   next = com->next;
1343   /* loop over packed objects, handling one at at time */
1344   va_start(Argp,dm);
1345   while (next) {
1346     if (next->type == DMCOMPOSITE_ARRAY) {
1347       PetscInt *n;
1348       n = va_arg(Argp, PetscInt*);
1349       ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr);
1350     } else if (next->type == DMCOMPOSITE_DM) {
1351       DM *dmn;
1352       dmn = va_arg(Argp, DM*);
1353       ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr);
1354     } else {
1355       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1356     }
1357     next = next->next;
1358   }
1359   va_end(Argp);
1360   PetscFunctionReturn(0);
1361 }
1362 
1363 #undef __FUNCT__
1364 #define __FUNCT__ "DMCompositeRefine"
1365 /*@C
1366     DMCompositeRefine - Refines a DM by refining all of its DAs
1367 
1368     Collective on DMComposite
1369 
1370     Input Parameters:
1371 +    dm - the packer object
1372 -    comm - communicator to contain the new DM object, usually PETSC_NULL
1373 
1374     Output Parameter:
1375 .    fine - new packer
1376 
1377     Level: advanced
1378 
1379 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1380          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1381          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),  DMCompositeScatter(),
1382          DMCompositeGetEntries()
1383 
1384 @*/
1385 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRefine(DM dmi,MPI_Comm comm,DM *fine)
1386 {
1387   PetscErrorCode         ierr;
1388   struct DMCompositeLink *next;
1389   DM_Composite           *com = (DM_Composite*)dmi->data;
1390   DM                     dm;
1391 
1392   PetscFunctionBegin;
1393   PetscValidHeaderSpecific(dmi,DM_CLASSID,1);
1394   next = com->next;
1395   ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr);
1396 
1397   /* loop over packed objects, handling one at at time */
1398   while (next) {
1399     if (next->type == DMCOMPOSITE_ARRAY) {
1400       ierr = DMCompositeAddArray(*fine,next->rank,next->n);CHKERRQ(ierr);
1401     } else if (next->type == DMCOMPOSITE_DM) {
1402       ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr);
1403       ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr);
1404       ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
1405     } else {
1406       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1407     }
1408     next = next->next;
1409   }
1410   PetscFunctionReturn(0);
1411 }
1412 
1413 #include "petscmat.h"
1414 
/* One node per DMCOMPOSITE_DM link: the interpolation matrix for that sub-problem */
struct MatPackLink {
  Mat                A;          /* interpolation for one packed DM */
  struct MatPackLink *next;
};

/* Shell-matrix context for the composite interpolation built by DMCompositeGetInterpolation() */
struct MatPack {
  DM                 right,left; /* right = coarse composite (input space), left = fine composite (output space) */
  struct MatPackLink *next;      /* list of per-DM interpolation matrices, in packing order */
};
1424 
#undef __FUNCT__
#define __FUNCT__ "MatMultBoth_Shell_Pack"
/*
   MatMultBoth_Shell_Pack - Applies the composite interpolation y = A x (add == PETSC_FALSE)
   or y += A x (add == PETSC_TRUE), part by part.

   Walks the coarse (x/right) and fine (y/left) link lists in lockstep; the two composites
   are assumed to have the same sequence of link types (checked when A was built).
   Redundant arrays are copied/accumulated directly; DM parts apply the corresponding
   per-DM interpolation matrix from the MatPackLink list.
*/
PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool  add)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  PetscInt               i;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft = (DM_Composite*)mpack->left->data;
  /* x lives in the right (coarse) space, y in the left (fine) space */
  xnext = comright->next;
  ynext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      /* redundant arrays interpolate as the identity; only the owning rank holds data */
      if (rank == xnext->rank) {
        ierr    = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecGetArray(y,&yarray);CHKERRQ(ierr);
        if (add) {
          for (i=0; i<xnext->n; i++) {
            yarray[ynext->rstart+i] += xarray[xnext->rstart+i];
          }
        } else {
          ierr    = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        ierr    = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap the x/y slices of this part in borrowed work vectors and apply the per-DM matrix */
      ierr  = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr  = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr  = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      if (add) {
        ierr  = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr);
      } else {
        ierr  = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      }
      ierr  = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      ierr  = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr  = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next;
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}
1491 
1492 #undef __FUNCT__
1493 #define __FUNCT__ "MatMultAdd_Shell_Pack"
1494 PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z)
1495 {
1496   PetscErrorCode ierr;
1497   PetscFunctionBegin;
1498   if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only");
1499   ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
1500   PetscFunctionReturn(0);
1501 }
1502 
1503 #undef __FUNCT__
1504 #define __FUNCT__ "MatMult_Shell_Pack"
1505 PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y)
1506 {
1507   PetscErrorCode ierr;
1508   PetscFunctionBegin;
1509   ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
1510   PetscFunctionReturn(0);
1511 }
1512 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_Shell_Pack"
/*
   MatMultTranspose_Shell_Pack - Applies y = A^T x for the composite interpolation,
   part by part (restriction from fine to coarse).

   Note the role swap relative to MatMultBoth_Shell_Pack(): here x lives in the
   left (fine) space and y in the right (coarse) space, so ynext walks comright
   and xnext walks comleft.
*/
PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr  = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft = (DM_Composite*)mpack->left->data;
  ynext = comright->next;
  xnext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      /* redundant arrays restrict as the identity; only the rank owning the coarse array copies */
      if (rank == ynext->rank) {
        ierr    = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecGetArray(y,&yarray);CHKERRQ(ierr);
        ierr    = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        ierr    = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap the slices in borrowed work vectors and apply the per-DM transpose */
      ierr  = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr  = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr  = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      ierr  = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      ierr  = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      ierr  = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr  = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next;
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}
1568 
1569 #undef __FUNCT__
1570 #define __FUNCT__ "MatDestroy_Shell_Pack"
1571 PetscErrorCode MatDestroy_Shell_Pack(Mat A)
1572 {
1573   struct MatPack     *mpack;
1574   struct MatPackLink *anext,*oldanext;
1575   PetscErrorCode     ierr;
1576 
1577   PetscFunctionBegin;
1578   ierr  = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
1579   anext = mpack->next;
1580 
1581   while (anext) {
1582     ierr     = MatDestroy(anext->A);CHKERRQ(ierr);
1583     oldanext = anext;
1584     anext    = anext->next;
1585     ierr     = PetscFree(oldanext);CHKERRQ(ierr);
1586   }
1587   ierr = PetscFree(mpack);CHKERRQ(ierr);
1588   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
1589   PetscFunctionReturn(0);
1590 }
1591 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetInterpolation"
/*@C
    DMCompositeGetInterpolation - Creates the interpolation from a coarse DMComposite to a
      fine DMComposite by combining the interpolations of all packed DMs

    Collective on DMComposite

    Input Parameters:
+    coarse - coarse grid packer
-    fine - fine grid packer

    Output Parameter:
+    A - interpolation matrix
-    v - scaling vector

    Level: advanced

.seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),  DMCompositeScatter(),DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetInterpolation(DM coarse,DM fine,Mat *A,Vec *v)
{
  PetscErrorCode         ierr;
  PetscInt               m,n,M,N;
  struct DMCompositeLink *nextc;
  struct DMCompositeLink *nextf;
  struct MatPackLink     *nextmat,*pnextmat = 0;
  struct MatPack         *mpack;
  Vec                    gcoarse,gfine;
  DM_Composite           *comcoarse = (DM_Composite*)coarse->data;
  DM_Composite           *comfine = (DM_Composite*)fine->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  PetscValidHeaderSpecific(fine,DM_CLASSID,2);
  nextc = comcoarse->next;
  nextf = comfine->next;
  /* use global vectors only for determining matrix layout */
  ierr = DMCompositeCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr);
  ierr = DMCompositeCreateGlobalVector(fine,&gfine);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr);
  ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr);
  ierr = VecGetSize(gfine,&M);CHKERRQ(ierr);
  ierr = VecDestroy(gcoarse);CHKERRQ(ierr);
  ierr = VecDestroy(gfine);CHKERRQ(ierr);

  /* shell matrix context: right = coarse (input space), left = fine (output space);
     mpack->next starts PETSC_NULL because PetscNew() zeroes the allocation */
  ierr         = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr);
  mpack->right = coarse;
  mpack->left  = fine;
  ierr  = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr);
  ierr  = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr  = MatSetType(*A,MATSHELL);CHKERRQ(ierr);
  ierr  = MatShellSetContext(*A,mpack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr);

  /* loop over packed objects, handling one at at time; build the tail-appended
     list of per-DM interpolation matrices consumed by the Mult routines */
  while (nextc) {
    if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout");

    if (nextc->type == DMCOMPOSITE_ARRAY) {
      ; /* redundant arrays need no matrix: the Mult routines copy them directly */
    } else if (nextc->type == DMCOMPOSITE_DM) {
      ierr          = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr);
      nextmat->next = 0;
      if (pnextmat) {
        pnextmat->next = nextmat;
        pnextmat       = nextmat;
      } else {
        pnextmat    = nextmat;
        mpack->next = nextmat;
      }
      ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    nextc = nextc->next;
    nextf = nextf->next;
  }
  /* NOTE(review): the output argument v is never written -- verify callers pass PETSC_NULL */
  PetscFunctionReturn(0);
}
1678 
1679 #undef __FUNCT__
1680 #define __FUNCT__ "DMCompositeGetMatrix"
1681 /*@C
1682     DMCompositeGetMatrix - Creates a matrix with the correct parallel layout and nonzero structure required for
1683       computing the Jacobian on a function defined using the stencils set in the DA's and coupling in the array variables
1684 
1685     Collective on DA
1686 
1687     Input Parameter:
+   dm - the composite object
1689 -   mtype - Supported types are MATSEQAIJ, MATMPIAIJ
1690 
1691     Output Parameters:
1692 .   J  - matrix with the correct nonzero structure
1693         (obviously without the correct Jacobian values)
1694 
1695     Level: advanced
1696 
1697     Notes: This properly preallocates the number of nonzeros in the sparse matrix so you
1698        do not need to do it yourself.
1699 
1700 
1701 .seealso DAGetMatrix(), DMCompositeCreate()
1702 
1703 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetMatrix(DM dm, const MatType mtype,Mat *J)
{
  PetscErrorCode         ierr;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscInt               m,*dnz,*onz,i,j,mA;
  Mat                    Atmp;              /* per-DM matrix used only to discover the nonzero structure */
  PetscMPIInt            rank;
  PetscScalar            zero = 0.0;
  PetscBool              dense = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);

  /* use global vector to determine layout needed for matrix */
  m = com->n;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  /* NOTE(review): the outer matrix is always AIJ; the mtype argument is only
     forwarded to the sub-DMs via DMGetMatrix() below */
  ierr = MatSetType(*J,MATAIJ);CHKERRQ(ierr);

  /*
     Extremely inefficient but will compute entire Jacobian for testing
  */
  ierr = PetscOptionsGetTruth(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
  if (dense) {
    PetscInt    rstart,rend,*indices;
    PetscScalar *values;

    /* preallocate every global column in every local row, then insert explicit zeros */
    mA    = com->N;
    ierr = MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);CHKERRQ(ierr);

    ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
    ierr = PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);CHKERRQ(ierr);
    ierr = PetscMemzero(values,mA*sizeof(PetscScalar));CHKERRQ(ierr);
    for (i=0; i<mA; i++) indices[i] = i;
    for (i=rstart; i<rend; i++) {
      ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = PetscFree2(values,indices);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* First pass: count diagonal/off-diagonal nonzeros per row into dnz/onz */
  ierr = MatPreallocateInitialize(((PetscObject)dm)->comm,m,m,dnz,onz);CHKERRQ(ierr);
  /* loop over packed objects, handling one at a time */
  next = com->next;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      if (rank == next->rank) {  /* zero the "little" block */
        for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
          for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
            ierr = MatPreallocateSet(j,1,&i,dnz,onz);CHKERRQ(ierr);
          }
        }
      }
    } else if (next->type == DMCOMPOSITE_DM) {
      PetscInt       nc,rstart,*ccols,maxnc;
      const PetscInt *cols,*rstarts;
      PetscMPIInt    proc;

      ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
      ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);

      /* find the widest row so a single column buffer can be reused for every row */
      maxnc = 0;
      for (i=0; i<mA; i++) {
        ierr  = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        ierr  = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        maxnc = PetscMax(nc,maxnc);
      }
      ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
      for (i=0; i<mA; i++) {
        ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
        /* remap the columns taking into account how much they are shifted on each process */
        for (j=0; j<nc; j++) {
          proc = 0;
          while (cols[j] >= rstarts[proc+1]) proc++;
          ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
        }
        ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr);
        ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
      }
      ierr = PetscFree(ccols);CHKERRQ(ierr);
      ierr = MatDestroy(Atmp);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    /* NOTE(review): __rstart,__nrows,__start,__end are locals declared by the
       MatPreallocateInitialize() macro above; this call depends on that macro's
       internals remaining unchanged */
    ierr = (*com->FormCoupleLocations)(dm,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr);
  }
  ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* Second pass: insert explicit zeros so the nonzero pattern is fully assembled */
  next = com->next;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      if (rank == next->rank) {
        for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
          for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
            ierr = MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);CHKERRQ(ierr);
          }
        }
      }
    } else if (next->type == DMCOMPOSITE_DM) {
      PetscInt          nc,rstart,row,maxnc,*ccols;
      const PetscInt    *cols,*rstarts;
      const PetscScalar *values;
      PetscMPIInt       proc;

      ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
      ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
      ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);
      maxnc = 0;
      for (i=0; i<mA; i++) {
        ierr  = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        ierr  = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
        maxnc = PetscMax(nc,maxnc);
      }
      ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
      for (i=0; i<mA; i++) {
        ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
        /* same column remapping as in the preallocation pass above */
        for (j=0; j<nc; j++) {
          proc = 0;
          while (cols[j] >= rstarts[proc+1]) proc++;
          ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
        }
        row  = com->rstart+next->rstart+i;
        ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr);
        ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
      }
      ierr = PetscFree(ccols);CHKERRQ(ierr);
      ierr = MatDestroy(Atmp);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  if (com->FormCoupleLocations) {
    PetscInt __rstart;
    ierr = MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);CHKERRQ(ierr);
    ierr = (*com->FormCoupleLocations)(dm,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);CHKERRQ(ierr);
  }
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1858 
1859 #undef __FUNCT__
1860 #define __FUNCT__ "DMCompositeGetColoring"
1861 /*@
1862     DMCompositeGetColoring - Gets the coloring required for computing the Jacobian via
1863     finite differences on a function defined using a DM "grid"
1864 
1865     Collective on DA
1866 
1867     Input Parameter:
1868 +   dm - the DM object
1869 .   ctype - IS_COLORING_GLOBAL or IS_COLORING_GHOSTED
1870 -   mtype - MATAIJ or MATBAIJ
1871 
1872     Output Parameters:
1873 .   coloring - matrix coloring for use in computing Jacobians (or PETSC_NULL if not needed)
1874 
1875     Level: advanced
1876 
1877     Notes: This colors each diagonal block (associated with a single DM) with a different set of colors;
      thus it will compute the diagonal blocks of the Jacobian correctly. The off diagonal blocks are
1879       not computed, hence the Jacobian computed is not the entire Jacobian. If -dmcomposite_dense_jacobian
1880       is used then each column of the Jacobian is given a different color so the full Jacobian is computed
1881       correctly.
1882 
1883     Notes: These compute the graph coloring of the graph of A^{T}A. The coloring used
1884    for efficient (parallel or thread based) triangular solves etc is NOT yet
1885    available.
1886 
1887 
1888 .seealso ISColoringView(), ISColoringGetIS(), MatFDColoringCreate(), ISColoringType, ISColoring, DAGetColoring()
1889 
1890 @*/
1891 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetColoring(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
1892 {
1893   PetscErrorCode         ierr;
1894   PetscInt               n,i,cnt;
1895   ISColoringValue        *colors;
1896   PetscBool              dense = PETSC_FALSE;
1897   ISColoringValue        maxcol = 0;
1898   DM_Composite           *com = (DM_Composite*)dm->data;
1899 
1900   PetscFunctionBegin;
1901   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1902   if (ctype == IS_COLORING_GHOSTED) {
1903     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" );
1904   } else if (ctype == IS_COLORING_GLOBAL) {
1905     n = com->n;
1906   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType");
1907   ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in ISColoringDestroy() */
1908 
1909   ierr = PetscOptionsGetTruth(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
1910   if (dense) {
1911     for (i=0; i<n; i++) {
1912       colors[i] = (ISColoringValue)(com->rstart + i);
1913     }
1914     maxcol = com->N;
1915   } else {
1916     struct DMCompositeLink *next = com->next;
1917     PetscMPIInt            rank;
1918 
1919     ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1920     cnt  = 0;
1921     while (next) {
1922       if (next->type == DMCOMPOSITE_ARRAY) {
1923         if (rank == next->rank) {  /* each column gets is own color */
1924           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1925             colors[cnt++] = maxcol++;
1926           }
1927         }
1928         ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1929       } else if (next->type == DMCOMPOSITE_DM) {
1930         ISColoring     lcoloring;
1931 
1932         ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr);
1933         for (i=0; i<lcoloring->N; i++) {
1934           colors[cnt++] = maxcol + lcoloring->colors[i];
1935         }
1936         maxcol += lcoloring->n;
1937         ierr = ISColoringDestroy(lcoloring);CHKERRQ(ierr);
1938       } else {
1939         SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1940       }
1941       next = next->next;
1942     }
1943   }
1944   ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr);
1945   PetscFunctionReturn(0);
1946 }
1947 
1948 #undef __FUNCT__
1949 #define __FUNCT__ "DMCompositeGlobalToLocalBegin"
1950 /*@C
1951     DMCompositeGlobalToLocalBegin - begin update of single local vector from global vector
1952 
1953     Neighbor-wise Collective on DMComposite
1954 
1955     Input Parameter:
1956 +    dm - the packer object
1957 .    gvec - the global vector
1958 -    lvec - single local vector
1959 
1960     Level: advanced
1961 
1962 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
1963          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
1964          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
1965 
1966 @*/
1967 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGlobalToLocalBegin(DM dm,Vec gvec,InsertMode mode,Vec lvec)
1968 {
1969   PetscErrorCode         ierr;
1970   struct DMCompositeLink *next;
1971   PetscInt               cnt = 3;
1972   PetscMPIInt            rank;
1973   PetscScalar            *garray,*larray;
1974   DM_Composite           *com = (DM_Composite*)dm->data;
1975 
1976   PetscFunctionBegin;
1977   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1978   PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
1979   next = com->next;
1980   if (!com->setup) {
1981     ierr = DMCompositeSetUp(dm);CHKERRQ(ierr);
1982   }
1983   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1984   ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr);
1985   ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr);
1986 
1987   /* loop over packed objects, handling one at at time */
1988   while (next) {
1989     if (next->type == DMCOMPOSITE_ARRAY) {
1990       if (rank == next->rank) {
1991         ierr    = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr);
1992         garray += next->n;
1993       }
1994       /* does not handle ADD_VALUES */
1995       ierr = MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1996       larray += next->n;
1997     } else if (next->type == DMCOMPOSITE_DM) {
1998       Vec      local,global;
1999       PetscInt N;
2000 
2001       ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
2002       ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr);
2003       ierr = VecPlaceArray(global,garray);CHKERRQ(ierr);
2004       ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr);
2005       ierr = VecPlaceArray(local,larray);CHKERRQ(ierr);
2006       ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr);
2007       ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr);
2008       ierr = VecResetArray(global);CHKERRQ(ierr);
2009       ierr = VecResetArray(local);CHKERRQ(ierr);
2010       ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
2011       ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr);
2012       larray += next->n;
2013     } else {
2014       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
2015     }
2016     cnt++;
2017     next = next->next;
2018   }
2019 
2020   ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr);
2021   ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr);
2022   PetscFunctionReturn(0);
2023 }
2024 
2025 #undef __FUNCT__
2026 #define __FUNCT__ "DMCompositeGlobalToLocalEnd"
2027 /*@C
2028     DMCompositeGlobalToLocalEnd - All communication is handled in the Begin phase
2029 
2030     Neighbor-wise Collective on DMComposite
2031 
2032     Input Parameter:
2033 +    dm - the packer object
2034 .    gvec - the global vector
2035 -    lvec - single local vector
2036 
2037     Level: advanced
2038 
2039 .seealso DMCompositeDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeCreateGlobalVector(),
2040          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetGlobalIndices(), DMCompositeGetAccess(),
2041          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
2042 
2043 @*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGlobalToLocalEnd(DM dm,Vec gvec,InsertMode mode,Vec lvec)
{
  /* intentionally a no-op: DMCompositeGlobalToLocalBegin() performs all of the
     communication and copying; this exists only to complete the Begin/End pair */
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
2049