xref: /petsc/src/dm/impls/composite/pack.c (revision a03f9cedd42fafeabd5749330b5ade1511d0b021)
1 #define PETSCDM_DLL
2 
3 #include "petscdm.h"             /*I      "petscdm.h"     I*/
4 #include "private/dmimpl.h"
5 #include "petscmat.h"
6 
7 /*
8    rstart is where an array/subvector starts in the global parallel vector, so arrays
9    rstarts are meaningless (and set to the previous one) except on the processor where the array lives
10 */
11 
/* Discriminates the two kinds of objects packed into a composite:
   a redundant array owned by one process, or a full DM */
typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType;

/* One node in the singly linked list of packed objects */
struct DMCompositeLink {
  DMCompositeLinkType    type;          /* which union-style fields below are meaningful */
  struct DMCompositeLink *next;
  PetscInt               n,rstart;      /* n = length of this entry; rstart is relative to this process */
  PetscInt               grstart;       /* grstart is relative to all processes */

  /* only used for DMCOMPOSITE_DM */
  PetscInt               *grstarts;     /* global row for first unknown of this DM on each process */
  DM                     dm;

  /* only used for DMCOMPOSITE_ARRAY */
  PetscMPIInt            rank;          /* process where array unknowns live */
};

/* Private data of a composite DM; hangs off dm->data */
typedef struct {
  PetscInt               n,N,rstart;           /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */
  PetscInt               nghost;               /* number of all local entries include DMDA ghost points and any shared redundant arrays */
  PetscInt               nDM,nredundant,nmine; /* how many DM's and separate redundant arrays used to build DM (nmine is ones on this process) */
  PetscBool              setup;                /* after this is set, cannot add new links to the DM */
  struct DMCompositeLink *next;                /* head of the linked list of packed objects */

  /* optional user callback that adds coupling nonzero locations to matrices built from this DM */
  PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt);
} DM_Composite;
37 
38 #undef __FUNCT__
39 #define __FUNCT__ "DMCompositeSetCoupling"
40 /*@C
41     DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the
42       seperate components (DMDA's and arrays) in a DMto build the correct matrix nonzero structure.
43 
44 
45     Logically Collective on MPI_Comm
46 
47     Input Parameter:
48 +   dm - the composite object
49 -   formcouplelocations - routine to set the nonzero locations in the matrix
50 
51     Level: advanced
52 
53     Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into
54         this routine
55 
56 @*/
57 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt))
58 {
59   DM_Composite *com = (DM_Composite*)dm->data;
60 
61   PetscFunctionBegin;
62   com->FormCoupleLocations = FormCoupleLocations;
63   PetscFunctionReturn(0);
64 }
65 
66 #undef __FUNCT__
67 #define __FUNCT__ "DMCompositeSetContext"
68 /*@
69     DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they
70       set with DMCompositeSetCoupling()
71 
72 
73     Not Collective
74 
75     Input Parameter:
76 +   dm - the composite object
77 -   ctx - the user supplied context
78 
79     Level: advanced
80 
81     Notes: Use DMCompositeGetContext() to retrieve the context when needed.
82 
83 @*/
84 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetContext(DM dm,void *ctx)
85 {
86   PetscFunctionBegin;
87   dm->ctx = ctx;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMCompositeGetContext"
93 /*@
94     DMCompositeGetContext - Access the context set with DMCompositeSetContext()
95 
96 
97     Not Collective
98 
99     Input Parameter:
100 .   dm - the composite object
101 
102     Output Parameter:
103 .    ctx - the user supplied context
104 
105     Level: advanced
106 
107     Notes: Use DMCompositeGetContext() to retrieve the context when needed.
108 
109 @*/
110 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetContext(DM dm,void **ctx)
111 {
112   PetscFunctionBegin;
113   *ctx = dm->ctx;
114   PetscFunctionReturn(0);
115 }
116 
117 
118 
119 extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);
120 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Composite"
/* Destroys a composite DM: walks the link list, releasing each sub-DM reference and
   the per-link grstarts array, then frees the private data and the header itself. */
PetscErrorCode PETSCDM_DLLEXPORT DMDestroy_Composite(DM dm)
{
  PetscErrorCode         ierr;
  struct DMCompositeLink *next, *prev;
  PetscBool              done;
  DM_Composite           *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* reference counting: only proceed when the last reference is being released */
  ierr = DMDestroy_Private((DM)dm,&done);CHKERRQ(ierr);
  if (!done) PetscFunctionReturn(0);

  /* free each link; advance 'next' before freeing so the list pointer stays valid */
  next = com->next;
  while (next) {
    prev = next;
    next = next->next;
    if (prev->type == DMCOMPOSITE_DM) {
      ierr = DMDestroy(prev->dm);CHKERRQ(ierr);
    }
    /* grstarts is only allocated for DM links (in DMSetUp_Composite); PetscNew zeroed it otherwise */
    if (prev->grstarts) {
      ierr = PetscFree(prev->grstarts);CHKERRQ(ierr);
    }
    ierr = PetscFree(prev);CHKERRQ(ierr);
  }
  ierr = PetscFree(com);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
151 
152 #undef __FUNCT__
153 #define __FUNCT__ "DMView_Composite"
154 PetscErrorCode PETSCDM_DLLEXPORT DMView_Composite(DM dm,PetscViewer v)
155 {
156   PetscErrorCode ierr;
157   PetscBool      iascii;
158   DM_Composite   *com = (DM_Composite *)dm->data;
159 
160   PetscFunctionBegin;
161   ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
162   if (iascii) {
163     struct DMCompositeLink *lnk = com->next;
164     PetscInt               i;
165 
166     ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr);
167     ierr = PetscViewerASCIIPrintf(v,"  contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr);
168     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
169     for (i=0; lnk; lnk=lnk->next,i++) {
170       if (lnk->dm) {
171         ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr);
172         ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
173         ierr = DMView(lnk->dm,v);CHKERRQ(ierr);
174         ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
175       } else {
176         ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->n,lnk->rank);CHKERRQ(ierr);
177       }
178     }
179     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
180   }
181   PetscFunctionReturn(0);
182 }
183 
184 /* --------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Composite"
/* Finalizes the composite layout: computes the parallel ownership ranges for the packed
   global vector and the local/global starting offsets of every link. Collective; after
   this succeeds no further links may be added. */
PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_Composite(DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               nprev = 0;
  PetscMPIInt            rank,size;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscLayout            map;

  PetscFunctionBegin;
  if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
  /* let PetscLayout compute the global size and this process's starting offset from com->n */
  ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);

  /* now set the rstart for each linked array/vector */
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr);
  while (next) {
    next->rstart = nprev;
    /* a redundant array only occupies local space on its owning rank; DMs occupy space everywhere */
    if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n;
    next->grstart = com->rstart + next->rstart;
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* broadcast the owner's global start so every rank agrees where the array lives */
      ierr = MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    } else {
      /* for a DM, record every rank's global start (freed in DMDestroy_Composite) */
      ierr = PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr);
      ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    }
    next = next->next;
  }
  com->setup = PETSC_TRUE;
  PetscFunctionReturn(0);
}
225 
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "DMCompositeGetAccess_Array"
229 PetscErrorCode DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
230 {
231   PetscErrorCode ierr;
232   PetscScalar    *varray;
233   PetscMPIInt    rank;
234 
235   PetscFunctionBegin;
236   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
237   if (array) {
238     if (rank == mine->rank) {
239       ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
240       *array  = varray + mine->rstart;
241       ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
242     } else {
243       *array = 0;
244     }
245   }
246   PetscFunctionReturn(0);
247 }
248 
249 #undef __FUNCT__
250 #define __FUNCT__ "DMCompositeGetAccess_DM"
251 PetscErrorCode DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
252 {
253   PetscErrorCode ierr;
254   PetscScalar    *array;
255 
256   PetscFunctionBegin;
257   if (global) {
258     ierr    = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr);
259     ierr    = VecGetArray(vec,&array);CHKERRQ(ierr);
260     ierr    = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr);
261     ierr    = VecRestoreArray(vec,&array);CHKERRQ(ierr);
262   }
263   PetscFunctionReturn(0);
264 }
265 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_Array"
/* Counterpart of DMCompositeGetAccess_Array(). Intentionally a no-op: the access routine
   only handed out a pointer into the vector's storage, so there is nothing to restore. */
PetscErrorCode DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "DMCompositeRestoreAccess_DM"
276 PetscErrorCode DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
277 {
278   PetscErrorCode ierr;
279 
280   PetscFunctionBegin;
281   if (global) {
282     ierr = VecResetArray(*global);CHKERRQ(ierr);
283     ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr);
284   }
285   PetscFunctionReturn(0);
286 }
287 
288 #undef __FUNCT__
289 #define __FUNCT__ "DMCompositeScatter_Array"
290 PetscErrorCode DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
291 {
292   PetscErrorCode ierr;
293   PetscScalar    *varray;
294   PetscMPIInt    rank;
295 
296   PetscFunctionBegin;
297   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
298   if (rank == mine->rank) {
299     ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
300     ierr    = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
301     ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
302   }
303   ierr    = MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
304   PetscFunctionReturn(0);
305 }
306 
307 #undef __FUNCT__
308 #define __FUNCT__ "DMCompositeScatter_DM"
309 PetscErrorCode DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
310 {
311   PetscErrorCode ierr;
312   PetscScalar    *array;
313   Vec            global;
314 
315   PetscFunctionBegin;
316   ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
317   ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
318   ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
319   ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
320   ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
321   ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
322   ierr = VecResetArray(global);CHKERRQ(ierr);
323   ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
324   PetscFunctionReturn(0);
325 }
326 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_Array"
/* Gathers a redundant array back into the packed global vector on its owning rank.
   INSERT_VALUES: the owner simply copies its local array into the vector slice.
   ADD_VALUES: contributions from every rank are summed with MPI_Reduce onto the owner,
   using MPI_IN_PLACE on the owner when available to avoid a temporary buffer. */
PetscErrorCode DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,const PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    /* pointers handed out by DMCompositeGetAccess() alias the vector; gathering them would be redundant */
    if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
  }
  switch (imode) {
  case INSERT_VALUES:
    if (rank == mine->rank) {
      ierr = PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    }
    break;
  case ADD_VALUES: {
    PetscInt          i;
    void             *source;
    PetscScalar       *buffer,*dest;
    if (rank == mine->rank) {
      dest = &varray[mine->rstart];
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      /* sum directly into the vector slice; the owner's own contribution is pre-added below */
      buffer = dest;
      source = MPI_IN_PLACE;
#else
      /* no MPI_IN_PLACE: stage the owner's (vector + local) sum in a scratch buffer */
      ierr = PetscMalloc(mine->n*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = (void *) buffer;
#endif
      for (i=0; i<mine->n; i++) buffer[i] = varray[mine->rstart+i] + array[i];
    } else {
      /* non-owners contribute their local copy; dest is unused on non-root ranks */
      source = (void *) array;
      dest   = PETSC_NULL;
    }
    ierr = MPI_Reduce(source,dest,mine->n,MPIU_SCALAR,MPI_SUM,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    /* on the owner, source points at the scratch buffer allocated above */
    if (rank == mine->rank) {ierr = PetscFree(source);CHKERRQ(ierr);}
#endif
  } break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"imode");
  }
  if (rank == mine->rank) {ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
375 
376 #undef __FUNCT__
377 #define __FUNCT__ "DMCompositeGather_DM"
378 PetscErrorCode DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,Vec local)
379 {
380   PetscErrorCode ierr;
381   PetscScalar    *array;
382   Vec            global;
383 
384   PetscFunctionBegin;
385   ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
386   ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
387   ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
388   ierr = DMLocalToGlobalBegin(mine->dm,local,imode,global);CHKERRQ(ierr);
389   ierr = DMLocalToGlobalEnd(mine->dm,local,imode,global);CHKERRQ(ierr);
390   ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
391   ierr = VecResetArray(global);CHKERRQ(ierr);
392   ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
393   PetscFunctionReturn(0);
394 }
395 
396 /* ----------------------------------------------------------------------------------*/
397 
398 #include <stdarg.h>
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "DMCompositeGetNumberDM"
402 /*@C
403     DMCompositeGetNumberDM - Get's the number of DM objects in the DMComposite
404        representation.
405 
406     Not Collective
407 
408     Input Parameter:
409 .    dm - the packer object
410 
411     Output Parameter:
412 .     nDM - the number of DMs
413 
414     Level: beginner
415 
416 @*/
417 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetNumberDM(DM dm,PetscInt *nDM)
418 {
419   DM_Composite *com = (DM_Composite*)dm->data;
420   PetscFunctionBegin;
421   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
422   *nDM = com->nDM;
423   PetscFunctionReturn(0);
424 }
425 
426 
427 #undef __FUNCT__
428 #define __FUNCT__ "DMCompositeGetAccess"
429 /*@C
430     DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
431        representation.
432 
433     Collective on DMComposite
434 
435     Input Parameter:
436 +    dm - the packer object
437 .    gvec - the global vector
438 -    ... - the individual sequential or parallel objects (arrays or vectors)
439 
440     Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them
441 
442     Level: advanced
443 
444 @*/
445 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetAccess(DM dm,Vec gvec,...)
446 {
447   va_list                Argp;
448   PetscErrorCode         ierr;
449   struct DMCompositeLink *next;
450   DM_Composite           *com = (DM_Composite*)dm->data;
451 
452   PetscFunctionBegin;
453   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
454   PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
455   next = com->next;
456   if (!com->setup) {
457     ierr = DMSetUp(dm);CHKERRQ(ierr);
458   }
459 
460   /* loop over packed objects, handling one at at time */
461   va_start(Argp,gvec);
462   while (next) {
463     if (next->type == DMCOMPOSITE_ARRAY) {
464       PetscScalar **array;
465       array = va_arg(Argp, PetscScalar**);
466       ierr  = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
467     } else if (next->type == DMCOMPOSITE_DM) {
468       Vec *vec;
469       vec  = va_arg(Argp, Vec*);
470       ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
471     } else {
472       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
473     }
474     next = next->next;
475   }
476   va_end(Argp);
477   PetscFunctionReturn(0);
478 }
479 
480 #undef __FUNCT__
481 #define __FUNCT__ "DMCompositeRestoreAccess"
482 /*@C
483     DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
484        representation.
485 
486     Collective on DMComposite
487 
488     Input Parameter:
489 +    dm - the packer object
490 .    gvec - the global vector
491 -    ... - the individual sequential or parallel objects (arrays or vectors)
492 
493     Level: advanced
494 
495 .seealso  DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
496          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeScatter(),
497          DMCompositeRestoreAccess(), DMCompositeGetAccess()
498 
499 @*/
500 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreAccess(DM dm,Vec gvec,...)
501 {
502   va_list                Argp;
503   PetscErrorCode         ierr;
504   struct DMCompositeLink *next;
505   DM_Composite           *com = (DM_Composite*)dm->data;
506 
507   PetscFunctionBegin;
508   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
509   PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
510   next = com->next;
511   if (!com->setup) {
512     ierr = DMSetUp(dm);CHKERRQ(ierr);
513   }
514 
515   /* loop over packed objects, handling one at at time */
516   va_start(Argp,gvec);
517   while (next) {
518     if (next->type == DMCOMPOSITE_ARRAY) {
519       PetscScalar **array;
520       array = va_arg(Argp, PetscScalar**);
521       ierr  = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
522     } else if (next->type == DMCOMPOSITE_DM) {
523       Vec *vec;
524       vec  = va_arg(Argp, Vec*);
525       ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
526     } else {
527       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
528     }
529     next = next->next;
530   }
531   va_end(Argp);
532   PetscFunctionReturn(0);
533 }
534 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter"
/*@C
    DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors), PETSC_NULL for those that are not needed

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeScatter(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscInt               cnt;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; cnt starts at 3 because the
     varargs are arguments 3,4,... (dm and gvec occupy positions 1 and 2) and is
     only used for the argument-number in validation error messages */
  va_start(Argp,gvec);
  for (cnt=3,next=com->next; next; cnt++,next=next->next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      if (!array) continue;          /* PETSC_NULL means the caller does not want this entry */
      PetscValidScalarPointer(array,cnt);
      ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      if (!vec) continue;            /* PETSC_NULL means the caller does not want this entry */
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
591 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather"
/*@C
    DMCompositeGather - Gathers into a global packed vector from its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
.    imode - INSERT_VALUES or ADD_VALUES
-    ... - the individual sequential objects (arrays or vectors), PETSC_NULL for any that are not needed

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGather(DM dm,Vec gvec,InsertMode imode,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;
  PetscInt               cnt;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at a time; cnt numbers the vararg
     positions (starting at 3) for validation error messages */
  va_start(Argp,imode);
  for (cnt=3,next=com->next; next; cnt++,next=next->next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      if (!array) continue;          /* PETSC_NULL means the caller does not want this entry */
      PetscValidScalarPointer(array,cnt);
      ierr  = DMCompositeGather_Array(dm,next,gvec,imode,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      if (!vec) continue;            /* PETSC_NULL means the caller does not want this entry */
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeGather_DM(dm,next,gvec,imode,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
648 
649 #undef __FUNCT__
650 #define __FUNCT__ "DMCompositeAddArray"
651 /*@C
652     DMCompositeAddArray - adds an "redundant" array to a DMComposite. The array values will
653        be stored in part of the array on process orank.
654 
655     Collective on DMComposite
656 
657     Input Parameter:
658 +    dm - the packer object
659 .    orank - the process on which the array entries officially live, this number must be
660              the same on all processes.
661 -    n - the length of the array
662 
663     Level: advanced
664 
665 .seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
666          DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
667          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
668 
669 @*/
670 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n)
671 {
672   struct DMCompositeLink *mine,*next;
673   PetscErrorCode         ierr;
674   PetscMPIInt            rank;
675   DM_Composite           *com = (DM_Composite*)dm->data;
676 
677   PetscFunctionBegin;
678   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
679   next = com->next;
680   if (com->setup) {
681     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
682   }
683 #if defined(PETSC_USE_DEBUG)
684   {
685     PetscMPIInt orankmax;
686     ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr);
687     if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
688   }
689 #endif
690 
691   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
692   /* create new link */
693   ierr                = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
694   mine->n             = n;
695   mine->rank          = orank;
696   mine->dm            = PETSC_NULL;
697   mine->type          = DMCOMPOSITE_ARRAY;
698   mine->next          = PETSC_NULL;
699   if (rank == mine->rank) {com->n += n;com->nmine++;}
700 
701   /* add to end of list */
702   if (!next) {
703     com->next = mine;
704   } else {
705     while (next->next) next = next->next;
706     next->next = mine;
707   }
708   com->nredundant++;
709   PetscFunctionReturn(0);
710 }
711 
712 #undef __FUNCT__
713 #define __FUNCT__ "DMCompositeAddDM"
714 /*@C
715     DMCompositeAddDM - adds a DM  vector to a DMComposite
716 
717     Collective on DMComposite
718 
719     Input Parameter:
720 +    dm - the packer object
721 -    dm - the DM object, if the DM is a da you will need to caste it with a (DM)
722 
723     Level: advanced
724 
725 .seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
726          DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
727          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
728 
729 @*/
730 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddDM(DM dmc,DM dm)
731 {
732   PetscErrorCode         ierr;
733   PetscInt               n;
734   struct DMCompositeLink *mine,*next;
735   Vec                    global;
736   DM_Composite           *com = (DM_Composite*)dmc->data;
737 
738   PetscFunctionBegin;
739   PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
740   PetscValidHeaderSpecific(dm,DM_CLASSID,2);
741   next = com->next;
742   if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DM once you have used the DMComposite");
743 
744   /* create new link */
745   ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
746   ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
747   ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr);
748   ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr);
749   ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr);
750   mine->n      = n;
751   mine->dm     = dm;
752   mine->type   = DMCOMPOSITE_DM;
753   mine->next   = PETSC_NULL;
754   com->n       += n;
755 
756   /* add to end of list */
757   if (!next) {
758     com->next = mine;
759   } else {
760     while (next->next) next = next->next;
761     next->next = mine;
762   }
763   com->nDM++;
764   com->nmine++;
765   PetscFunctionReturn(0);
766 }
767 
768 extern PetscErrorCode PETSCDM_DLLEXPORT VecView_MPI(Vec,PetscViewer);
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecView_DMComposite"
/* Custom VECOP_VIEW for vectors created by DMCreateGlobalVector_Composite(): for draw
   viewers each sub-DM's slice is viewed separately (redundant arrays are skipped);
   any other viewer falls back to the standard parallel VecView. */
PetscErrorCode PETSCDM_DLLEXPORT VecView_DMComposite(Vec gvec,PetscViewer viewer)
{
  DM                     dm;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscBool              isdraw;
  DM_Composite           *com;

  PetscFunctionBegin;
  /* recover the composite attached by DMCreateGlobalVector_Composite() */
  ierr = PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
  com = (DM_Composite*)dm->data;
  next = com->next;

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (!isdraw) {
    /* do I really want to call this? */
    ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr);
  } else {
    PetscInt cnt = 0;   /* total draw-base shift applied, undone at the end */

    /* loop over packed objects, handling one at at time */
    while (next) {
      if (next->type == DMCOMPOSITE_ARRAY) {
        PetscScalar *array;
        ierr  = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr);

        /*skip it for now */
      } else if (next->type == DMCOMPOSITE_DM) {
        Vec      vec;
        PetscInt bs;

        ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        ierr = VecView(vec,viewer);CHKERRQ(ierr);
        ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr);
        ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        /* advance the draw window base so the next sub-vector draws into fresh windows */
        ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr);
        cnt += bs;
      } else {
        SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
      }
      next = next->next;
    }
    /* restore the draw window base to its original value */
    ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END
820 
821 
822 #undef __FUNCT__
823 #define __FUNCT__ "DMCreateGlobalVector_Composite"
824 PetscErrorCode PETSCDM_DLLEXPORT DMCreateGlobalVector_Composite(DM dm,Vec *gvec)
825 {
826   PetscErrorCode         ierr;
827   DM_Composite           *com = (DM_Composite*)dm->data;
828 
829   PetscFunctionBegin;
830   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
831   if (!com->setup) {
832     ierr = DMSetUp(dm);CHKERRQ(ierr);
833   }
834   ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr);
835   ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
836   ierr = VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr);
837   PetscFunctionReturn(0);
838 }
839 
840 #undef __FUNCT__
841 #define __FUNCT__ "DMCreateLocalVector_Composite"
842 PetscErrorCode PETSCDM_DLLEXPORT DMCreateLocalVector_Composite(DM dm,Vec *lvec)
843 {
844   PetscErrorCode         ierr;
845   DM_Composite           *com = (DM_Composite*)dm->data;
846 
847   PetscFunctionBegin;
848   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
849   if (!com->setup) {
850     ierr = DMSetUp(dm);CHKERRQ(ierr);
851   }
852   ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr);
853   ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
854   PetscFunctionReturn(0);
855 }
856 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetISLocalToGlobalMappings"
/*@C
    DMCompositeGetISLocalToGlobalMappings - gets an ISLocalToGlobalMapping for each DM/array in the DMComposite, maps to the composite global space

    Collective on DMComposite

    Input Parameter:
.    dm - the packer object

    Output Parameters:
.    ltogs - the individual mappings for each packed vector/array. Note that this includes
           all the ghost points that individual ghosted DMDA's may have. Also each process has a
           mapping for EACH redundant array (not just the local redundant arrays).

    Level: advanced

    Notes:
       Each entry of ltogs should be destroyed with ISLocalToGlobalMappingDestroy(), the ltogs array should be freed with PetscFree().

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMapping **ltogs)
{
  PetscErrorCode         ierr;
  PetscInt               i,*idx,n,cnt;
  struct DMCompositeLink *next;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* one mapping per packed entry: every DM plus every redundant array */
  ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(ISLocalToGlobalMapping),ltogs);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  cnt = 0;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* every rank gets a mapping for the redundant array: the owning rank fills the
         composite global indices, then broadcasts them to all other ranks */
      ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      if (rank == next->rank) {
        for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
      }
      ierr = MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
      /* PETSC_OWN_POINTER: the mapping takes ownership of idx; do not free it here */
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      ISLocalToGlobalMapping ltog;
      PetscMPIInt            size;
      const PetscInt         *suboff,*indices;
      Vec                    global;

      /* Get sub-DM global indices for each local dof */
      ierr = DMGetLocalToGlobalMapping(next->dm,&ltog);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr);
      ierr = ISLocalToGlobalMappingGetIndices(ltog,&indices);CHKERRQ(ierr);
      ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);

      /* Get the offsets for the sub-DM global vector */
      ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
      ierr = VecGetOwnershipRanges(global,&suboff);CHKERRQ(ierr);
      ierr = MPI_Comm_size(((PetscObject)global)->comm,&size);CHKERRQ(ierr);

      /* Shift the sub-DM definition of the global space to the composite global space */
      for (i=0; i<n; i++) {
        PetscInt subi = indices[i],lo = 0,hi = size,t;
        /* Binary search to find which rank owns subi; invariant: suboff[lo] <= subi < suboff[hi] */
        while (hi-lo > 1) {
          t = lo + (hi-lo)/2;
          if (suboff[t] > subi) hi = t;
          else                  lo = t;
        }
        /* local offset within rank lo's sub-DM range, shifted to that rank's composite start */
        idx[i] = subi - suboff[lo] + next->grstarts[lo];
      }
      ierr = ISLocalToGlobalMappingRestoreIndices(ltog,&indices);CHKERRQ(ierr);
      /* PETSC_OWN_POINTER: the mapping takes ownership of idx; do not free it here */
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
    } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    next = next->next;
    cnt++;
  }
  PetscFunctionReturn(0);
}
943 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetLocalISs"
/*@C
   DMCompositeGetLocalISs - Gets index sets for each DM/array component of a composite local vector

   Not Collective

   Input Arguments:
. dm - composite DM

   Output Arguments:
. is - array of serial index sets for each component of the DMComposite

   Level: intermediate

   Notes:
   At present, a composite local vector does not normally exist.  This function is used to provide index sets for
   MatGetLocalSubMatrix().  In the future, the scatters for each entry in the DMComposite may be be merged into a single
   scatter to a composite local vector.

   To get the composite global indices at all local points (including ghosts), use DMCompositeGetISLocalToGlobalMappings().

   To get index sets for pieces of the composite global vector, use DMCompositeGetGlobalISs().

   Each returned IS should be destroyed with ISDestroy(), the array should be freed with PetscFree().

.seealso: DMCompositeGetGlobalISs(), DMCompositeGetISLocalToGlobalMappings(), MatGetLocalSubMatrix(), MatCreateLocalRef()
@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalISs(DM dm,IS **is)
{
  PetscErrorCode         ierr;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *link;
  PetscInt cnt,start;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(is,2);
  /* NOTE(review): the array is sized by com->nmine but the loop below visits every
     link in the list -- assumes the two counts agree here; confirm against DMSetUp */
  ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
  /* each component occupies a contiguous [start, start+link->n) slice of the packed local space */
  for (cnt=0,start=0,link=com->next; link; start+=link->n,cnt++,link=link->next) {
    ierr = ISCreateStride(PETSC_COMM_SELF,link->n,start,1,&(*is)[cnt]);CHKERRQ(ierr);
    if (link->type == DMCOMPOSITE_DM) {
      /* propagate the sub-DM's block size so block-aware operations work on the IS */
      PetscInt bs;
      ierr = DMGetBlockSize(link->dm,&bs);CHKERRQ(ierr);
      ierr = ISSetBlockSize((*is)[cnt],bs);CHKERRQ(ierr);
    }
  }
  PetscFunctionReturn(0);
}
993 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetGlobalISs"
/*@C
    DMCompositeGetGlobalISs - Gets the index sets for each composed object

    Collective on DMComposite

    Input Parameter:
.    dm - the packer object

    Output Parameters:
.    is - the array of index sets

    Level: advanced

    Notes:
       The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()

       The number of IS on each process will/may be different when redundant arrays are used

       These could be used to extract a subset of vector entries for a "multi-physics" preconditioner

       Use DMCompositeGetLocalISs() for index sets in the packed local numbering, and
       DMCompositeGetISLocalToGlobalMappings() for to map local sub-DM (including ghost) indices to packed global
       indices.

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()

@*/

PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalISs(DM dm,IS *is[])
{
  PetscErrorCode         ierr;
  PetscInt               cnt = 0,*idx,i;
  struct DMCompositeLink *next;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  while (next) {

    if (next->type == DMCOMPOSITE_ARRAY) {

      /* only the owning rank gets an IS for a redundant array, so cnt (and the
         number of entries in *is) may differ across ranks -- see Notes above.
         NOTE(review): ISCreateGeneral is called on the parallel comm only by the
         owner; presumably safe for this call -- verify collectivity requirements */
      if (rank == next->rank) {
        ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
        for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
        /* PETSC_OWN_POINTER: the IS takes ownership of idx; do not free it here */
        ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
        cnt++;
      }

    } else if (next->type == DMCOMPOSITE_DM) {
      /* the DM's locally owned entries are contiguous in the composite global space,
         starting at next->grstart on this rank */
      ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
      ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
      cnt++;
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  PetscFunctionReturn(0);
}
1064 
1065 /* -------------------------------------------------------------------------------------*/
1066 #undef __FUNCT__
1067 #define __FUNCT__ "DMCompositeGetLocalVectors_Array"
1068 PetscErrorCode DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
1069 {
1070   PetscErrorCode ierr;
1071   PetscFunctionBegin;
1072   if (array) {
1073     ierr = PetscMalloc(mine->n*sizeof(PetscScalar),array);CHKERRQ(ierr);
1074   }
1075   PetscFunctionReturn(0);
1076 }
1077 
1078 #undef __FUNCT__
1079 #define __FUNCT__ "DMCompositeGetLocalVectors_DM"
1080 PetscErrorCode DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
1081 {
1082   PetscErrorCode ierr;
1083   PetscFunctionBegin;
1084   if (local) {
1085     ierr = DMGetLocalVector(mine->dm,local);CHKERRQ(ierr);
1086   }
1087   PetscFunctionReturn(0);
1088 }
1089 
1090 #undef __FUNCT__
1091 #define __FUNCT__ "DMCompositeRestoreLocalVectors_Array"
1092 PetscErrorCode DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
1093 {
1094   PetscErrorCode ierr;
1095   PetscFunctionBegin;
1096   if (array) {
1097     ierr = PetscFree(*array);CHKERRQ(ierr);
1098   }
1099   PetscFunctionReturn(0);
1100 }
1101 
1102 #undef __FUNCT__
1103 #define __FUNCT__ "DMCompositeRestoreLocalVectors_DM"
1104 PetscErrorCode DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
1105 {
1106   PetscErrorCode ierr;
1107   PetscFunctionBegin;
1108   if (local) {
1109     ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr);
1110   }
1111   PetscFunctionReturn(0);
1112 }
1113 
1114 #undef __FUNCT__
1115 #define __FUNCT__ "DMCompositeGetLocalVectors"
1116 /*@C
1117     DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
1118        Use DMCompositeRestoreLocalVectors() to return them.
1119 
1120     Not Collective
1121 
1122     Input Parameter:
1123 .    dm - the packer object
1124 
1125     Output Parameter:
1126 .    ... - the individual sequential objects (arrays or vectors)
1127 
1128     Level: advanced
1129 
1130 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1131          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
1132          DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1133 
1134 @*/
1135 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalVectors(DM dm,...)
1136 {
1137   va_list                Argp;
1138   PetscErrorCode         ierr;
1139   struct DMCompositeLink *next;
1140   DM_Composite           *com = (DM_Composite*)dm->data;
1141 
1142   PetscFunctionBegin;
1143   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1144   next = com->next;
1145   /* loop over packed objects, handling one at at time */
1146   va_start(Argp,dm);
1147   while (next) {
1148     if (next->type == DMCOMPOSITE_ARRAY) {
1149       PetscScalar **array;
1150       array = va_arg(Argp, PetscScalar**);
1151       ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
1152     } else if (next->type == DMCOMPOSITE_DM) {
1153       Vec *vec;
1154       vec = va_arg(Argp, Vec*);
1155       ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
1156     } else {
1157       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1158     }
1159     next = next->next;
1160   }
1161   va_end(Argp);
1162   PetscFunctionReturn(0);
1163 }
1164 
1165 #undef __FUNCT__
1166 #define __FUNCT__ "DMCompositeRestoreLocalVectors"
1167 /*@C
1168     DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite.
1169 
1170     Not Collective
1171 
1172     Input Parameter:
1173 .    dm - the packer object
1174 
1175     Output Parameter:
1176 .    ... - the individual sequential objects (arrays or vectors)
1177 
1178     Level: advanced
1179 
1180 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1181          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
1182          DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1183 
1184 @*/
1185 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreLocalVectors(DM dm,...)
1186 {
1187   va_list                Argp;
1188   PetscErrorCode         ierr;
1189   struct DMCompositeLink *next;
1190   DM_Composite           *com = (DM_Composite*)dm->data;
1191 
1192   PetscFunctionBegin;
1193   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1194   next = com->next;
1195   /* loop over packed objects, handling one at at time */
1196   va_start(Argp,dm);
1197   while (next) {
1198     if (next->type == DMCOMPOSITE_ARRAY) {
1199       PetscScalar **array;
1200       array = va_arg(Argp, PetscScalar**);
1201       ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
1202     } else if (next->type == DMCOMPOSITE_DM) {
1203       Vec *vec;
1204       vec = va_arg(Argp, Vec*);
1205       ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
1206     } else {
1207       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1208     }
1209     next = next->next;
1210   }
1211   va_end(Argp);
1212   PetscFunctionReturn(0);
1213 }
1214 
1215 /* -------------------------------------------------------------------------------------*/
1216 #undef __FUNCT__
1217 #define __FUNCT__ "DMCompositeGetEntries_Array"
1218 PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n)
1219 {
1220   PetscFunctionBegin;
1221   if (n) *n = mine->n;
1222   PetscFunctionReturn(0);
1223 }
1224 
1225 #undef __FUNCT__
1226 #define __FUNCT__ "DMCompositeGetEntries_DM"
1227 PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm)
1228 {
1229   PetscFunctionBegin;
1230   if (dm) *dm = mine->dm;
1231   PetscFunctionReturn(0);
1232 }
1233 
1234 #undef __FUNCT__
1235 #define __FUNCT__ "DMCompositeGetEntries"
1236 /*@C
1237     DMCompositeGetEntries - Gets the DM, redundant size, etc for each entry in a DMComposite.
1238 
1239     Not Collective
1240 
1241     Input Parameter:
1242 .    dm - the packer object
1243 
1244     Output Parameter:
1245 .    ... - the individual entries, DMs or integer sizes)
1246 
1247     Level: advanced
1248 
1249 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1250          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
1251          DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(),  DMCompositeScatter(),
1252          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors()
1253 
1254 @*/
1255 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetEntries(DM dm,...)
1256 {
1257   va_list                Argp;
1258   PetscErrorCode         ierr;
1259   struct DMCompositeLink *next;
1260   DM_Composite           *com = (DM_Composite*)dm->data;
1261 
1262   PetscFunctionBegin;
1263   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1264   next = com->next;
1265   /* loop over packed objects, handling one at at time */
1266   va_start(Argp,dm);
1267   while (next) {
1268     if (next->type == DMCOMPOSITE_ARRAY) {
1269       PetscInt *n;
1270       n = va_arg(Argp, PetscInt*);
1271       ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr);
1272     } else if (next->type == DMCOMPOSITE_DM) {
1273       DM *dmn;
1274       dmn = va_arg(Argp, DM*);
1275       ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr);
1276     } else {
1277       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1278     }
1279     next = next->next;
1280   }
1281   va_end(Argp);
1282   PetscFunctionReturn(0);
1283 }
1284 
1285 #undef __FUNCT__
1286 #define __FUNCT__ "DMRefine_Composite"
1287 PetscErrorCode PETSCDM_DLLEXPORT DMRefine_Composite(DM dmi,MPI_Comm comm,DM *fine)
1288 {
1289   PetscErrorCode         ierr;
1290   struct DMCompositeLink *next;
1291   DM_Composite           *com = (DM_Composite*)dmi->data;
1292   DM                     dm;
1293 
1294   PetscFunctionBegin;
1295   PetscValidHeaderSpecific(dmi,DM_CLASSID,1);
1296   next = com->next;
1297   ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr);
1298 
1299   /* loop over packed objects, handling one at at time */
1300   while (next) {
1301     if (next->type == DMCOMPOSITE_ARRAY) {
1302       ierr = DMCompositeAddArray(*fine,next->rank,next->n);CHKERRQ(ierr);
1303     } else if (next->type == DMCOMPOSITE_DM) {
1304       ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr);
1305       ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr);
1306       ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
1307     } else {
1308       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1309     }
1310     next = next->next;
1311   }
1312   PetscFunctionReturn(0);
1313 }
1314 
1315 #include "petscmat.h"
1316 
struct MatPackLink {
  Mat                A;      /* interpolation matrix for one DMCOMPOSITE_DM entry */
  struct MatPackLink *next;  /* next DM entry; redundant-array entries carry no matrix */
};

struct MatPack {
  DM                 right,left;  /* right = coarse (input space), left = fine (output space) */
  struct MatPackLink *next;       /* head of the per-sub-DM interpolation matrix list */
};
1326 
#undef __FUNCT__
#define __FUNCT__ "MatMultBoth_Shell_Pack"
/*
   MatMultBoth_Shell_Pack - Applies the packed interpolation shell matrix: y = A*x
   (add == PETSC_FALSE) or y += A*x (add == PETSC_TRUE).

   Walks three lists in lock step: the right (input) composite's links for x, the
   left (output) composite's links for y, and the per-sub-DM matrix list; the two
   composites are assumed to have matching entry types (checked when A was built).
*/
PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool  add)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  PetscInt               i;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft = (DM_Composite*)mpack->left->data;
  xnext = comright->next;
  ynext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      /* redundant arrays interpolate as the identity: copy (or add) the slice,
         but only on the rank that owns the array */
      if (rank == xnext->rank) {
        ierr    = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecGetArray(y,&yarray);CHKERRQ(ierr);
        if (add) {
          for (i=0; i<xnext->n; i++) {
            yarray[ynext->rstart+i] += xarray[xnext->rstart+i];
          }
        } else {
          ierr    = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        ierr    = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap each sub-DM's slice of the packed arrays in borrowed work vectors
         (VecPlaceArray/VecResetArray) and apply the sub-DM interpolation matrix */
      ierr  = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr  = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr  = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      if (add) {
        ierr  = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr);
      } else {
        ierr  = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      }
      ierr  = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      /* detach the borrowed storage before returning the work vectors to the pool */
      ierr  = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr  = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      /* the matrix list only has nodes for DM entries, so it advances here only */
      anext = anext->next;
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}
1393 
1394 #undef __FUNCT__
1395 #define __FUNCT__ "MatMultAdd_Shell_Pack"
1396 PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z)
1397 {
1398   PetscErrorCode ierr;
1399   PetscFunctionBegin;
1400   if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only");
1401   ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
1402   PetscFunctionReturn(0);
1403 }
1404 
1405 #undef __FUNCT__
1406 #define __FUNCT__ "MatMult_Shell_Pack"
1407 PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y)
1408 {
1409   PetscErrorCode ierr;
1410   PetscFunctionBegin;
1411   ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
1412   PetscFunctionReturn(0);
1413 }
1414 
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_Shell_Pack"
/*
   MatMultTranspose_Shell_Pack - Applies the transpose of the packed interpolation
   shell matrix: y = A^T * x.

   Roles are swapped relative to MatMultBoth_Shell_Pack: here x lives in the left
   (fine) composite space and y in the right (coarse) composite space, so xnext
   walks comleft and ynext walks comright.
*/
PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr  = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft = (DM_Composite*)mpack->left->data;
  ynext = comright->next;
  xnext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      /* redundant arrays transfer as the identity: straight copy of the slice on the
         owning rank.  NOTE(review): no accumulate/reduction variant here, unlike the
         forward kernel's add mode -- presumably intentional; confirm against callers */
      if (rank == ynext->rank) {
        ierr    = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecGetArray(y,&yarray);CHKERRQ(ierr);
        ierr    = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        ierr    = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap each sub-DM's slice of the packed arrays in borrowed work vectors and
         apply the sub-DM interpolation matrix transposed */
      ierr  = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr  = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr  = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      ierr  = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      ierr  = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      /* detach the borrowed storage before returning the work vectors to the pool */
      ierr  = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr  = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      /* the matrix list only has nodes for DM entries, so it advances here only */
      anext = anext->next;
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}
1470 
1471 #undef __FUNCT__
1472 #define __FUNCT__ "MatDestroy_Shell_Pack"
1473 PetscErrorCode MatDestroy_Shell_Pack(Mat A)
1474 {
1475   struct MatPack     *mpack;
1476   struct MatPackLink *anext,*oldanext;
1477   PetscErrorCode     ierr;
1478 
1479   PetscFunctionBegin;
1480   ierr  = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
1481   anext = mpack->next;
1482 
1483   while (anext) {
1484     ierr     = MatDestroy(anext->A);CHKERRQ(ierr);
1485     oldanext = anext;
1486     anext    = anext->next;
1487     ierr     = PetscFree(oldanext);CHKERRQ(ierr);
1488   }
1489   ierr = PetscFree(mpack);CHKERRQ(ierr);
1490   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
1491   PetscFunctionReturn(0);
1492 }
1493 
#undef __FUNCT__
#define __FUNCT__ "DMGetInterpolation_Composite"
/*
   DMGetInterpolation_Composite - Builds a MATSHELL interpolation matrix mapping the
   coarse composite's global space to the fine composite's global space.

   The shell context (struct MatPack) records right = coarse and left = fine, plus a
   list with one sub-interpolation matrix per DMCOMPOSITE_DM entry; redundant-array
   entries get no matrix and are handled as the identity by the multiply kernels.
   The two composites must pack their entries in the same order with matching types.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMGetInterpolation_Composite(DM coarse,DM fine,Mat *A,Vec *v)
{
  PetscErrorCode         ierr;
  PetscInt               m,n,M,N;
  struct DMCompositeLink *nextc;
  struct DMCompositeLink *nextf;
  struct MatPackLink     *nextmat,*pnextmat = 0;
  struct MatPack         *mpack;
  Vec                    gcoarse,gfine;
  DM_Composite           *comcoarse = (DM_Composite*)coarse->data;
  DM_Composite           *comfine = (DM_Composite*)fine->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  PetscValidHeaderSpecific(fine,DM_CLASSID,2);
  nextc = comcoarse->next;
  nextf = comfine->next;
  /* use global vectors only for determining matrix layout */
  ierr = DMCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(fine,&gfine);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr);
  ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr);
  ierr = VecGetSize(gfine,&M);CHKERRQ(ierr);
  ierr = VecDestroy(gcoarse);CHKERRQ(ierr);
  ierr = VecDestroy(gfine);CHKERRQ(ierr);

  /* shell context; freed by MatDestroy_Shell_Pack when *A is destroyed */
  ierr         = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr);
  mpack->right = coarse;
  mpack->left  = fine;
  ierr  = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr);
  ierr  = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr  = MatSetType(*A,MATSHELL);CHKERRQ(ierr);
  ierr  = MatShellSetContext(*A,mpack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  while (nextc) {
    if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout");

    if (nextc->type == DMCOMPOSITE_ARRAY) {
      ; /* no matrix needed: the multiply kernels copy redundant arrays as the identity */
    } else if (nextc->type == DMCOMPOSITE_DM) {
      /* append a new node to the tail of the matrix list (pnextmat tracks the tail) */
      ierr          = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr);
      nextmat->next = 0;
      if (pnextmat) {
        pnextmat->next = nextmat;
        pnextmat       = nextmat;
      } else {
        pnextmat    = nextmat;
        mpack->next = nextmat;
      }
      ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    nextc = nextc->next;
    nextf = nextf->next;
  }
  PetscFunctionReturn(0);
}
1560 
1561 #undef __FUNCT__
1562 #define __FUNCT__ "DMGetMatrix_Composite"
1563 PetscErrorCode PETSCDM_DLLEXPORT DMGetMatrix_Composite(DM dm, const MatType mtype,Mat *J)
1564 {
1565   PetscErrorCode         ierr;
1566   DM_Composite           *com = (DM_Composite*)dm->data;
1567   struct DMCompositeLink *next = com->next;
1568   PetscInt               m,*dnz,*onz,i,j,mA;
1569   Mat                    Atmp;
1570   PetscMPIInt            rank;
1571   PetscScalar            zero = 0.0;
1572   PetscBool              dense = PETSC_FALSE;
1573   ISLocalToGlobalMapping ltogmap,ltogmapb;
1574 
1575   PetscFunctionBegin;
1576   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1577 
1578   /* use global vector to determine layout needed for matrix */
1579   m = com->n;
1580   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1581   ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
1582   ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1583   ierr = MatSetType(*J,MATAIJ);CHKERRQ(ierr);
1584 
1585   /*
1586      Extremely inefficient but will compute entire Jacobian for testing
1587   */
1588   ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
1589   if (dense) {
1590     PetscInt    rstart,rend,*indices;
1591     PetscScalar *values;
1592 
1593     mA    = com->N;
1594     ierr = MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);CHKERRQ(ierr);
1595     ierr = MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);CHKERRQ(ierr);
1596 
1597     ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
1598     ierr = PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);CHKERRQ(ierr);
1599     ierr = PetscMemzero(values,mA*sizeof(PetscScalar));CHKERRQ(ierr);
1600     for (i=0; i<mA; i++) indices[i] = i;
1601     for (i=rstart; i<rend; i++) {
1602       ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr);
1603     }
1604     ierr = PetscFree2(values,indices);CHKERRQ(ierr);
1605     ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1606     ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1607     PetscFunctionReturn(0);
1608   }
1609 
1610   ierr = MatPreallocateInitialize(((PetscObject)dm)->comm,m,m,dnz,onz);CHKERRQ(ierr);
1611   /* loop over packed objects, handling one at at time */
1612   next = com->next;
1613   while (next) {
1614     if (next->type == DMCOMPOSITE_ARRAY) {
1615       if (rank == next->rank) {  /* zero the "little" block */
1616         for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
1617           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1618             ierr = MatPreallocateSet(j,1,&i,dnz,onz);CHKERRQ(ierr);
1619           }
1620         }
1621       }
1622     } else if (next->type == DMCOMPOSITE_DM) {
1623       PetscInt       nc,rstart,*ccols,maxnc;
1624       const PetscInt *cols,*rstarts;
1625       PetscMPIInt    proc;
1626 
1627       ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
1628       ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
1629       ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
1630       ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);
1631 
1632       maxnc = 0;
1633       for (i=0; i<mA; i++) {
1634         ierr  = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1635         ierr  = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1636         maxnc = PetscMax(nc,maxnc);
1637       }
1638       ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
1639       for (i=0; i<mA; i++) {
1640         ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
1641         /* remap the columns taking into how much they are shifted on each process */
1642         for (j=0; j<nc; j++) {
1643           proc = 0;
1644           while (cols[j] >= rstarts[proc+1]) proc++;
1645           ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1646         }
1647         ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr);
1648         ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
1649       }
1650       ierr = PetscFree(ccols);CHKERRQ(ierr);
1651       ierr = MatDestroy(Atmp);CHKERRQ(ierr);
1652     } else {
1653       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1654     }
1655     next = next->next;
1656   }
1657   if (com->FormCoupleLocations) {
1658     ierr = (*com->FormCoupleLocations)(dm,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr);
1659   }
1660   ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr);
1661   ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr);
1662   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1663 
1664   next = com->next;
1665   while (next) {
1666     if (next->type == DMCOMPOSITE_ARRAY) {
1667       if (rank == next->rank) {
1668         for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
1669           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1670             ierr = MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);CHKERRQ(ierr);
1671           }
1672         }
1673       }
1674     } else if (next->type == DMCOMPOSITE_DM) {
1675       PetscInt          nc,rstart,row,maxnc,*ccols;
1676       const PetscInt    *cols,*rstarts;
1677       const PetscScalar *values;
1678       PetscMPIInt       proc;
1679 
1680       ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
1681       ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
1682       ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
1683       ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);
1684       maxnc = 0;
1685       for (i=0; i<mA; i++) {
1686         ierr  = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1687         ierr  = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1688         maxnc = PetscMax(nc,maxnc);
1689       }
1690       ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
1691       for (i=0; i<mA; i++) {
1692         ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
1693         for (j=0; j<nc; j++) {
1694           proc = 0;
1695           while (cols[j] >= rstarts[proc+1]) proc++;
1696           ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1697         }
1698         row  = com->rstart+next->rstart+i;
1699         ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr);
1700         ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
1701       }
1702       ierr = PetscFree(ccols);CHKERRQ(ierr);
1703       ierr = MatDestroy(Atmp);CHKERRQ(ierr);
1704     } else {
1705       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1706     }
1707     next = next->next;
1708   }
1709   if (com->FormCoupleLocations) {
1710     PetscInt __rstart;
1711     ierr = MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);CHKERRQ(ierr);
1712     ierr = (*com->FormCoupleLocations)(dm,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);CHKERRQ(ierr);
1713   }
1714   ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1715   ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1716 
1717   ierr = DMGetLocalToGlobalMapping(dm,&ltogmap);CHKERRQ(ierr);
1718   ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogmapb);CHKERRQ(ierr);
1719   ierr = MatSetLocalToGlobalMapping(*J,ltogmap,ltogmap);CHKERRQ(ierr);
1720   ierr = MatSetLocalToGlobalMappingBlock(*J,ltogmapb,ltogmapb);CHKERRQ(ierr);
1721   PetscFunctionReturn(0);
1722 }
1723 
1724 #undef __FUNCT__
1725 #define __FUNCT__ "DMCreateLocalToGlobalMapping_Composite"
1726 static PetscErrorCode DMCreateLocalToGlobalMapping_Composite(DM dm)
1727 {
1728   DM_Composite           *com = (DM_Composite*)dm->data;
1729   ISLocalToGlobalMapping *ltogs;
1730   PetscInt               i,cnt,m,*idx;
1731   PetscErrorCode         ierr;
1732 
1733   PetscFunctionBegin;
1734   /* Set the ISLocalToGlobalMapping on the new matrix */
1735   ierr = DMCompositeGetISLocalToGlobalMappings(dm,&ltogs);CHKERRQ(ierr);
1736   for (cnt=0,i=0; i<(com->nDM+com->nredundant); i++) {
1737     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1738     cnt += m;
1739   }
1740   ierr = PetscMalloc(cnt*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1741   for (cnt=0,i=0; i<(com->nDM+com->nredundant); i++) {
1742     const PetscInt *subidx;
1743     ierr = ISLocalToGlobalMappingGetSize(ltogs[i],&m);CHKERRQ(ierr);
1744     ierr = ISLocalToGlobalMappingGetIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1745     ierr = PetscMemcpy(&idx[cnt],subidx,m*sizeof(PetscInt));CHKERRQ(ierr);
1746     ierr = ISLocalToGlobalMappingRestoreIndices(ltogs[i],&subidx);CHKERRQ(ierr);
1747     cnt += m;
1748   }
1749   ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,cnt,idx,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
1750   for (i=0; i<com->nDM+com->nredundant; i++) {ierr = ISLocalToGlobalMappingDestroy(ltogs[i]);CHKERRQ(ierr);}
1751   ierr = PetscFree(ltogs);CHKERRQ(ierr);
1752   PetscFunctionReturn(0);
1753 }
1754 
1755 
1756 #undef __FUNCT__
1757 #define __FUNCT__ "DMGetColoring_Composite"
1758 PetscErrorCode PETSCDM_DLLEXPORT DMGetColoring_Composite(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
1759 {
1760   PetscErrorCode         ierr;
1761   PetscInt               n,i,cnt;
1762   ISColoringValue        *colors;
1763   PetscBool              dense = PETSC_FALSE;
1764   ISColoringValue        maxcol = 0;
1765   DM_Composite           *com = (DM_Composite*)dm->data;
1766 
1767   PetscFunctionBegin;
1768   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1769   if (ctype == IS_COLORING_GHOSTED) {
1770     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" );
1771   } else if (ctype == IS_COLORING_GLOBAL) {
1772     n = com->n;
1773   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType");
1774   ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in ISColoringDestroy() */
1775 
1776   ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
1777   if (dense) {
1778     for (i=0; i<n; i++) {
1779       colors[i] = (ISColoringValue)(com->rstart + i);
1780     }
1781     maxcol = com->N;
1782   } else {
1783     struct DMCompositeLink *next = com->next;
1784     PetscMPIInt            rank;
1785 
1786     ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1787     cnt  = 0;
1788     while (next) {
1789       if (next->type == DMCOMPOSITE_ARRAY) {
1790         if (rank == next->rank) {  /* each column gets is own color */
1791           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1792             colors[cnt++] = maxcol++;
1793           }
1794         }
1795         ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1796       } else if (next->type == DMCOMPOSITE_DM) {
1797         ISColoring     lcoloring;
1798 
1799         ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr);
1800         for (i=0; i<lcoloring->N; i++) {
1801           colors[cnt++] = maxcol + lcoloring->colors[i];
1802         }
1803         maxcol += lcoloring->n;
1804         ierr = ISColoringDestroy(lcoloring);CHKERRQ(ierr);
1805       } else {
1806         SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1807       }
1808       next = next->next;
1809     }
1810   }
1811   ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr);
1812   PetscFunctionReturn(0);
1813 }
1814 
1815 #undef __FUNCT__
1816 #define __FUNCT__ "DMGlobalToLocalBegin_Composite"
1817 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalBegin_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec)
1818 {
1819   PetscErrorCode         ierr;
1820   struct DMCompositeLink *next;
1821   PetscInt               cnt = 3;
1822   PetscMPIInt            rank;
1823   PetscScalar            *garray,*larray;
1824   DM_Composite           *com = (DM_Composite*)dm->data;
1825 
1826   PetscFunctionBegin;
1827   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1828   PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
1829   next = com->next;
1830   if (!com->setup) {
1831     ierr = DMSetUp(dm);CHKERRQ(ierr);
1832   }
1833   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1834   ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr);
1835   ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr);
1836 
1837   /* loop over packed objects, handling one at at time */
1838   while (next) {
1839     if (next->type == DMCOMPOSITE_ARRAY) {
1840       if (rank == next->rank) {
1841         ierr    = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr);
1842         garray += next->n;
1843       }
1844       /* does not handle ADD_VALUES */
1845       ierr = MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1846       larray += next->n;
1847     } else if (next->type == DMCOMPOSITE_DM) {
1848       Vec      local,global;
1849       PetscInt N;
1850 
1851       ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
1852       ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr);
1853       ierr = VecPlaceArray(global,garray);CHKERRQ(ierr);
1854       ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr);
1855       ierr = VecPlaceArray(local,larray);CHKERRQ(ierr);
1856       ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr);
1857       ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr);
1858       ierr = VecResetArray(global);CHKERRQ(ierr);
1859       ierr = VecResetArray(local);CHKERRQ(ierr);
1860       ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
1861       ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr);
1862       larray += next->n;
1863     } else {
1864       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1865     }
1866     cnt++;
1867     next = next->next;
1868   }
1869 
1870   ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr);
1871   ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr);
1872   PetscFunctionReturn(0);
1873 }
1874 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Composite"
/*
   Intentionally a no-op: DMGlobalToLocalBegin_Composite() performs the whole
   scatter synchronously (it calls both DMGlobalToLocalBegin() and
   DMGlobalToLocalEnd() on each sub-DM), so nothing remains to be done here.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalEnd_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
1882 
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Composite"
/*
   DMCreate_Composite - Implementation-specific constructor reached through
   DMSetType(dm,DMCOMPOSITE): allocates the DM_Composite data structure,
   attaches it to the DM, and fills the operations table with the
   composite implementations defined in this file.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMCreate_Composite(DM p)
{
  PetscErrorCode ierr;
  DM_Composite   *com;

  PetscFunctionBegin;
  ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr);
  p->data = com;
  ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr);
  com->n            = 0;            /* no unknowns until sub-objects are added */
  com->next         = PETSC_NULL;   /* empty list of packed sub-objects */
  com->nredundant   = 0;
  com->nDM          = 0;
  /* NOTE(review): remaining DM_Composite fields (N, rstart, nghost, nmine,
     setup, FormCoupleLocations) are presumably zeroed by PetscNewLog() -- confirm */

  p->ops->createglobalvector              = DMCreateGlobalVector_Composite;
  p->ops->createlocalvector               = DMCreateLocalVector_Composite;
  p->ops->createlocaltoglobalmapping      = DMCreateLocalToGlobalMapping_Composite;
  p->ops->createlocaltoglobalmappingblock = 0;   /* no block mapping for composites */
  p->ops->refine                          = DMRefine_Composite;
  p->ops->getinterpolation                = DMGetInterpolation_Composite;
  p->ops->getmatrix                       = DMGetMatrix_Composite;
  p->ops->getcoloring                     = DMGetColoring_Composite;
  p->ops->globaltolocalbegin              = DMGlobalToLocalBegin_Composite;
  p->ops->globaltolocalend                = DMGlobalToLocalEnd_Composite;
  p->ops->destroy                         = DMDestroy_Composite;
  p->ops->view                            = DMView_Composite;
  p->ops->setup                           = DMSetUp_Composite;
  PetscFunctionReturn(0);
}
EXTERN_C_END
1916 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeCreate"
/*@C
    DMCompositeCreate - Creates a vector packer, used to generate "composite"
      vectors made up of several subvectors.

    Collective on MPI_Comm

    Input Parameter:
.   comm - the processors that will share the global vector

    Output Parameters:
.   packer - the packer object

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
         DMCompositeGather(), DMCreateGlobalVector(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess()
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreate(MPI_Comm comm,DM *packer)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(packer,2);
  ierr = DMCreate(comm,packer);CHKERRQ(ierr);
  /* DMSetType() dispatches to DMCreate_Composite() to install the implementation */
  ierr = DMSetType(*packer,DMCOMPOSITE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
1948