xref: /petsc/src/dm/impls/composite/pack.c (revision 520db06cfec378d7c34fb86a68534e48a6477cdf)
1 #define PETSCDM_DLL
2 
3 #include "petscdm.h"             /*I      "petscdm.h"     I*/
4 #include "private/dmimpl.h"
5 #include "petscmat.h"
6 
7 /*
8    rstart is where an array/subvector starts in the global parallel vector, so arrays
9    rstarts are meaningless (and set to the previous one) except on the processor where the array lives
10 */
11 
/* Kind of packed object a link holds: a redundant array or a sub-DM */
typedef enum {DMCOMPOSITE_ARRAY, DMCOMPOSITE_DM} DMCompositeLinkType;

/* One entry in the singly linked list of objects packed into a DMComposite.
   Fields in the "only used for" sections are meaningful only for the
   corresponding link type (PetscNew() zeroes the rest). */
struct DMCompositeLink {
  DMCompositeLinkType    type;          /* discriminates which union of fields below is valid */
  struct DMCompositeLink *next;         /* next packed object; PETSC_NULL terminates the list */
  PetscInt               n,rstart;      /* rstart is relative to this process */
  PetscInt               grstart;       /* grstart is relative to all processes */

  /* only used for DMCOMPOSITE_DM */
  PetscInt               *grstarts;     /* global row for first unknown of this DM on each process */
  DM                     dm;

  /* only used for DMCOMPOSITE_ARRAY */
  PetscMPIInt            rank;          /* process where array unknowns live */
};
27 
/* Implementation data hung off DM->data for the composite DM type */
typedef struct {
  PetscInt               n,N,rstart;           /* rstart is relative to all processors, n unknowns owned by this process, N is total unknowns */
  PetscInt               nghost;               /* number of all local entries include DMDA ghost points and any shared redundant arrays */
  PetscInt               nDM,nredundant,nmine; /* how many DM's and separate redundant arrays used to build DM (nmine is ones on this process) */
  PetscBool              setup;                /* after this is set, cannot add new links to the DM */
  struct DMCompositeLink *next;                /* head of the linked list of packed objects */

  /* optional user callback that marks coupling nonzero locations between components; see DMCompositeSetCoupling() */
  PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt);
} DM_Composite;
37 
38 #undef __FUNCT__
39 #define __FUNCT__ "DMCompositeSetCoupling"
40 /*@C
41     DMCompositeSetCoupling - Sets user provided routines that compute the coupling between the
42       seperate components (DMDA's and arrays) in a DMto build the correct matrix nonzero structure.
43 
44 
45     Logically Collective on MPI_Comm
46 
47     Input Parameter:
48 +   dm - the composite object
49 -   formcouplelocations - routine to set the nonzero locations in the matrix
50 
51     Level: advanced
52 
53     Notes: See DMCompositeSetContext() and DMCompositeGetContext() for how to get user information into
54         this routine
55 
56 @*/
57 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetCoupling(DM dm,PetscErrorCode (*FormCoupleLocations)(DM,Mat,PetscInt*,PetscInt*,PetscInt,PetscInt,PetscInt,PetscInt))
58 {
59   DM_Composite *com = (DM_Composite*)dm->data;
60 
61   PetscFunctionBegin;
62   com->FormCoupleLocations = FormCoupleLocations;
63   PetscFunctionReturn(0);
64 }
65 
66 #undef __FUNCT__
67 #define __FUNCT__ "DMCompositeSetContext"
68 /*@
69     DMCompositeSetContext - Allows user to stash data they may need within the form coupling routine they
70       set with DMCompositeSetCoupling()
71 
72 
73     Not Collective
74 
75     Input Parameter:
76 +   dm - the composite object
77 -   ctx - the user supplied context
78 
79     Level: advanced
80 
81     Notes: Use DMCompositeGetContext() to retrieve the context when needed.
82 
83 @*/
84 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeSetContext(DM dm,void *ctx)
85 {
86   PetscFunctionBegin;
87   dm->ctx = ctx;
88   PetscFunctionReturn(0);
89 }
90 
91 #undef __FUNCT__
92 #define __FUNCT__ "DMCompositeGetContext"
93 /*@
94     DMCompositeGetContext - Access the context set with DMCompositeSetContext()
95 
96 
97     Not Collective
98 
99     Input Parameter:
100 .   dm - the composite object
101 
102     Output Parameter:
103 .    ctx - the user supplied context
104 
105     Level: advanced
106 
107     Notes: Use DMCompositeGetContext() to retrieve the context when needed.
108 
109 @*/
110 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetContext(DM dm,void **ctx)
111 {
112   PetscFunctionBegin;
113   *ctx = dm->ctx;
114   PetscFunctionReturn(0);
115 }
116 
117 
118 
119 extern PetscErrorCode DMDestroy_Private(DM,PetscBool *);
120 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Composite"
/* Type-specific destroy for DMComposite: decrements the reference count via
   DMDestroy_Private() and, when the count reaches zero, releases every link
   in the packed-object list, the implementation struct, and the header. */
PetscErrorCode PETSCDM_DLLEXPORT DMDestroy_Composite(DM dm)
{
  PetscErrorCode         ierr;
  struct DMCompositeLink *next, *prev;
  PetscBool              done;
  DM_Composite           *com = (DM_Composite *)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  ierr = DMDestroy_Private((DM)dm,&done);CHKERRQ(ierr);
  /* still referenced elsewhere: nothing more to free */
  if (!done) PetscFunctionReturn(0);

  /* walk the link list, freeing each node after advancing past it */
  next = com->next;
  while (next) {
    prev = next;
    next = next->next;
    if (prev->type == DMCOMPOSITE_DM) {
      ierr = DMDestroy(prev->dm);CHKERRQ(ierr);
    }
    /* grstarts is only allocated for DM links (PetscNew zeroed it otherwise) */
    if (prev->grstarts) {
      ierr = PetscFree(prev->grstarts);CHKERRQ(ierr);
    }
    ierr = PetscFree(prev);CHKERRQ(ierr);
  }
  ierr = PetscFree(com);CHKERRQ(ierr);
  ierr = PetscHeaderDestroy(dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
151 
152 #undef __FUNCT__
153 #define __FUNCT__ "DMView_Composite"
154 PetscErrorCode PETSCDM_DLLEXPORT DMView_Composite(DM dm,PetscViewer v)
155 {
156   PetscErrorCode ierr;
157   PetscBool      iascii;
158   DM_Composite   *com = (DM_Composite *)dm->data;
159 
160   PetscFunctionBegin;
161   ierr = PetscTypeCompare((PetscObject)v,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
162   if (iascii) {
163     struct DMCompositeLink *lnk = com->next;
164     PetscInt               i;
165 
166     ierr = PetscViewerASCIIPrintf(v,"DM (%s)\n",((PetscObject)dm)->prefix?((PetscObject)dm)->prefix:"no prefix");CHKERRQ(ierr);
167     ierr = PetscViewerASCIIPrintf(v,"  contains %d DMs and %d redundant arrays\n",com->nDM,com->nredundant);CHKERRQ(ierr);
168     ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
169     for (i=0; lnk; lnk=lnk->next,i++) {
170       if (lnk->dm) {
171         ierr = PetscViewerASCIIPrintf(v,"Link %d: DM of type %s\n",i,((PetscObject)lnk->dm)->type_name);CHKERRQ(ierr);
172         ierr = PetscViewerASCIIPushTab(v);CHKERRQ(ierr);
173         ierr = DMView(lnk->dm,v);CHKERRQ(ierr);
174         ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
175       } else {
176         ierr = PetscViewerASCIIPrintf(v,"Link %d: Redundant array of size %d owned by rank %d\n",i,lnk->n,lnk->rank);CHKERRQ(ierr);
177       }
178     }
179     ierr = PetscViewerASCIIPopTab(v);CHKERRQ(ierr);
180   }
181   PetscFunctionReturn(0);
182 }
183 
184 /* --------------------------------------------------------------------------------------*/
#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Composite"
/* Finalizes the composite layout.  Computes the global size and this
   process's offset (com->rstart) from the accumulated local size com->n,
   then assigns each link its local offset (rstart) and global offset
   (grstart/grstarts).  Collective: every rank must call this with the same
   sequence of previously added links, since it performs MPI_Bcast/Allgather
   per link in list order. */
PetscErrorCode PETSCDM_DLLEXPORT DMSetUp_Composite(DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               nprev = 0;
  PetscMPIInt            rank,size;
  DM_Composite           *com = (DM_Composite*)dm->data;
  struct DMCompositeLink *next = com->next;
  PetscLayout            map;

  PetscFunctionBegin;
  if (com->setup) SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Packer has already been setup");
  /* temporary layout just to compute N and rstart from the local sizes */
  ierr = PetscLayoutCreate(((PetscObject)dm)->comm,&map);CHKERRQ(ierr);
  ierr = PetscLayoutSetLocalSize(map,com->n);CHKERRQ(ierr);
  ierr = PetscLayoutSetSize(map,PETSC_DETERMINE);CHKERRQ(ierr);
  ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
  ierr = PetscLayoutGetSize(map,&com->N);CHKERRQ(ierr);
  ierr = PetscLayoutGetRange(map,&com->rstart,PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);

  /* now set the rstart for each linked array/vector */
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(((PetscObject)dm)->comm,&size);CHKERRQ(ierr);
  while (next) {
    next->rstart = nprev;
    /* redundant arrays occupy local space only on their owning rank */
    if ((rank == next->rank) || next->type != DMCOMPOSITE_ARRAY) nprev += next->n;
    next->grstart = com->rstart + next->rstart;
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* grstart is only meaningful on the owner; broadcast it to everyone */
      ierr = MPI_Bcast(&next->grstart,1,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    } else {
      /* every rank learns every other rank's starting global row for this DM */
      ierr = PetscMalloc(size*sizeof(PetscInt),&next->grstarts);CHKERRQ(ierr);
      ierr = MPI_Allgather(&next->grstart,1,MPIU_INT,next->grstarts,1,MPIU_INT,((PetscObject)dm)->comm);CHKERRQ(ierr);
    }
    next = next->next;
  }
  com->setup = PETSC_TRUE;
  PetscFunctionReturn(0);
}
225 
226 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_Array"
/* Gives the caller a pointer to this link's slice of the packed global vector.
   On the owning rank *array points into vec's storage at offset rstart; on
   other ranks *array is set to 0.  NOTE(review): the pointer is handed out
   after VecRestoreArray(), so this assumes the underlying storage stays valid
   between get/restore pairs — true for standard PETSc vector types. */
PetscErrorCode DMCompositeGetAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (array) {
    if (rank == mine->rank) {
      ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
      *array  = varray + mine->rstart;
      ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
    } else {
      *array = 0;
    }
  }
  PetscFunctionReturn(0);
}
248 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess_DM"
/* Gives the caller a sub-DM global vector whose storage aliases this link's
   slice of the packed global vector (zero-copy via VecPlaceArray).  Must be
   matched by DMCompositeRestoreAccess_DM(), which resets the array and
   returns the vector to the sub-DM's pool. */
PetscErrorCode DMCompositeGetAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
{
  PetscErrorCode ierr;
  PetscScalar    *array;

  PetscFunctionBegin;
  if (global) {
    ierr    = DMGetGlobalVector(mine->dm,global);CHKERRQ(ierr);
    ierr    = VecGetArray(vec,&array);CHKERRQ(ierr);
    /* point the borrowed vector at the slice of vec owned by this link */
    ierr    = VecPlaceArray(*global,array+mine->rstart);CHKERRQ(ierr);
    ierr    = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
265 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess_Array"
/* Counterpart of DMCompositeGetAccess_Array().  Intentionally a no-op: the
   access routine handed out a direct pointer into the vector's storage, so
   there is no state to release here. */
PetscErrorCode DMCompositeRestoreAccess_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar **array)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
273 
274 #undef __FUNCT__
275 #define __FUNCT__ "DMCompositeRestoreAccess_DM"
276 PetscErrorCode DMCompositeRestoreAccess_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec *global)
277 {
278   PetscErrorCode ierr;
279 
280   PetscFunctionBegin;
281   if (global) {
282     ierr = VecResetArray(*global);CHKERRQ(ierr);
283     ierr = DMRestoreGlobalVector(mine->dm,global);CHKERRQ(ierr);
284   }
285   PetscFunctionReturn(0);
286 }
287 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_Array"
/* Copies this link's slice of the packed global vector into the user's
   redundant array and broadcasts it so every rank ends up with the same
   values.  Collective on the DM's communicator. */
PetscErrorCode DMCompositeScatter_Array(DM dm,struct DMCompositeLink *mine,Vec vec,PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    /* only the owner has the authoritative values in the packed vector */
    ierr    = VecGetArray(vec,&varray);CHKERRQ(ierr);
    ierr    = PetscMemcpy(array,varray+mine->rstart,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    ierr    = VecRestoreArray(vec,&varray);CHKERRQ(ierr);
  }
  ierr    = MPI_Bcast(array,mine->n,MPIU_SCALAR,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
306 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter_DM"
/* Scatters this link's slice of the packed global vector into the user's
   sub-DM local vector (including ghost updates) via the sub-DM's
   global-to-local machinery.  A borrowed global vector is aliased onto the
   packed storage so no copy of the global data is made. */
PetscErrorCode DMCompositeScatter_DM(DM dm,struct DMCompositeLink *mine,Vec vec,Vec local)
{
  PetscErrorCode ierr;
  PetscScalar    *array;
  Vec            global;

  PetscFunctionBegin;
  ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
  ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
  ierr = DMGlobalToLocalBegin(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = DMGlobalToLocalEnd(mine->dm,global,INSERT_VALUES,local);CHKERRQ(ierr);
  ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
  /* detach the aliased storage before returning the borrowed vector */
  ierr = VecResetArray(global);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
326 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather_Array"
/* Gathers the user's redundant array back into this link's slice of the
   packed global vector.  INSERT_VALUES: only the owning rank's copy is
   written into the vector.  ADD_VALUES: the arrays from all ranks are summed
   (MPI_Reduce) onto the owner's slice.  Collective on the DM's communicator. */
PetscErrorCode DMCompositeGather_Array(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,const PetscScalar *array)
{
  PetscErrorCode ierr;
  PetscScalar    *varray;   /* valid only on the owning rank */
  PetscMPIInt    rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  if (rank == mine->rank) {
    ierr = VecGetArray(vec,&varray);CHKERRQ(ierr);
    /* aliasing check: gathering from the pointer handed out by GetAccess would be a self-copy */
    if (varray+mine->rstart == array) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You need not DMCompositeGather() into objects obtained via DMCompositeGetAccess()");
  }
  switch (imode) {
  case INSERT_VALUES:
    if (rank == mine->rank) {
      ierr = PetscMemcpy(varray+mine->rstart,array,mine->n*sizeof(PetscScalar));CHKERRQ(ierr);
    }
    break;
  case ADD_VALUES: {
    PetscInt          i;
    const PetscScalar *source;
    PetscScalar       *buffer,*dest;
    if (rank == mine->rank) {
      dest = &varray[mine->rstart];
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      /* sum in place: pre-add the root's own contribution into dest */
      buffer = dest;
      source = MPI_IN_PLACE;
#else
      /* no MPI_IN_PLACE: stage the root's contribution in a temporary buffer */
      ierr = PetscMalloc(mine->n*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      for (i=0; i<mine->n; i++) buffer[i] = varray[mine->rstart+i] + array[i];
    } else {
      source = array;
      dest   = PETSC_NULL;
    }
    /* reduce everyone's array onto the owner's slice of the packed vector */
    ierr = MPI_Reduce((void*)source,dest,mine->n,MPIU_SCALAR,MPI_SUM,mine->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == mine->rank) {ierr = PetscFree(source);CHKERRQ(ierr);}
#endif
  } break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"imode");
  }
  if (rank == mine->rank) {ierr = VecRestoreArray(vec,&varray);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}
375 
376 #undef __FUNCT__
377 #define __FUNCT__ "DMCompositeGather_DM"
378 PetscErrorCode DMCompositeGather_DM(DM dm,struct DMCompositeLink *mine,Vec vec,InsertMode imode,Vec local)
379 {
380   PetscErrorCode ierr;
381   PetscScalar    *array;
382   Vec            global;
383 
384   PetscFunctionBegin;
385   ierr = DMGetGlobalVector(mine->dm,&global);CHKERRQ(ierr);
386   ierr = VecGetArray(vec,&array);CHKERRQ(ierr);
387   ierr = VecPlaceArray(global,array+mine->rstart);CHKERRQ(ierr);
388   ierr = DMLocalToGlobalBegin(mine->dm,local,imode,global);CHKERRQ(ierr);
389   ierr = DMLocalToGlobalEnd(mine->dm,local,imode,global);CHKERRQ(ierr);
390   ierr = VecRestoreArray(vec,&array);CHKERRQ(ierr);
391   ierr = VecResetArray(global);CHKERRQ(ierr);
392   ierr = DMRestoreGlobalVector(mine->dm,&global);CHKERRQ(ierr);
393   PetscFunctionReturn(0);
394 }
395 
396 /* ----------------------------------------------------------------------------------*/
397 
398 #include <stdarg.h>
399 
400 #undef __FUNCT__
401 #define __FUNCT__ "DMCompositeGetNumberDM"
402 /*@C
403     DMCompositeGetNumberDM - Get's the number of DM objects in the DMComposite
404        representation.
405 
406     Not Collective
407 
408     Input Parameter:
409 .    dm - the packer object
410 
411     Output Parameter:
412 .     nDM - the number of DMs
413 
414     Level: beginner
415 
416 @*/
417 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetNumberDM(DM dm,PetscInt *nDM)
418 {
419   DM_Composite *com = (DM_Composite*)dm->data;
420   PetscFunctionBegin;
421   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
422   *nDM = com->nDM;
423   PetscFunctionReturn(0);
424 }
425 
426 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGetAccess"
/*@C
    DMCompositeGetAccess - Allows one to access the individual packed vectors in their global
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors);
           one PetscScalar** per redundant array and one Vec* per sub-DM,
           in the order the objects were added to the DMComposite

    Notes: Use DMCompositeRestoreAccess() to return the vectors when you no longer need them

    Level: advanced

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time; the vararg type is
     dictated by the link type, so the caller's argument order must match the
     order in which objects were added */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeGetAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeGetAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
479 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeRestoreAccess"
/*@C
    DMCompositeRestoreAccess - Returns the vectors obtained with DMCompositeGetAccess()
       representation.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential or parallel objects (arrays or vectors),
           in the same order as passed to DMCompositeGetAccess()

    Level: advanced

.seealso  DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeScatter(),
         DMCompositeRestoreAccess(), DMCompositeGetAccess()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreAccess(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time; mirrors the traversal
     in DMCompositeGetAccess() */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar **array;
      array = va_arg(Argp, PetscScalar**);
      ierr  = DMCompositeRestoreAccess_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec *vec;
      vec  = va_arg(Argp, Vec*);
      ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
534 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeScatter"
/*@C
    DMCompositeScatter - Scatters from a global packed vector into its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
-    ... - the individual sequential objects (arrays or vectors); one
           PetscScalar* per redundant array and one Vec per sub-DM, in the
           order the objects were added to the DMComposite

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeScatter(DM dm,Vec gvec,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscInt               cnt = 3; /* argument position of the first vararg, used in validation error messages */
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time */
  va_start(Argp,gvec);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      ierr = DMCompositeScatter_Array(dm,next,gvec,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      PetscValidHeaderSpecific(vec,VEC_CLASSID,cnt);
      ierr = DMCompositeScatter_DM(dm,next,gvec,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    cnt++;
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
591 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeGather"
/*@C
    DMCompositeGather - Gathers into a global packed vector from its individual local vectors

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    gvec - the global vector
.    imode - INSERT_VALUES or ADD_VALUES
-    ... - the individual sequential objects (arrays or vectors), in the
           order the objects were added to the DMComposite

    Level: advanced

.seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGather(DM dm,Vec gvec,InsertMode imode,...)
{
  va_list                Argp;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
  next = com->next;
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }

  /* loop over packed objects, handling one at at time */
  va_start(Argp,imode);
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      PetscScalar *array;
      array = va_arg(Argp, PetscScalar*);
      ierr  = DMCompositeGather_Array(dm,next,gvec,imode,array);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      Vec vec;
      vec = va_arg(Argp, Vec);
      PetscValidHeaderSpecific(vec,VEC_CLASSID,3);
      ierr = DMCompositeGather_DM(dm,next,gvec,imode,vec);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    next = next->next;
  }
  va_end(Argp);
  PetscFunctionReturn(0);
}
646 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddArray"
/*@C
    DMCompositeAddArray - adds a "redundant" array to a DMComposite. The array values will
       be stored in part of the array on process orank.

    Collective on DMComposite

    Input Parameter:
+    dm - the packer object
.    orank - the process on which the array entries officially live, this number must be
             the same on all processes.
-    n - the length of the array

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddDM(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddArray(DM dm,PetscMPIInt orank,PetscInt n)
{
  struct DMCompositeLink *mine,*next;
  PetscErrorCode         ierr;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  next = com->next;
  /* links may not be added after the layout has been finalized */
  if (com->setup) {
    SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add an array once you have used the DMComposite");
  }
#if defined(PETSC_USE_DEBUG)
  /* debug-only collective check that every rank passed the same orank */
  {
    PetscMPIInt orankmax;
    ierr = MPI_Allreduce(&orank,&orankmax,1,MPI_INT,MPI_MAX,((PetscObject)dm)->comm);CHKERRQ(ierr);
    if (orank != orankmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"orank %d must be equal on all processes, another process has value %d",orank,orankmax);
  }
#endif

  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  /* create new link */
  ierr                = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  mine->n             = n;
  mine->rank          = orank;
  mine->dm            = PETSC_NULL;
  mine->type          = DMCOMPOSITE_ARRAY;
  mine->next          = PETSC_NULL;
  /* only the owning rank contributes the array to its local size */
  if (rank == mine->rank) {com->n += n;com->nmine++;}

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nredundant++;
  PetscFunctionReturn(0);
}
709 
#undef __FUNCT__
#define __FUNCT__ "DMCompositeAddDM"
/*@C
    DMCompositeAddDM - adds a DM vector to a DMComposite

    Collective on DMComposite

    Input Parameter:
+    dmc - the packer object
-    dm - the DM object, if the DM is a da you will need to cast it with a (DM)

    Level: advanced

.seealso DMDestroy(), DMCompositeGather(), DMCompositeAddArray(), DMCreateGlobalVector(),
         DMCompositeScatter(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
         DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()

@*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeAddDM(DM dmc,DM dm)
{
  PetscErrorCode         ierr;
  PetscInt               n;
  struct DMCompositeLink *mine,*next;
  Vec                    global;
  DM_Composite           *com = (DM_Composite*)dmc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dmc,DM_CLASSID,1);
  PetscValidHeaderSpecific(dm,DM_CLASSID,2);
  next = com->next;
  if (com->setup) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot add a DM once you have used the DMComposite");

  /* create new link; the composite takes a reference on the sub-DM
     (released in DMDestroy_Composite) */
  ierr = PetscNew(struct DMCompositeLink,&mine);CHKERRQ(ierr);
  ierr = PetscObjectReference((PetscObject)dm);CHKERRQ(ierr);
  /* borrow a global vector just to query the sub-DM's local size */
  ierr = DMGetGlobalVector(dm,&global);CHKERRQ(ierr);
  ierr = VecGetLocalSize(global,&n);CHKERRQ(ierr);
  ierr = DMRestoreGlobalVector(dm,&global);CHKERRQ(ierr);
  mine->n      = n;
  mine->dm     = dm;
  mine->type   = DMCOMPOSITE_DM;
  mine->next   = PETSC_NULL;
  com->n       += n;

  /* add to end of list */
  if (!next) {
    com->next = mine;
  } else {
    while (next->next) next = next->next;
    next->next = mine;
  }
  com->nDM++;
  com->nmine++;
  PetscFunctionReturn(0);
}
765 
766 extern PetscErrorCode PETSCDM_DLLEXPORT VecView_MPI(Vec,PetscViewer);
767 EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "VecView_DMComposite"
/* View operation installed on vectors created by DMCreateGlobalVector_Composite.
   For non-draw viewers it falls back to the plain MPI vector viewer; for draw
   viewers it views each sub-DM's piece separately (redundant arrays are
   currently skipped), shifting the draw window base for each piece. */
PetscErrorCode PETSCDM_DLLEXPORT VecView_DMComposite(Vec gvec,PetscViewer viewer)
{
  DM                     dm;
  PetscErrorCode         ierr;
  struct DMCompositeLink *next;
  PetscBool              isdraw;
  DM_Composite           *com;

  PetscFunctionBegin;
  /* recover the DMComposite that was composed with the vector at creation time */
  ierr = PetscObjectQuery((PetscObject)gvec,"DMComposite",(PetscObject*)&dm);CHKERRQ(ierr);
  if (!dm) SETERRQ(((PetscObject)gvec)->comm,PETSC_ERR_ARG_WRONG,"Vector not generated from a DMComposite");
  com = (DM_Composite*)dm->data;
  next = com->next;

  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr);
  if (!isdraw) {
    /* do I really want to call this? */
    ierr = VecView_MPI(gvec,viewer);CHKERRQ(ierr);
  } else {
    PetscInt cnt = 0;   /* total base shift applied, undone at the end */

    /* loop over packed objects, handling one at at time */
    while (next) {
      if (next->type == DMCOMPOSITE_ARRAY) {
        PetscScalar *array;
        ierr  = DMCompositeGetAccess_Array(dm,next,gvec,&array);CHKERRQ(ierr);

        /*skip it for now */
      } else if (next->type == DMCOMPOSITE_DM) {
        Vec      vec;
        PetscInt bs;

        ierr = DMCompositeGetAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        ierr = VecView(vec,viewer);CHKERRQ(ierr);
        ierr = VecGetBlockSize(vec,&bs);CHKERRQ(ierr);
        ierr = DMCompositeRestoreAccess_DM(dm,next,gvec,&vec);CHKERRQ(ierr);
        /* shift the draw base so the next piece draws into fresh windows */
        ierr = PetscViewerDrawBaseAdd(viewer,bs);CHKERRQ(ierr);
        cnt += bs;
      } else {
        SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
      }
      next = next->next;
    }
    /* restore the viewer's original base */
    ierr = PetscViewerDrawBaseAdd(viewer,-cnt);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
817 EXTERN_C_END
818 
819 
#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Composite"
/* Creates the packed parallel global vector (local size com->n, global size
   com->N), composes the DM with it so VecView can find it, and installs the
   composite-aware view operation. */
PetscErrorCode PETSCDM_DLLEXPORT DMCreateGlobalVector_Composite(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* layout (com->n, com->N) is only valid after setup */
  if (!com->setup) {
    ierr = DMSetUp(dm);CHKERRQ(ierr);
  }
  ierr = VecCreateMPI(((PetscObject)dm)->comm,com->n,com->N,gvec);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)*gvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
  ierr = VecSetOperation(*gvec,VECOP_VIEW,(void(*)(void))VecView_DMComposite);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
837 
838 #undef __FUNCT__
839 #define __FUNCT__ "DMCreateLocalVector_Composite"
840 PetscErrorCode PETSCDM_DLLEXPORT DMCreateLocalVector_Composite(DM dm,Vec *lvec)
841 {
842   PetscErrorCode         ierr;
843   DM_Composite           *com = (DM_Composite*)dm->data;
844 
845   PetscFunctionBegin;
846   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
847   if (!com->setup) {
848     ierr = DMSetUp(dm);CHKERRQ(ierr);
849   }
850   ierr = VecCreateSeq(((PetscObject)dm)->comm,com->nghost,lvec);CHKERRQ(ierr);
851   ierr = PetscObjectCompose((PetscObject)*lvec,"DMComposite",(PetscObject)dm);CHKERRQ(ierr);
852   PetscFunctionReturn(0);
853 }
854 
855 #undef __FUNCT__
856 #define __FUNCT__ "DMCompositeGetISLocalToGlobalMappings"
857 /*@C
858     DMCompositeGetISLocalToGlobalMappings - gets an ISLocalToGlobalMapping for each DM/array in the DMComposite, maps to the composite global space
859 
860     Collective on DMComposite
861 
862     Input Parameter:
863 .    dm - the packer object
864 
865     Output Parameters:
866 .    ltogs - the individual mappings for each packed vector/array. Note that this includes
867            all the ghost points that individual ghosted DMDA's may have. Also each process has an
868            mapping for EACH redundant array (not just the local redundant arrays).
869 
870     Level: advanced
871 
872     Notes:
873        Each entry of ltogs should be destroyed with ISLocalToGlobalMappingDestroy(), the ltogs array should be freed with PetscFree().
874 
875 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
876          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
877          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()
878 
879 @*/
/*
   Builds one ISLocalToGlobalMapping per packed link, mapping that link's LOCAL
   numbering (including any DMDA ghost points) into the packed composite GLOBAL
   numbering.  Caller owns the mappings and the array (see manual page above).
*/
PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetISLocalToGlobalMappings(DM dm,ISLocalToGlobalMapping **ltogs)
{
  PetscErrorCode         ierr;
  PetscInt               i,*idx,n,cnt;
  struct DMCompositeLink *next;
  PetscMPIInt            rank;
  DM_Composite           *com = (DM_Composite*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  /* one mapping per link: every process gets one for EACH redundant array and each DM */
  ierr = PetscMalloc((com->nDM+com->nredundant)*sizeof(ISLocalToGlobalMapping),ltogs);CHKERRQ(ierr);
  next = com->next;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time */
  cnt = 0;
  while (next) {
    if (next->type == DMCOMPOSITE_ARRAY) {
      /* only the owning rank knows the array's packed-global offset; broadcast the
         index list so every process ends up with the same mapping */
      ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      if (rank == next->rank) {
        for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
      }
      ierr = MPI_Bcast(idx,next->n,MPIU_INT,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
      /* PETSC_OWN_POINTER: the mapping takes ownership of idx, so no free here */
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
    } else if (next->type == DMCOMPOSITE_DM) {
      ISLocalToGlobalMapping ltog;
      PetscMPIInt            size;
      const PetscInt         *suboff;
      Vec                    global;

      /* Get sub-DM global indices for each local dof */
      ierr = DMDAGetISLocalToGlobalMapping(next->dm,&ltog);CHKERRQ(ierr); /* This function should become generic to DM */
      ierr = ISLocalToGlobalMappingGetSize(ltog,&n);CHKERRQ(ierr);
      ierr = PetscMalloc(n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
      for (i=0; i<n; i++) idx[i] = i;
      ierr = ISLocalToGlobalMappingApply(ltog,n,idx,idx);CHKERRQ(ierr); /* This would be nicer with an ISLocalToGlobalMappingGetIndices() */

      /* Get the offsets for the sub-DM global vector */
      ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
      ierr = VecGetOwnershipRanges(global,&suboff);CHKERRQ(ierr);
      ierr = MPI_Comm_size(((PetscObject)global)->comm,&size);CHKERRQ(ierr);

      /* Shift the sub-DM definition of the global space to the composite global space */
      for (i=0; i<n; i++) {
        PetscInt subi = idx[i],lo = 0,hi = size,t;
        /* Binary search to find which rank owns subi */
        while (hi-lo > 1) {
          t = lo + (hi-lo)/2;
          if (suboff[t] > subi) hi = t;
          else                  lo = t;
        }
        /* rank lo's sub-DM rows begin at suboff[lo] and at grstarts[lo] in the composite */
        idx[i] = subi - suboff[lo] + next->grstarts[lo];
      }
      ierr = ISLocalToGlobalMappingCreate(((PetscObject)dm)->comm,n,idx,PETSC_OWN_POINTER,&(*ltogs)[cnt]);CHKERRQ(ierr);
      ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
    } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    next = next->next;
    cnt++;
  }
  PetscFunctionReturn(0);
}
941 
942 #undef __FUNCT__
943 #define __FUNCT__ "DMCompositeGetLocalISs"
944 /*@C
945    DMCompositeGetLocalISs - Gets index sets for each DM/array component of a composite local vector
946 
947    Not Collective
948 
949    Input Arguments:
950 . dm - composite DM
951 
952    Output Arguments:
. is - array of serial index sets for each component of the DMComposite
954 
955    Level: intermediate
956 
957    Notes:
958    At present, a composite local vector does not normally exist.  This function is used to provide index sets for
959    MatGetLocalSubMatrix().  In the future, the scatters for each entry in the DMComposite may be be merged into a single
960    scatter to a composite local vector.
961 
962    To get the composite global indices at all local points (including ghosts), use DMCompositeGetISLocalToGlobalMappings().
963 
964    To get index sets for pieces of the composite global vector, use DMCompositeGetGlobalISs().
965 
966    Each returned IS should be destroyed with ISDestroy(), the array should be freed with PetscFree().
967 
968 .seealso: DMCompositeGetGlobalISs(), DMCompositeGetISLocalToGlobalMappings(), MatGetLocalSubMatrix(), MatCreateLocalRef()
969 @*/
970 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalISs(DM dm,IS **is)
971 {
972   PetscErrorCode         ierr;
973   DM_Composite           *com = (DM_Composite*)dm->data;
974   struct DMCompositeLink *link;
975   PetscInt cnt,start;
976 
977   PetscFunctionBegin;
978   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
979   PetscValidPointer(is,2);
980   ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
981   for (cnt=0,start=0,link=com->next; link; start+=link->n,cnt++,link=link->next) {
982     ierr = ISCreateStride(PETSC_COMM_SELF,link->n,start,1,&(*is)[cnt]);CHKERRQ(ierr);
983     if (link->type == DMCOMPOSITE_DM) {
984       Vec lvec;
985       PetscInt bs;
986       ierr = DMGetLocalVector(link->dm,&lvec);CHKERRQ(ierr);
987       ierr = VecGetBlockSize(lvec,&bs);CHKERRQ(ierr);
988       ierr = DMRestoreLocalVector(link->dm,&lvec);CHKERRQ(ierr);
989       ierr = ISSetBlockSize((*is)[cnt],bs);CHKERRQ(ierr);
990     }
991   }
992   PetscFunctionReturn(0);
993 }
994 
995 #undef __FUNCT__
996 #define __FUNCT__ "DMCompositeGetGlobalISs"
997 /*@C
998     DMCompositeGetGlobalISs - Gets the index sets for each composed object
999 
1000     Collective on DMComposite
1001 
1002     Input Parameter:
1003 .    dm - the packer object
1004 
1005     Output Parameters:
1006 .    is - the array of index sets
1007 
1008     Level: advanced
1009 
1010     Notes:
1011        The is entries should be destroyed with ISDestroy(), the is array should be freed with PetscFree()
1012 
1013        The number of IS on each process will/may be different when redundant arrays are used
1014 
1015        These could be used to extract a subset of vector entries for a "multi-physics" preconditioner
1016 
1017        Use DMCompositeGetLocalISs() for index sets in the packed local numbering, and
       DMCompositeGetISLocalToGlobalMappings() to map local sub-DM (including ghost) indices to packed global
1019        indices.
1020 
1021 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1022          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetAccess(), DMCompositeScatter(),
1023          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(),DMCompositeGetEntries()
1024 
1025 @*/
1026 
1027 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetGlobalISs(DM dm,IS *is[])
1028 {
1029   PetscErrorCode         ierr;
1030   PetscInt               cnt = 0,*idx,i;
1031   struct DMCompositeLink *next;
1032   PetscMPIInt            rank;
1033   DM_Composite           *com = (DM_Composite*)dm->data;
1034 
1035   PetscFunctionBegin;
1036   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1037   ierr = PetscMalloc(com->nmine*sizeof(IS),is);CHKERRQ(ierr);
1038   next = com->next;
1039   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1040 
1041   /* loop over packed objects, handling one at at time */
1042   while (next) {
1043 
1044     if (next->type == DMCOMPOSITE_ARRAY) {
1045 
1046       if (rank == next->rank) {
1047         ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1048         for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
1049         ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
1050         cnt++;
1051       }
1052 
1053     } else if (next->type == DMCOMPOSITE_DM) {
1054       ierr = PetscMalloc(next->n*sizeof(PetscInt),&idx);CHKERRQ(ierr);
1055       for (i=0; i<next->n; i++) idx[i] = next->grstart + i;
1056       ierr = ISCreateGeneral(((PetscObject)dm)->comm,next->n,idx,PETSC_OWN_POINTER,&(*is)[cnt]);CHKERRQ(ierr);
1057       cnt++;
1058     } else {
1059       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1060     }
1061     next = next->next;
1062   }
1063   PetscFunctionReturn(0);
1064 }
1065 
1066 /* -------------------------------------------------------------------------------------*/
1067 #undef __FUNCT__
1068 #define __FUNCT__ "DMCompositeGetLocalVectors_Array"
1069 PetscErrorCode DMCompositeGetLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
1070 {
1071   PetscErrorCode ierr;
1072   PetscFunctionBegin;
1073   if (array) {
1074     ierr = PetscMalloc(mine->n*sizeof(PetscScalar),array);CHKERRQ(ierr);
1075   }
1076   PetscFunctionReturn(0);
1077 }
1078 
1079 #undef __FUNCT__
1080 #define __FUNCT__ "DMCompositeGetLocalVectors_DM"
1081 PetscErrorCode DMCompositeGetLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
1082 {
1083   PetscErrorCode ierr;
1084   PetscFunctionBegin;
1085   if (local) {
1086     ierr = DMGetLocalVector(mine->dm,local);CHKERRQ(ierr);
1087   }
1088   PetscFunctionReturn(0);
1089 }
1090 
1091 #undef __FUNCT__
1092 #define __FUNCT__ "DMCompositeRestoreLocalVectors_Array"
1093 PetscErrorCode DMCompositeRestoreLocalVectors_Array(DM dm,struct DMCompositeLink *mine,PetscScalar **array)
1094 {
1095   PetscErrorCode ierr;
1096   PetscFunctionBegin;
1097   if (array) {
1098     ierr = PetscFree(*array);CHKERRQ(ierr);
1099   }
1100   PetscFunctionReturn(0);
1101 }
1102 
1103 #undef __FUNCT__
1104 #define __FUNCT__ "DMCompositeRestoreLocalVectors_DM"
1105 PetscErrorCode DMCompositeRestoreLocalVectors_DM(DM dm,struct DMCompositeLink *mine,Vec *local)
1106 {
1107   PetscErrorCode ierr;
1108   PetscFunctionBegin;
1109   if (local) {
1110     ierr = DMRestoreLocalVector(mine->dm,local);CHKERRQ(ierr);
1111   }
1112   PetscFunctionReturn(0);
1113 }
1114 
1115 #undef __FUNCT__
1116 #define __FUNCT__ "DMCompositeGetLocalVectors"
1117 /*@C
1118     DMCompositeGetLocalVectors - Gets local vectors and arrays for each part of a DMComposite.
1119        Use DMCompositeRestoreLocalVectors() to return them.
1120 
1121     Not Collective
1122 
1123     Input Parameter:
1124 .    dm - the packer object
1125 
1126     Output Parameter:
1127 .    ... - the individual sequential objects (arrays or vectors)
1128 
1129     Level: advanced
1130 
1131 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1132          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
1133          DMCompositeRestoreLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1134 
1135 @*/
1136 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetLocalVectors(DM dm,...)
1137 {
1138   va_list                Argp;
1139   PetscErrorCode         ierr;
1140   struct DMCompositeLink *next;
1141   DM_Composite           *com = (DM_Composite*)dm->data;
1142 
1143   PetscFunctionBegin;
1144   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1145   next = com->next;
1146   /* loop over packed objects, handling one at at time */
1147   va_start(Argp,dm);
1148   while (next) {
1149     if (next->type == DMCOMPOSITE_ARRAY) {
1150       PetscScalar **array;
1151       array = va_arg(Argp, PetscScalar**);
1152       ierr = DMCompositeGetLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
1153     } else if (next->type == DMCOMPOSITE_DM) {
1154       Vec *vec;
1155       vec = va_arg(Argp, Vec*);
1156       ierr = DMCompositeGetLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
1157     } else {
1158       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1159     }
1160     next = next->next;
1161   }
1162   va_end(Argp);
1163   PetscFunctionReturn(0);
1164 }
1165 
1166 #undef __FUNCT__
1167 #define __FUNCT__ "DMCompositeRestoreLocalVectors"
1168 /*@C
1169     DMCompositeRestoreLocalVectors - Restores local vectors and arrays for each part of a DMComposite.
1170 
1171     Not Collective
1172 
1173     Input Parameter:
1174 .    dm - the packer object
1175 
1176     Output Parameter:
1177 .    ... - the individual sequential objects (arrays or vectors)
1178 
1179     Level: advanced
1180 
1181 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1182          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
1183          DMCompositeGetLocalVectors(), DMCompositeScatter(), DMCompositeGetEntries()
1184 
1185 @*/
1186 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeRestoreLocalVectors(DM dm,...)
1187 {
1188   va_list                Argp;
1189   PetscErrorCode         ierr;
1190   struct DMCompositeLink *next;
1191   DM_Composite           *com = (DM_Composite*)dm->data;
1192 
1193   PetscFunctionBegin;
1194   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1195   next = com->next;
1196   /* loop over packed objects, handling one at at time */
1197   va_start(Argp,dm);
1198   while (next) {
1199     if (next->type == DMCOMPOSITE_ARRAY) {
1200       PetscScalar **array;
1201       array = va_arg(Argp, PetscScalar**);
1202       ierr = DMCompositeRestoreLocalVectors_Array(dm,next,array);CHKERRQ(ierr);
1203     } else if (next->type == DMCOMPOSITE_DM) {
1204       Vec *vec;
1205       vec = va_arg(Argp, Vec*);
1206       ierr = DMCompositeRestoreLocalVectors_DM(dm,next,vec);CHKERRQ(ierr);
1207     } else {
1208       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1209     }
1210     next = next->next;
1211   }
1212   va_end(Argp);
1213   PetscFunctionReturn(0);
1214 }
1215 
1216 /* -------------------------------------------------------------------------------------*/
1217 #undef __FUNCT__
1218 #define __FUNCT__ "DMCompositeGetEntries_Array"
1219 PetscErrorCode DMCompositeGetEntries_Array(DM dm,struct DMCompositeLink *mine,PetscInt *n)
1220 {
1221   PetscFunctionBegin;
1222   if (n) *n = mine->n;
1223   PetscFunctionReturn(0);
1224 }
1225 
1226 #undef __FUNCT__
1227 #define __FUNCT__ "DMCompositeGetEntries_DM"
1228 PetscErrorCode DMCompositeGetEntries_DM(DM dmi,struct DMCompositeLink *mine,DM *dm)
1229 {
1230   PetscFunctionBegin;
1231   if (dm) *dm = mine->dm;
1232   PetscFunctionReturn(0);
1233 }
1234 
1235 #undef __FUNCT__
1236 #define __FUNCT__ "DMCompositeGetEntries"
1237 /*@C
1238     DMCompositeGetEntries - Gets the DM, redundant size, etc for each entry in a DMComposite.
1239 
1240     Not Collective
1241 
1242     Input Parameter:
1243 .    dm - the packer object
1244 
1245     Output Parameter:
.    ... - the individual entries (DMs or integer sizes)
1247 
1248     Level: advanced
1249 
1250 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCreateGlobalVector(),
1251          DMCompositeGather(), DMCompositeCreate(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess(),
1252          DMCompositeRestoreLocalVectors(), DMCompositeGetLocalVectors(),  DMCompositeScatter(),
1253          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors()
1254 
1255 @*/
1256 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeGetEntries(DM dm,...)
1257 {
1258   va_list                Argp;
1259   PetscErrorCode         ierr;
1260   struct DMCompositeLink *next;
1261   DM_Composite           *com = (DM_Composite*)dm->data;
1262 
1263   PetscFunctionBegin;
1264   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1265   next = com->next;
1266   /* loop over packed objects, handling one at at time */
1267   va_start(Argp,dm);
1268   while (next) {
1269     if (next->type == DMCOMPOSITE_ARRAY) {
1270       PetscInt *n;
1271       n = va_arg(Argp, PetscInt*);
1272       ierr = DMCompositeGetEntries_Array(dm,next,n);CHKERRQ(ierr);
1273     } else if (next->type == DMCOMPOSITE_DM) {
1274       DM *dmn;
1275       dmn = va_arg(Argp, DM*);
1276       ierr = DMCompositeGetEntries_DM(dm,next,dmn);CHKERRQ(ierr);
1277     } else {
1278       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1279     }
1280     next = next->next;
1281   }
1282   va_end(Argp);
1283   PetscFunctionReturn(0);
1284 }
1285 
1286 #undef __FUNCT__
1287 #define __FUNCT__ "DMRefine_Composite"
1288 PetscErrorCode PETSCDM_DLLEXPORT DMRefine_Composite(DM dmi,MPI_Comm comm,DM *fine)
1289 {
1290   PetscErrorCode         ierr;
1291   struct DMCompositeLink *next;
1292   DM_Composite           *com = (DM_Composite*)dmi->data;
1293   DM                     dm;
1294 
1295   PetscFunctionBegin;
1296   PetscValidHeaderSpecific(dmi,DM_CLASSID,1);
1297   next = com->next;
1298   ierr = DMCompositeCreate(comm,fine);CHKERRQ(ierr);
1299 
1300   /* loop over packed objects, handling one at at time */
1301   while (next) {
1302     if (next->type == DMCOMPOSITE_ARRAY) {
1303       ierr = DMCompositeAddArray(*fine,next->rank,next->n);CHKERRQ(ierr);
1304     } else if (next->type == DMCOMPOSITE_DM) {
1305       ierr = DMRefine(next->dm,comm,&dm);CHKERRQ(ierr);
1306       ierr = DMCompositeAddDM(*fine,dm);CHKERRQ(ierr);
1307       ierr = PetscObjectDereference((PetscObject)dm);CHKERRQ(ierr);
1308     } else {
1309       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1310     }
1311     next = next->next;
1312   }
1313   PetscFunctionReturn(0);
1314 }
1315 
1316 #include "petscmat.h"
1317 
/* One node per DM link of the composite: the interpolation matrix between the
   corresponding coarse and fine sub-DMs (redundant arrays get no matrix). */
struct MatPackLink {
  Mat                A;      /* interpolation for one sub-DM pair */
  struct MatPackLink *next;  /* next DM's interpolation, in packing order */
};
1322 
/* Shell-matrix context for the packed interpolation built by
   DMGetInterpolation_Composite(). */
struct MatPack {
  DM                 right,left;  /* right = coarse (input space), left = fine (output space) */
  struct MatPackLink *next;       /* list of per-DM interpolation matrices */
};
1327 
1328 #undef __FUNCT__
1329 #define __FUNCT__ "MatMultBoth_Shell_Pack"
/*
   Shared kernel for MatMult/MatMultAdd of the packed interpolation shell matrix:
   y (+)= A*x, applied link by link.  x is laid out by the right (coarse) composite
   and y by the left (fine) composite; add selects accumulate (PETSC_TRUE) versus
   overwrite (PETSC_FALSE).  The two composites are assumed packed in the same order.
*/
PetscErrorCode MatMultBoth_Shell_Pack(Mat A,Vec x,Vec y,PetscBool  add)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  PetscInt               i;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft = (DM_Composite*)mpack->left->data;
  /* walk the two composites (and the per-DM matrix list) in lock step */
  xnext = comright->next;
  ynext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      /* redundant array: identity copy/accumulate, done only on the owning rank */
      if (rank == xnext->rank) {
        ierr    = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecGetArray(y,&yarray);CHKERRQ(ierr);
        if (add) {
          for (i=0; i<xnext->n; i++) {
            yarray[ynext->rstart+i] += xarray[xnext->rstart+i];
          }
        } else {
          ierr    = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        }
        ierr    = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap the raw slices of x and y in the sub-DMs' work vectors (no copy)
         and apply this DM's interpolation matrix */
      ierr  = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr  = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr  = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      if (add) {
        ierr  = MatMultAdd(anext->A,xglobal,yglobal,yglobal);CHKERRQ(ierr);
      } else {
        ierr  = MatMult(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      }
      ierr  = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      /* detach the borrowed arrays before returning the work vectors */
      ierr  = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr  = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next; /* matrices exist only for DM links */
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}
1394 
1395 #undef __FUNCT__
1396 #define __FUNCT__ "MatMultAdd_Shell_Pack"
1397 PetscErrorCode MatMultAdd_Shell_Pack(Mat A,Vec x,Vec y,Vec z)
1398 {
1399   PetscErrorCode ierr;
1400   PetscFunctionBegin;
1401   if (z != y) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Handles y == z only");
1402   ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_TRUE);CHKERRQ(ierr);
1403   PetscFunctionReturn(0);
1404 }
1405 
1406 #undef __FUNCT__
1407 #define __FUNCT__ "MatMult_Shell_Pack"
1408 PetscErrorCode MatMult_Shell_Pack(Mat A,Vec x,Vec y)
1409 {
1410   PetscErrorCode ierr;
1411   PetscFunctionBegin;
1412   ierr = MatMultBoth_Shell_Pack(A,x,y,PETSC_FALSE);CHKERRQ(ierr);
1413   PetscFunctionReturn(0);
1414 }
1415 
1416 #undef __FUNCT__
1417 #define __FUNCT__ "MatMultTranspose_Shell_Pack"
/*
   MatMultTranspose for the packed interpolation shell matrix: y = A^T x, link by
   link.  Roles swap relative to MatMultBoth_Shell_Pack: here x is laid out by the
   left (fine) composite and y by the right (coarse) composite.
*/
PetscErrorCode MatMultTranspose_Shell_Pack(Mat A,Vec x,Vec y)
{
  struct MatPack         *mpack;
  struct DMCompositeLink *xnext,*ynext;
  struct MatPackLink     *anext;
  PetscScalar            *xarray,*yarray;
  PetscErrorCode         ierr;
  Vec                    xglobal,yglobal;
  PetscMPIInt            rank;
  DM_Composite           *comright;
  DM_Composite           *comleft;

  PetscFunctionBegin;
  ierr  = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)mpack->right)->comm,&rank);CHKERRQ(ierr);
  comright = (DM_Composite*)mpack->right->data;
  comleft = (DM_Composite*)mpack->left->data;
  /* note the swap: output y follows the right (coarse) composite, input x the left */
  ynext = comright->next;
  xnext = comleft->next;
  anext = mpack->next;

  while (xnext) {
    if (xnext->type == DMCOMPOSITE_ARRAY) {
      /* redundant array: identity copy, done only on the rank that owns the coarse slice */
      if (rank == ynext->rank) {
        ierr    = VecGetArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecGetArray(y,&yarray);CHKERRQ(ierr);
        ierr    = PetscMemcpy(yarray+ynext->rstart,xarray+xnext->rstart,xnext->n*sizeof(PetscScalar));CHKERRQ(ierr);
        ierr    = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
        ierr    = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      }
    } else if (xnext->type == DMCOMPOSITE_DM) {
      /* wrap the raw slices of x and y in the sub-DMs' work vectors (no copy)
         and apply this DM's interpolation transposed */
      ierr  = VecGetArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecGetArray(y,&yarray);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMGetGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      ierr  = VecPlaceArray(xglobal,xarray+xnext->rstart);CHKERRQ(ierr);
      ierr  = VecPlaceArray(yglobal,yarray+ynext->rstart);CHKERRQ(ierr);
      ierr  = MatMultTranspose(anext->A,xglobal,yglobal);CHKERRQ(ierr);
      ierr  = VecRestoreArray(x,&xarray);CHKERRQ(ierr);
      ierr  = VecRestoreArray(y,&yarray);CHKERRQ(ierr);
      /* detach the borrowed arrays before returning the work vectors */
      ierr  = VecResetArray(xglobal);CHKERRQ(ierr);
      ierr  = VecResetArray(yglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(xnext->dm,&xglobal);CHKERRQ(ierr);
      ierr  = DMRestoreGlobalVector(ynext->dm,&yglobal);CHKERRQ(ierr);
      anext = anext->next; /* matrices exist only for DM links */
    } else {
      SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    xnext = xnext->next;
    ynext = ynext->next;
  }
  PetscFunctionReturn(0);
}
1471 
1472 #undef __FUNCT__
1473 #define __FUNCT__ "MatDestroy_Shell_Pack"
1474 PetscErrorCode MatDestroy_Shell_Pack(Mat A)
1475 {
1476   struct MatPack     *mpack;
1477   struct MatPackLink *anext,*oldanext;
1478   PetscErrorCode     ierr;
1479 
1480   PetscFunctionBegin;
1481   ierr  = MatShellGetContext(A,(void**)&mpack);CHKERRQ(ierr);
1482   anext = mpack->next;
1483 
1484   while (anext) {
1485     ierr     = MatDestroy(anext->A);CHKERRQ(ierr);
1486     oldanext = anext;
1487     anext    = anext->next;
1488     ierr     = PetscFree(oldanext);CHKERRQ(ierr);
1489   }
1490   ierr = PetscFree(mpack);CHKERRQ(ierr);
1491   ierr = PetscObjectChangeTypeName((PetscObject)A,0);CHKERRQ(ierr);
1492   PetscFunctionReturn(0);
1493 }
1494 
1495 #undef __FUNCT__
1496 #define __FUNCT__ "DMGetInterpolation_Composite"
/*
   Builds the coarse-to-fine interpolation for a pair of DMComposites as a MATSHELL
   whose context holds one interpolation matrix per DM link; redundant arrays are
   interpolated as the identity inside the mult kernels and need no matrix.
   NOTE(review): the optional scaling vector v is not filled by this implementation.
*/
PetscErrorCode PETSCDM_DLLEXPORT DMGetInterpolation_Composite(DM coarse,DM fine,Mat *A,Vec *v)
{
  PetscErrorCode         ierr;
  PetscInt               m,n,M,N;
  struct DMCompositeLink *nextc;
  struct DMCompositeLink *nextf;
  struct MatPackLink     *nextmat,*pnextmat = 0;
  struct MatPack         *mpack;
  Vec                    gcoarse,gfine;
  DM_Composite           *comcoarse = (DM_Composite*)coarse->data;
  DM_Composite           *comfine = (DM_Composite*)fine->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(coarse,DM_CLASSID,1);
  PetscValidHeaderSpecific(fine,DM_CLASSID,2);
  nextc = comcoarse->next;
  nextf = comfine->next;
  /* use global vectors only for determining matrix layout */
  ierr = DMCreateGlobalVector(coarse,&gcoarse);CHKERRQ(ierr);
  ierr = DMCreateGlobalVector(fine,&gfine);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gcoarse,&n);CHKERRQ(ierr);
  ierr = VecGetLocalSize(gfine,&m);CHKERRQ(ierr);
  ierr = VecGetSize(gcoarse,&N);CHKERRQ(ierr);
  ierr = VecGetSize(gfine,&M);CHKERRQ(ierr);
  ierr = VecDestroy(gcoarse);CHKERRQ(ierr);
  ierr = VecDestroy(gfine);CHKERRQ(ierr);

  /* the shell context records which composite describes each side of the product */
  ierr         = PetscNew(struct MatPack,&mpack);CHKERRQ(ierr);
  mpack->right = coarse;
  mpack->left  = fine;
  ierr  = MatCreate(((PetscObject)fine)->comm,A);CHKERRQ(ierr);
  ierr  = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr  = MatSetType(*A,MATSHELL);CHKERRQ(ierr);
  ierr  = MatShellSetContext(*A,mpack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT,(void(*)(void))MatMult_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT_TRANSPOSE,(void(*)(void))MatMultTranspose_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_MULT_ADD,(void(*)(void))MatMultAdd_Shell_Pack);CHKERRQ(ierr);
  ierr  = MatShellSetOperation(*A,MATOP_DESTROY,(void(*)(void))MatDestroy_Shell_Pack);CHKERRQ(ierr);

  /* loop over packed objects, handling one at a time; the two composites are
     walked in lock step and must have been packed in the same order */
  while (nextc) {
    if (nextc->type != nextf->type) SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_ARG_INCOMP,"Two DM have different layout");

    if (nextc->type == DMCOMPOSITE_ARRAY) {
      ; /* identity interpolation, handled directly in the mult kernels */
    } else if (nextc->type == DMCOMPOSITE_DM) {
      /* append a new node at the tail of the context's matrix list */
      ierr          = PetscNew(struct MatPackLink,&nextmat);CHKERRQ(ierr);
      nextmat->next = 0;
      if (pnextmat) {
        pnextmat->next = nextmat;
        pnextmat       = nextmat;
      } else {
        pnextmat    = nextmat;
        mpack->next = nextmat;
      }
      ierr = DMGetInterpolation(nextc->dm,nextf->dm,&nextmat->A,PETSC_NULL);CHKERRQ(ierr);
    } else {
      SETERRQ(((PetscObject)fine)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
    }
    nextc = nextc->next;
    nextf = nextf->next;
  }
  PetscFunctionReturn(0);
}
1561 
1562 #undef __FUNCT__
1563 #define __FUNCT__ "DMGetMatrix_Composite"
1564 PetscErrorCode PETSCDM_DLLEXPORT DMGetMatrix_Composite(DM dm, const MatType mtype,Mat *J)
1565 {
1566   PetscErrorCode         ierr;
1567   DM_Composite           *com = (DM_Composite*)dm->data;
1568   struct DMCompositeLink *next = com->next;
1569   PetscInt               m,*dnz,*onz,i,j,mA;
1570   Mat                    Atmp;
1571   PetscMPIInt            rank;
1572   PetscScalar            zero = 0.0;
1573   PetscBool              dense = PETSC_FALSE;
1574 
1575   PetscFunctionBegin;
1576   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1577 
1578   /* use global vector to determine layout needed for matrix */
1579   m = com->n;
1580   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1581   ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
1582   ierr = MatSetSizes(*J,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
1583   ierr = MatSetType(*J,MATAIJ);CHKERRQ(ierr);
1584 
1585   /*
1586      Extremely inefficient but will compute entire Jacobian for testing
1587   */
1588   ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
1589   if (dense) {
1590     PetscInt    rstart,rend,*indices;
1591     PetscScalar *values;
1592 
1593     mA    = com->N;
1594     ierr = MatMPIAIJSetPreallocation(*J,mA,PETSC_NULL,mA-m,PETSC_NULL);CHKERRQ(ierr);
1595     ierr = MatSeqAIJSetPreallocation(*J,mA,PETSC_NULL);CHKERRQ(ierr);
1596 
1597     ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
1598     ierr = PetscMalloc2(mA,PetscScalar,&values,mA,PetscInt,&indices);CHKERRQ(ierr);
1599     ierr = PetscMemzero(values,mA*sizeof(PetscScalar));CHKERRQ(ierr);
1600     for (i=0; i<mA; i++) indices[i] = i;
1601     for (i=rstart; i<rend; i++) {
1602       ierr = MatSetValues(*J,1,&i,mA,indices,values,INSERT_VALUES);CHKERRQ(ierr);
1603     }
1604     ierr = PetscFree2(values,indices);CHKERRQ(ierr);
1605     ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1606     ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1607     PetscFunctionReturn(0);
1608   }
1609 
1610   ierr = MatPreallocateInitialize(((PetscObject)dm)->comm,m,m,dnz,onz);CHKERRQ(ierr);
1611   /* loop over packed objects, handling one at at time */
1612   next = com->next;
1613   while (next) {
1614     if (next->type == DMCOMPOSITE_ARRAY) {
1615       if (rank == next->rank) {  /* zero the "little" block */
1616         for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
1617           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1618             ierr = MatPreallocateSet(j,1,&i,dnz,onz);CHKERRQ(ierr);
1619           }
1620         }
1621       }
1622     } else if (next->type == DMCOMPOSITE_DM) {
1623       PetscInt       nc,rstart,*ccols,maxnc;
1624       const PetscInt *cols,*rstarts;
1625       PetscMPIInt    proc;
1626 
1627       ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
1628       ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
1629       ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
1630       ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);
1631 
1632       maxnc = 0;
1633       for (i=0; i<mA; i++) {
1634         ierr  = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1635         ierr  = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1636         maxnc = PetscMax(nc,maxnc);
1637       }
1638       ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
1639       for (i=0; i<mA; i++) {
1640         ierr = MatGetRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
1641         /* remap the columns taking into how much they are shifted on each process */
1642         for (j=0; j<nc; j++) {
1643           proc = 0;
1644           while (cols[j] >= rstarts[proc+1]) proc++;
1645           ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1646         }
1647         ierr = MatPreallocateSet(com->rstart+next->rstart+i,nc,ccols,dnz,onz);CHKERRQ(ierr);
1648         ierr = MatRestoreRow(Atmp,rstart+i,&nc,&cols,PETSC_NULL);CHKERRQ(ierr);
1649       }
1650       ierr = PetscFree(ccols);CHKERRQ(ierr);
1651       ierr = MatDestroy(Atmp);CHKERRQ(ierr);
1652     } else {
1653       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1654     }
1655     next = next->next;
1656   }
1657   if (com->FormCoupleLocations) {
1658     ierr = (*com->FormCoupleLocations)(dm,PETSC_NULL,dnz,onz,__rstart,__nrows,__start,__end);CHKERRQ(ierr);
1659   }
1660   ierr = MatMPIAIJSetPreallocation(*J,0,dnz,0,onz);CHKERRQ(ierr);
1661   ierr = MatSeqAIJSetPreallocation(*J,0,dnz);CHKERRQ(ierr);
1662   ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);
1663 
1664   next = com->next;
1665   while (next) {
1666     if (next->type == DMCOMPOSITE_ARRAY) {
1667       if (rank == next->rank) {
1668         for (j=com->rstart+next->rstart; j<com->rstart+next->rstart+next->n; j++) {
1669           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1670             ierr = MatSetValues(*J,1,&j,1,&i,&zero,INSERT_VALUES);CHKERRQ(ierr);
1671           }
1672         }
1673       }
1674     } else if (next->type == DMCOMPOSITE_DM) {
1675       PetscInt          nc,rstart,row,maxnc,*ccols;
1676       const PetscInt    *cols,*rstarts;
1677       const PetscScalar *values;
1678       PetscMPIInt       proc;
1679 
1680       ierr = DMGetMatrix(next->dm,mtype,&Atmp);CHKERRQ(ierr);
1681       ierr = MatGetOwnershipRange(Atmp,&rstart,PETSC_NULL);CHKERRQ(ierr);
1682       ierr = MatGetOwnershipRanges(Atmp,&rstarts);CHKERRQ(ierr);
1683       ierr = MatGetLocalSize(Atmp,&mA,PETSC_NULL);CHKERRQ(ierr);
1684       maxnc = 0;
1685       for (i=0; i<mA; i++) {
1686         ierr  = MatGetRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1687         ierr  = MatRestoreRow(Atmp,rstart+i,&nc,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
1688         maxnc = PetscMax(nc,maxnc);
1689       }
1690       ierr = PetscMalloc(maxnc*sizeof(PetscInt),&ccols);CHKERRQ(ierr);
1691       for (i=0; i<mA; i++) {
1692         ierr = MatGetRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
1693         for (j=0; j<nc; j++) {
1694           proc = 0;
1695           while (cols[j] >= rstarts[proc+1]) proc++;
1696           ccols[j] = cols[j] + next->grstarts[proc] - rstarts[proc];
1697         }
1698         row  = com->rstart+next->rstart+i;
1699         ierr = MatSetValues(*J,1,&row,nc,ccols,values,INSERT_VALUES);CHKERRQ(ierr);
1700         ierr = MatRestoreRow(Atmp,rstart+i,&nc,(const PetscInt **)&cols,&values);CHKERRQ(ierr);
1701       }
1702       ierr = PetscFree(ccols);CHKERRQ(ierr);
1703       ierr = MatDestroy(Atmp);CHKERRQ(ierr);
1704     } else {
1705       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1706     }
1707     next = next->next;
1708   }
1709   if (com->FormCoupleLocations) {
1710     PetscInt __rstart;
1711     ierr = MatGetOwnershipRange(*J,&__rstart,PETSC_NULL);CHKERRQ(ierr);
1712     ierr = (*com->FormCoupleLocations)(dm,*J,PETSC_NULL,PETSC_NULL,__rstart,0,0,0);CHKERRQ(ierr);
1713   }
1714   ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1715   ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
1716   PetscFunctionReturn(0);
1717 }
1718 
1719 #undef __FUNCT__
1720 #define __FUNCT__ "DMGetColoring_Composite"
1721 PetscErrorCode PETSCDM_DLLEXPORT DMGetColoring_Composite(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
1722 {
1723   PetscErrorCode         ierr;
1724   PetscInt               n,i,cnt;
1725   ISColoringValue        *colors;
1726   PetscBool              dense = PETSC_FALSE;
1727   ISColoringValue        maxcol = 0;
1728   DM_Composite           *com = (DM_Composite*)dm->data;
1729 
1730   PetscFunctionBegin;
1731   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1732   if (ctype == IS_COLORING_GHOSTED) {
1733     SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Currently you must use -dmmg_iscoloring_type global" );
1734   } else if (ctype == IS_COLORING_GLOBAL) {
1735     n = com->n;
1736   } else SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_ARG_OUTOFRANGE,"Unknown ISColoringType");
1737   ierr = PetscMalloc(n*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); /* freed in ISColoringDestroy() */
1738 
1739   ierr = PetscOptionsGetBool(PETSC_NULL,"-dmcomposite_dense_jacobian",&dense,PETSC_NULL);CHKERRQ(ierr);
1740   if (dense) {
1741     for (i=0; i<n; i++) {
1742       colors[i] = (ISColoringValue)(com->rstart + i);
1743     }
1744     maxcol = com->N;
1745   } else {
1746     struct DMCompositeLink *next = com->next;
1747     PetscMPIInt            rank;
1748 
1749     ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1750     cnt  = 0;
1751     while (next) {
1752       if (next->type == DMCOMPOSITE_ARRAY) {
1753         if (rank == next->rank) {  /* each column gets is own color */
1754           for (i=com->rstart+next->rstart; i<com->rstart+next->rstart+next->n; i++) {
1755             colors[cnt++] = maxcol++;
1756           }
1757         }
1758         ierr = MPI_Bcast(&maxcol,1,MPIU_COLORING_VALUE,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1759       } else if (next->type == DMCOMPOSITE_DM) {
1760         ISColoring     lcoloring;
1761 
1762         ierr = DMGetColoring(next->dm,IS_COLORING_GLOBAL,mtype,&lcoloring);CHKERRQ(ierr);
1763         for (i=0; i<lcoloring->N; i++) {
1764           colors[cnt++] = maxcol + lcoloring->colors[i];
1765         }
1766         maxcol += lcoloring->n;
1767         ierr = ISColoringDestroy(lcoloring);CHKERRQ(ierr);
1768       } else {
1769         SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1770       }
1771       next = next->next;
1772     }
1773   }
1774   ierr = ISColoringCreate(((PetscObject)dm)->comm,maxcol,n,colors,coloring);CHKERRQ(ierr);
1775   PetscFunctionReturn(0);
1776 }
1777 
1778 #undef __FUNCT__
1779 #define __FUNCT__ "DMGlobalToLocalBegin_Composite"
1780 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalBegin_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec)
1781 {
1782   PetscErrorCode         ierr;
1783   struct DMCompositeLink *next;
1784   PetscInt               cnt = 3;
1785   PetscMPIInt            rank;
1786   PetscScalar            *garray,*larray;
1787   DM_Composite           *com = (DM_Composite*)dm->data;
1788 
1789   PetscFunctionBegin;
1790   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
1791   PetscValidHeaderSpecific(gvec,VEC_CLASSID,2);
1792   next = com->next;
1793   if (!com->setup) {
1794     ierr = DMSetUp(dm);CHKERRQ(ierr);
1795   }
1796   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
1797   ierr = VecGetArray(gvec,&garray);CHKERRQ(ierr);
1798   ierr = VecGetArray(lvec,&larray);CHKERRQ(ierr);
1799 
1800   /* loop over packed objects, handling one at at time */
1801   while (next) {
1802     if (next->type == DMCOMPOSITE_ARRAY) {
1803       if (rank == next->rank) {
1804         ierr    = PetscMemcpy(larray,garray,next->n*sizeof(PetscScalar));CHKERRQ(ierr);
1805         garray += next->n;
1806       }
1807       /* does not handle ADD_VALUES */
1808       ierr = MPI_Bcast(larray,next->n,MPIU_SCALAR,next->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
1809       larray += next->n;
1810     } else if (next->type == DMCOMPOSITE_DM) {
1811       Vec      local,global;
1812       PetscInt N;
1813 
1814       ierr = DMGetGlobalVector(next->dm,&global);CHKERRQ(ierr);
1815       ierr = VecGetLocalSize(global,&N);CHKERRQ(ierr);
1816       ierr = VecPlaceArray(global,garray);CHKERRQ(ierr);
1817       ierr = DMGetLocalVector(next->dm,&local);CHKERRQ(ierr);
1818       ierr = VecPlaceArray(local,larray);CHKERRQ(ierr);
1819       ierr = DMGlobalToLocalBegin(next->dm,global,mode,local);CHKERRQ(ierr);
1820       ierr = DMGlobalToLocalEnd(next->dm,global,mode,local);CHKERRQ(ierr);
1821       ierr = VecResetArray(global);CHKERRQ(ierr);
1822       ierr = VecResetArray(local);CHKERRQ(ierr);
1823       ierr = DMRestoreGlobalVector(next->dm,&global);CHKERRQ(ierr);
1824       ierr = DMRestoreGlobalVector(next->dm,&local);CHKERRQ(ierr);
1825       larray += next->n;
1826     } else {
1827       SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"Cannot handle that object type yet");
1828     }
1829     cnt++;
1830     next = next->next;
1831   }
1832 
1833   ierr = VecRestoreArray(gvec,PETSC_NULL);CHKERRQ(ierr);
1834   ierr = VecRestoreArray(lvec,PETSC_NULL);CHKERRQ(ierr);
1835   PetscFunctionReturn(0);
1836 }
1837 
1838 #undef __FUNCT__
1839 #define __FUNCT__ "DMGlobalToLocalEnd_Composite"
1840 PetscErrorCode PETSCDM_DLLEXPORT DMGlobalToLocalEnd_Composite(DM dm,Vec gvec,InsertMode mode,Vec lvec)
1841 {
1842   PetscFunctionBegin;
1843   PetscFunctionReturn(0);
1844 }
1845 
1846 EXTERN_C_BEGIN
1847 #undef __FUNCT__
1848 #define __FUNCT__ "DMCreate_Composite"
1849 PetscErrorCode PETSCDM_DLLEXPORT DMCreate_Composite(DM p)
1850 {
1851   PetscErrorCode ierr;
1852   DM_Composite   *com;
1853 
1854   PetscFunctionBegin;
1855   ierr = PetscNewLog(p,DM_Composite,&com);CHKERRQ(ierr);
1856   p->data = com;
1857   ierr = PetscObjectChangeTypeName((PetscObject)p,"DMComposite");CHKERRQ(ierr);
1858   com->n            = 0;
1859   com->next         = PETSC_NULL;
1860   com->nredundant   = 0;
1861   com->nDM          = 0;
1862 
1863   p->ops->createglobalvector = DMCreateGlobalVector_Composite;
1864   p->ops->createlocalvector  = DMCreateLocalVector_Composite;
1865   p->ops->refine             = DMRefine_Composite;
1866   p->ops->getinterpolation   = DMGetInterpolation_Composite;
1867   p->ops->getmatrix          = DMGetMatrix_Composite;
1868   p->ops->getcoloring        = DMGetColoring_Composite;
1869   p->ops->globaltolocalbegin = DMGlobalToLocalBegin_Composite;
1870   p->ops->globaltolocalend   = DMGlobalToLocalEnd_Composite;
1871   p->ops->destroy            = DMDestroy_Composite;
1872   p->ops->view               = DMView_Composite;
1873   p->ops->setup              = DMSetUp_Composite;
1874   PetscFunctionReturn(0);
1875 }
1876 EXTERN_C_END
1877 
1878 #undef __FUNCT__
1879 #define __FUNCT__ "DMCompositeCreate"
1880 /*@C
1881     DMCompositeCreate - Creates a vector packer, used to generate "composite"
1882       vectors made up of several subvectors.
1883 
1884     Collective on MPI_Comm
1885 
1886     Input Parameter:
1887 .   comm - the processors that will share the global vector
1888 
1889     Output Parameters:
1890 .   packer - the packer object
1891 
1892     Level: advanced
1893 
1894 .seealso DMDestroy(), DMCompositeAddArray(), DMCompositeAddDM(), DMCompositeScatter(),
1895          DMCompositeGather(), DMCreateGlobalVector(), DMCompositeGetISLocalToGlobalMappings(), DMCompositeGetAccess()
1896          DMCompositeGetLocalVectors(), DMCompositeRestoreLocalVectors(), DMCompositeGetEntries()
1897 
1898 @*/
1899 PetscErrorCode PETSCDM_DLLEXPORT DMCompositeCreate(MPI_Comm comm,DM *packer)
1900 {
1901   PetscErrorCode ierr;
1902 
1903   PetscFunctionBegin;
1904   PetscValidPointer(packer,2);
1905   ierr = DMCreate(comm,packer);CHKERRQ(ierr);
1906   ierr = DMSetType(*packer,DMCOMPOSITE);CHKERRQ(ierr);
1907   PetscFunctionReturn(0);
1908 }
1909