/* /petsc/src/dm/impls/redundant/dmredundant.c (revision 2205254efee3a00a594e5e2a3a70f74dcb40bc03) */
#include <petsc-private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscInt rank;                /* owner */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateGlobalVector_Redundant"
static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog,ltogb;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = 0;
  ierr  = VecCreate(((PetscObject)dm)->comm,gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMappingBlock(*gvec,ltogb);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPI_SUM : MPI_MAX,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
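
/* A worked illustration (not code from this file) of the ADD_VALUES reduction
   above, with two ranks, red->rank = 0, and red->N = 2:

     rank 0 (owner):     l = [1, 2],  g = [10, 20]   (n = N = 2)
     rank 1 (non-owner): l = [3, 4],  g = []         (n = 0)

   Rank 0 first forms buffer = g + l = [11, 22]; MPI_Reduce with MPI_SUM then
   combines it with rank 1's l, leaving g = [14, 26] on the owner. */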

#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
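
/* The INSERT_VALUES path above is copy-then-broadcast: the owner copies its
   n = N global entries into its local array, then MPI_Bcast replicates them,
   so after DMGlobalToLocalEnd() every rank holds an identical copy of all N
   values in its local vector. */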

#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMSetUp_Redundant"
static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscInt       i,*globals;

  PetscFunctionBegin;
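  /* Every process addresses all N redundant dofs locally, so the local-to-global mapping is the identity [0,1,...,N-1] */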
  ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr         = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  ierr         = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
  dm->ltogmapb = dm->ltogmap;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMView_Redundant"
static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateColoring_Redundant"
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,MatType mtype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_GHOSTED:
    nloc = red->N;
    break;
  default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRefine_Redundant"
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmc)->comm;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCoarsen_Redundant"
static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmf)->comm;
  ierr = MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMCreateInterpolation_Redundant"
static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag);CHKERRQ(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
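  /* Coarse and fine spaces coincide for a redundant DM, so the interpolation built below is the N-by-N identity */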
  ierr = MatCreate(((PetscObject)dmc)->comm,P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
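
/* A minimal usage sketch (an illustration, not code from this file); it assumes
   PETSC_COMM_WORLD and the usual ierr/CHKERRQ error-checking setup:

     DM dm;
     ierr = DMCreate(PETSC_COMM_WORLD,&dm);CHKERRQ(ierr);
     ierr = DMSetType(dm,DMREDUNDANT);CHKERRQ(ierr);
     ierr = DMRedundantSetSize(dm,0,3);CHKERRQ(ierr);    <- rank 0 owns all 3 dofs
     ierr = DMSetUp(dm);CHKERRQ(ierr);
*/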

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or PETSC_NULL)
-   N - total number of redundant degrees of freedom (or PETSC_NULL)

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
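
/* Example query (a sketch): either output may be PETSC_NULL if not needed:

     PetscInt owner,N;
     ierr = DMRedundantGetSize(dm,&owner,&N);CHKERRQ(ierr);
     ierr = DMRedundantGetSize(dm,PETSC_NULL,&N);CHKERRQ(ierr);
*/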

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize_Redundant"
PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);CHKERRQ(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize_Redundant"
PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI
         processes have no variables). In the local representation the variables are all stored on ALL of the MPI processes
         (because they are all needed for each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters
         for a bifurcation problem.

  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMRedundantCreate(), DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/
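
/* A hedged sketch of the DMCOMPOSITE usage mentioned above: pack an existing
   mesh DM (here a hypothetical "dmmesh") together with a single redundant
   continuation parameter owned by rank 0:

     DM pack,dmred;
     ierr = DMCompositeCreate(PETSC_COMM_WORLD,&pack);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,dmmesh);CHKERRQ(ierr);
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,1,&dmred);CHKERRQ(ierr);
     ierr = DMCompositeAddDM(pack,dmred);CHKERRQ(ierr);
*/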

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "DMCreate_Redundant"
PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
  dm->data = red;

  ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscStrallocpy(VECSTANDARD,(char**)&dm->vectype);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "DMRedundantCreate"
/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective on MPI_Comm

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
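
/* A minimal life-cycle sketch (an illustration, not code from this file):
   create the DM, move data from the owner-only global vector to the
   replicated local vector, then clean up:

     DM  dm;
     Vec g,l;
     ierr = DMRedundantCreate(PETSC_COMM_WORLD,0,4,&dm);CHKERRQ(ierr);
     ierr = DMCreateGlobalVector(dm,&g);CHKERRQ(ierr);   <- local length 4 on rank 0, 0 elsewhere
     ierr = DMCreateLocalVector(dm,&l);CHKERRQ(ierr);    <- length 4 on every rank
     ierr = VecSet(g,1.0);CHKERRQ(ierr);
     ierr = DMGlobalToLocalBegin(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = DMGlobalToLocalEnd(dm,g,INSERT_VALUES,l);CHKERRQ(ierr);
     ierr = VecDestroy(&g);CHKERRQ(ierr);
     ierr = VecDestroy(&l);CHKERRQ(ierr);
     ierr = DMDestroy(&dm);CHKERRQ(ierr);
*/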