xref: /petsc/src/dm/impls/redundant/dmredundant.c (revision 5f042095cd6794de959e41580753d6ff4dec299c)
1 #include <petsc-private/dmimpl.h>     /*I      "petscdm.h"          I*/
2 #include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/
3 #include <petscmat.h>           /*I      "petscmat.h"         I*/
4 
typedef struct  {
  PetscInt rank;                /* owner: MPI rank (in the DM's communicator) that stores the redundant values */
  PetscInt N;                   /* total number of dofs */
  PetscInt n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;
10 
#undef __FUNCT__
#define __FUNCT__ "DMCreateMatrix_Redundant"
/*
   Creates the matrix for a redundant DM: an N-by-N operator whose rows all
   live on the owning rank (red->n is N there and 0 elsewhere), assembled
   with explicit zeros so the full dense pattern is preallocated and entries
   can be set later without further allocation.
*/
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,const MatType mtype,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog,ltogb;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(((PetscObject)dm)->comm,J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,mtype);CHKERRQ(ierr);
  /* Preallocate dense rows for each supported format; preallocation calls
     for formats other than the matrix's actual type are ignored by PETSc */
  ierr = MatSeqAIJSetPreallocation(*J,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,PETSC_NULL,red->N-red->n,PETSC_NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMappingBlock(*J,ltogb,ltogb);CHKERRQ(ierr);

  /* Insert a full row of zeros for every locally owned row (all rows on the
     owner, none elsewhere) so the nonzero pattern is complete up front */
  ierr = PetscMalloc2(red->N,PetscInt,&cols,red->N,PetscScalar,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
49 
#undef __FUNCT__
#define __FUNCT__ "DMDestroy_Redundant"
/* Tear down the type-specific parts of the DM: clear the composed set/get-size
   methods so they can no longer be invoked on this object. */
static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","",PETSC_NULL);CHKERRQ(ierr);
  /* NOTE(review): dm->data (the DM_Redundant context allocated in
     DMCreate_Redundant) is not freed here.  Confirm that the generic
     DMDestroy() frees dm->data at this revision; if it does not, this
     leaks one allocation per DM. */
  PetscFunctionReturn(0);
}
61 
62 #undef __FUNCT__
63 #define __FUNCT__ "DMCreateGlobalVector_Redundant"
64 static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
65 {
66   PetscErrorCode         ierr;
67   DM_Redundant           *red = (DM_Redundant*)dm->data;
68   ISLocalToGlobalMapping ltog,ltogb;
69 
70   PetscFunctionBegin;
71   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
72   PetscValidPointer(gvec,2);
73   *gvec = 0;
74   ierr = VecCreate(((PetscObject)dm)->comm,gvec);CHKERRQ(ierr);
75   ierr = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
76   ierr = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
77   ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
78   ierr = DMGetLocalToGlobalMappingBlock(dm,&ltogb);CHKERRQ(ierr);
79   ierr = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
80   ierr = VecSetLocalToGlobalMappingBlock(*gvec,ltog);CHKERRQ(ierr);
81   ierr = PetscObjectCompose((PetscObject)*gvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
82   PetscFunctionReturn(0);
83 }
84 
#undef __FUNCT__
#define __FUNCT__ "DMCreateLocalVector_Redundant"
/* Creates the local vector: every rank holds a full private copy of all N
   redundant values, so the vector is sequential with length red->N. */
static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = 0;
  ierr = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  /* Compose the DM so callers can recover it from the vector */
  ierr = PetscObjectCompose((PetscObject)*lvec,"DM",(PetscObject)dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
102 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalBegin_Redundant"
/*
   Gathers the per-rank local copies into the global vector, which is stored
   entirely on rank red->rank.  ADD_VALUES/MAX_VALUES reduce every rank's
   local copy onto the owner; INSERT_VALUES copies only the owner's local
   values (red->n is 0 on all other ranks, so they contribute nothing).
*/
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&rank);CHKERRQ(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void *source;
    PetscScalar *buffer;
    PetscInt i;
    if (rank == red->rank) {
#if defined (PETSC_HAVE_MPI_IN_PLACE)
      /* Merge the owner's own contribution directly into gv, then reduce in place */
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      /* No MPI_IN_PLACE available: stage the owner's combined contribution in a temporary */
      ierr = PetscMalloc(red->N*sizeof(PetscScalar),&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      /* PetscMax() is not defined for complex scalars, so MAX_VALUES is real-only */
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else {
      source = (void*)lv;
    }
    /* Reduce all ranks' local copies onto the owning rank only */
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES)?MPI_SUM:MPI_MAX,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
  /* red->n == N on the owner and 0 elsewhere, so only the owner copies */
  ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
  break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
153 
#undef __FUNCT__
#define __FUNCT__ "DMLocalToGlobalEnd_Redundant"
/* All communication is completed synchronously in
   DMLocalToGlobalBegin_Redundant, so the end phase has nothing to do. */
static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
161 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalBegin_Redundant"
/*
   Scatters the global values (all stored on red->rank) into every rank's
   local copy: the owner copies its red->n owned entries into its local
   array, then all N values are broadcast from the owner to every rank.
*/
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    /* red->n is nonzero only on the owning rank */
    if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,((PetscObject)dm)->comm);CHKERRQ(ierr);
    break;
  default: SETERRQ(((PetscObject)dm)->comm,PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
185 
#undef __FUNCT__
#define __FUNCT__ "DMGlobalToLocalEnd_Redundant"
/* All communication is completed synchronously in
   DMGlobalToLocalBegin_Redundant, so the end phase has nothing to do. */
static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
193 
194 #undef __FUNCT__
195 #define __FUNCT__ "DMSetUp_Redundant"
196 static PetscErrorCode DMSetUp_Redundant(DM dm)
197 {
198   PetscErrorCode ierr;
199   DM_Redundant   *red = (DM_Redundant*)dm->data;
200   PetscInt       i,*globals;
201 
202   PetscFunctionBegin;
203   ierr = PetscMalloc(red->N*sizeof(PetscInt),&globals);CHKERRQ(ierr);
204   for (i=0; i<red->N; i++) globals[i] = i;
205   ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
206   ierr = PetscObjectReference((PetscObject)dm->ltogmap);CHKERRQ(ierr);
207   dm->ltogmapb = dm->ltogmap;
208   PetscFunctionReturn(0);
209 }
210 
211 #undef __FUNCT__
212 #define __FUNCT__ "DMView_Redundant"
213 static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
214 {
215   PetscErrorCode ierr;
216   DM_Redundant   *red = (DM_Redundant*)dm->data;
217   PetscBool      iascii;
218 
219   PetscFunctionBegin;
220   ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
221   if (iascii) {
222     ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
223   }
224   PetscFunctionReturn(0);
225 }
226 
227 #undef __FUNCT__
228 #define __FUNCT__ "DMCreateColoring_Redundant"
229 static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,const MatType mtype,ISColoring *coloring)
230 {
231   DM_Redundant   *red = (DM_Redundant*)dm->data;
232   PetscErrorCode ierr;
233   PetscInt i,nloc;
234   ISColoringValue *colors;
235 
236   PetscFunctionBegin;
237   switch (ctype) {
238   case IS_COLORING_GLOBAL:
239     nloc = red->n;
240     break;
241   case IS_COLORING_GHOSTED:
242     nloc = red->N;
243     break;
244   default: SETERRQ1(((PetscObject)dm)->comm,PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
245   }
246   ierr = PetscMalloc(nloc*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
247   for (i=0; i<nloc; i++) colors[i] = i;
248   ierr = ISColoringCreate(((PetscObject)dm)->comm,red->N,nloc,colors,coloring);CHKERRQ(ierr);
249   ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
250   PetscFunctionReturn(0);
251 }
252 
253 #undef __FUNCT__
254 #define __FUNCT__ "DMRefine_Redundant"
255 static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
256 {
257   PetscErrorCode ierr;
258   PetscMPIInt flag;
259   DM_Redundant *redc = (DM_Redundant*)dmc->data;
260 
261   PetscFunctionBegin;
262   if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmc)->comm;
263   ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,comm,&flag); CHKERRQ(ierr);
264   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmc)->comm,PETSC_ERR_SUP,"cannot change communicators");
265   ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
266   PetscFunctionReturn(0);
267 }
268 
269 #undef __FUNCT__
270 #define __FUNCT__ "DMCoarsen_Redundant"
271 static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
272 {
273   PetscErrorCode ierr;
274   PetscMPIInt flag;
275   DM_Redundant *redf = (DM_Redundant*)dmf->data;
276 
277   PetscFunctionBegin;
278   if (comm == MPI_COMM_NULL) comm = ((PetscObject)dmf)->comm;
279   ierr = MPI_Comm_compare(((PetscObject)dmf)->comm,comm,&flag); CHKERRQ(ierr);
280   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
281   ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
282   PetscFunctionReturn(0);
283 }
284 
285 #undef __FUNCT__
286 #define __FUNCT__ "DMCreateInterpolation_Redundant"
287 static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
288 {
289   PetscErrorCode ierr;
290   DM_Redundant   *redc = (DM_Redundant*)dmc->data;
291   DM_Redundant   *redf = (DM_Redundant*)dmf->data;
292   PetscMPIInt    flag;
293   PetscInt       i,rstart,rend;
294 
295   PetscFunctionBegin;
296   ierr = MPI_Comm_compare(((PetscObject)dmc)->comm,((PetscObject)dmf)->comm,&flag); CHKERRQ(ierr);
297   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_SUP,"cannot change communicators");
298   if (redc->rank != redf->rank) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
299   if (redc->N != redf->N) SETERRQ(((PetscObject)dmf)->comm,PETSC_ERR_ARG_INCOMP,"Global size does not match");
300   ierr = MatCreate(((PetscObject)dmc)->comm,P);CHKERRQ(ierr);
301   ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
302   ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
303   ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
304   ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
305   ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
306   for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
307   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
308   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
309   if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
310   PetscFunctionReturn(0);
311 }
312 
#undef __FUNCT__
#define __FUNCT__ "DMRedundantSetSize"
/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on DM

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
341 
#undef __FUNCT__
#define __FUNCT__ "DMRedundantGetSize"
/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or PETSC_NULL)
-   N - total number of redundant degrees of freedom (or PETSC_NULL)

    Level: advanced

.seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
370 
371 EXTERN_C_BEGIN
372 #undef __FUNCT__
373 #define __FUNCT__ "DMRedundantSetSize_Redundant"
374 PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscInt rank,PetscInt N)
375 {
376   DM_Redundant   *red = (DM_Redundant*)dm->data;
377   PetscErrorCode ierr;
378   PetscMPIInt    myrank;
379 
380   PetscFunctionBegin;
381   ierr = MPI_Comm_rank(((PetscObject)dm)->comm,&myrank);CHKERRQ(ierr);
382   red->rank = rank;
383   red->N = N;
384   red->n = (myrank == rank) ? N : 0;
385   PetscFunctionReturn(0);
386 }
387 
388 #undef __FUNCT__
389 #define __FUNCT__ "DMRedundantGetSize_Redundant"
390 PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
391 {
392   DM_Redundant   *red = (DM_Redundant*)dm->data;
393 
394   PetscFunctionBegin;
395   if (rank) *rank = red->rank;
396   if (N)    *N = red->N;
397   PetscFunctionReturn(0);
398 }
399 EXTERN_C_END
400 
401 EXTERN_C_BEGIN
402 #undef __FUNCT__
403 #define __FUNCT__ "DMCreate_Redundant"
404 PetscErrorCode DMCreate_Redundant(DM dm)
405 {
406   PetscErrorCode ierr;
407   DM_Redundant   *red;
408 
409   PetscFunctionBegin;
410   ierr = PetscNewLog(dm,DM_Redundant,&red);CHKERRQ(ierr);
411   dm->data = red;
412 
413   ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);
414   dm->ops->setup              = DMSetUp_Redundant;
415   dm->ops->view               = DMView_Redundant;
416   dm->ops->createglobalvector = DMCreateGlobalVector_Redundant;
417   dm->ops->createlocalvector  = DMCreateLocalVector_Redundant;
418   dm->ops->creatematrix          = DMCreateMatrix_Redundant;
419   dm->ops->destroy            = DMDestroy_Redundant;
420   dm->ops->globaltolocalbegin = DMGlobalToLocalBegin_Redundant;
421   dm->ops->globaltolocalend   = DMGlobalToLocalEnd_Redundant;
422   dm->ops->localtoglobalbegin = DMLocalToGlobalBegin_Redundant;
423   dm->ops->localtoglobalend   = DMLocalToGlobalEnd_Redundant;
424   dm->ops->refine             = DMRefine_Redundant;
425   dm->ops->coarsen            = DMCoarsen_Redundant;
426   dm->ops->createinterpolation   = DMCreateInterpolation_Redundant;
427   dm->ops->getcoloring        = DMCreateColoring_Redundant;
428   ierr = PetscStrallocpy(VECSTANDARD,&dm->vectype);CHKERRQ(ierr);
429   ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantSetSize_C","DMRedundantSetSize_Redundant",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
430   ierr = PetscObjectComposeFunctionDynamic((PetscObject)dm,"DMRedundantGetSize_C","DMRedundantGetSize_Redundant",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
431   PetscFunctionReturn(0);
432 }
433 EXTERN_C_END
434 
435 #undef __FUNCT__
436 #define __FUNCT__ "DMRedundantCreate"
437 /*@C
438     DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables
439 
440     Collective on MPI_Comm
441 
442     Input Parameter:
443 +   comm - the processors that will share the global vector
444 .   rank - rank to own the redundant values
445 -   N - total number of degrees of freedom
446 
447     Output Parameters:
448 .   red - the redundant DM
449 
450     Level: advanced
451 
452 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM()
453 
454 @*/
455 PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscInt rank,PetscInt N,DM *dm)
456 {
457   PetscErrorCode ierr;
458 
459   PetscFunctionBegin;
460   PetscValidPointer(dm,2);
461   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
462   ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
463   ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
464   ierr = DMSetUp(*dm);CHKERRQ(ierr);
465   PetscFunctionReturn(0);
466 }
467