#include <petsc/private/dmimpl.h>
#include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/

typedef struct {
  PetscMPIInt rank;                /* owner */
  PetscInt    N;                   /* total number of dofs */
  PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
} DM_Redundant;

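/* The Jacobian of a redundant DM is dense: every redundant dof couples to every
   other, so each locally owned row is preallocated with all N columns and filled
   with explicit zeros before assembly. */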
static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
{
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  PetscErrorCode         ierr;
  ISLocalToGlobalMapping ltog;
  PetscInt               i,rstart,rend,*cols;
  PetscScalar            *vals;

  PetscFunctionBegin;
  ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
  ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
  ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
  ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
  ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);

  ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
  ierr = MatSetDM(*J,dm);CHKERRQ(ierr);

  ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) {
    cols[i] = i;
    vals[i] = 0.0;
  }
  ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {
    ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
  }
  ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMDestroy_Redundant(DM dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);CHKERRQ(ierr);
  /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
  ierr = PetscFree(dm->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
{
  PetscErrorCode         ierr;
  DM_Redundant           *red = (DM_Redundant*)dm->data;
  ISLocalToGlobalMapping ltog;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(gvec,2);
  *gvec = NULL;
  ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
  ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
  ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
  ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidPointer(lvec,2);
  *lvec = NULL;
  ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
  ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
  ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
  ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

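/* Local-to-global: combine every rank's local copy onto the owning rank.
   ADD_VALUES/MAX_VALUES reduce the copies with MPI_Reduce rooted at the owner;
   INSERT_VALUES simply copies the owner's local values into its global storage
   (non-owners have n=0, so they copy nothing). */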
static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *lv;
  PetscScalar       *gv;
  PetscMPIInt       rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRMPI(ierr);
  ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
  switch (imode) {
  case ADD_VALUES:
  case MAX_VALUES:
  {
    void        *source;
    PetscScalar *buffer;
    PetscInt    i;
    if (rank == red->rank) {
#if defined(PETSC_HAVE_MPI_IN_PLACE)
      buffer = gv;
      source = MPI_IN_PLACE;
#else
      ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
      source = buffer;
#endif
      if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
#if !defined(PETSC_USE_COMPLEX)
      if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
#endif
    } else source = (void*)lv;
    ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRMPI(ierr);
#if !defined(PETSC_HAVE_MPI_IN_PLACE)
    if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
#endif
  } break;
  case INSERT_VALUES:
    ierr = PetscArraycpy(gv,lv,red->n);CHKERRQ(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
  ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

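/* Global-to-local: the owner copies its global values into its local vector and
   then broadcasts them so that every rank ends up with a full local copy. */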
static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscErrorCode    ierr;
  DM_Redundant      *red = (DM_Redundant*)dm->data;
  const PetscScalar *gv;
  PetscScalar       *lv;

  PetscFunctionBegin;
  ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
  switch (imode) {
  case INSERT_VALUES:
    if (red->n) {ierr = PetscArraycpy(lv,gv,red->n);CHKERRQ(ierr);}
    ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRMPI(ierr);
    break;
  default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
  }
  ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
  ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUp_Redundant(DM dm)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
{
  PetscErrorCode ierr;
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    /* rank is a PetscMPIInt, so it takes %d rather than the PetscInt format %D */
    ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%d N=%D\n",red->rank,red->N);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

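/* Since the coupling is dense, no two dofs can share a color: each local dof is
   assigned its own distinct color. */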
static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
{
  DM_Redundant    *red = (DM_Redundant*)dm->data;
  PetscErrorCode  ierr;
  PetscInt        i,nloc;
  ISColoringValue *colors;

  PetscFunctionBegin;
  switch (ctype) {
  case IS_COLORING_GLOBAL:
    nloc = red->n;
    break;
  case IS_COLORING_LOCAL:
    nloc = red->N;
    break;
  default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
  }
  ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
  for (i=0; i<nloc; i++) colors[i] = i;
  ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
  ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

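/* Refining or coarsening a redundant DM simply produces another redundant DM of
   the same size on the same owner, so the interpolation between "levels" below
   is the identity matrix. */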
static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRMPI(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
{
  PetscErrorCode ierr;
  PetscMPIInt    flag;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;

  PetscFunctionBegin;
  if (comm == MPI_COMM_NULL) {
    ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
  }
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRMPI(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
{
  PetscErrorCode ierr;
  DM_Redundant   *redc = (DM_Redundant*)dmc->data;
  DM_Redundant   *redf = (DM_Redundant*)dmf->data;
  PetscMPIInt    flag;
  PetscInt       i,rstart,rend;

  PetscFunctionBegin;
  ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRMPI(ierr);
  if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
  if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
  if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
  ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
  ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
  ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(*P,1,NULL);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(*P,1,NULL,0,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
  for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
  ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

/*@
    DMRedundantSetSize - Sets the size of a densely coupled redundant object

    Collective on dm

    Input Parameters:
+   dm - redundant DM
.   rank - rank of process to own redundant degrees of freedom
-   N - total number of redundant degrees of freedom

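    Example Usage (a minimal sketch, error checking omitted; comm stands for any MPI communicator and rank 0 owns all five dofs):
.vb
    DM dm;
    DMCreate(comm,&dm);
    DMSetType(dm,DMREDUNDANT);
    DMRedundantSetSize(dm,0,5);
    DMSetUp(dm);
.ve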
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
@*/
PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  PetscValidLogicalCollectiveMPIInt(dm,rank,2);
  PetscValidLogicalCollectiveInt(dm,N,3);
  ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
    DMRedundantGetSize - Gets the size of a densely coupled redundant object

    Not Collective

    Input Parameter:
.   dm - redundant DM

    Output Parameters:
+   rank - rank of process to own redundant degrees of freedom (or NULL)
-   N - total number of redundant degrees of freedom (or NULL)

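    Example Usage (a minimal sketch):
.vb
    PetscMPIInt owner;
    PetscInt    nred;
    DMRedundantGetSize(dm,&owner,&nred);   /* either output may be NULL if not needed */
.ve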
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
@*/
PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(dm,DM_CLASSID,1);
  PetscValidType(dm,1);
  ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
{
  DM_Redundant   *red = (DM_Redundant*)dm->data;
  PetscErrorCode ierr;
  PetscMPIInt    myrank;
  PetscInt       i,*globals;

  PetscFunctionBegin;
  ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRMPI(ierr);
  red->rank = rank;
  red->N    = N;
  red->n    = (myrank == rank) ? N : 0;

  /* the local-to-global mapping is set up here */
  ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
  for (i=0; i<red->N; i++) globals[i] = i;
  ierr = ISLocalToGlobalMappingDestroy(&dm->ltogmap);CHKERRQ(ierr);
  ierr = ISLocalToGlobalMappingCreate(PetscObjectComm((PetscObject)dm),1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* rank must be a PetscMPIInt* to match the (DM,PetscMPIInt*,PetscInt*) signature used by PetscUseMethod() above */
static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscMPIInt *rank,PetscInt *N)
{
  DM_Redundant *red = (DM_Redundant*)dm->data;

  PetscFunctionBegin;
  if (rank) *rank = red->rank;
  if (N)    *N = red->N;
  PetscFunctionReturn(0);
}

static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

/*MC
   DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
         In the global representation the variables are all stored on a single MPI process (all other MPI processes have
         no variables); in the local representation the variables are stored on ALL of the MPI processes (because they are
         all needed for each process's local computations).

         This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.

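   Example Usage (a minimal sketch, error checking omitted; dm_other stands for some other existing DM being packed alongside the redundant one):
.vb
   DM red,pack;
   DMRedundantCreate(PETSC_COMM_WORLD,0,1,&red);   /* one parameter owned by rank 0 */
   DMCompositeCreate(PETSC_COMM_WORLD,&pack);
   DMCompositeAddDM(pack,red);
   DMCompositeAddDM(pack,dm_other);
   DMDestroy(&red);                                /* pack keeps its own reference */
.ve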
  Level: intermediate

.seealso: DMType, DMCOMPOSITE, DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
M*/

PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
{
  PetscErrorCode ierr;
  DM_Redundant   *red;

  PetscFunctionBegin;
  ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
  dm->data = red;

  dm->ops->setup               = DMSetUp_Redundant;
  dm->ops->view                = DMView_Redundant;
  dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
  dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
  dm->ops->creatematrix        = DMCreateMatrix_Redundant;
  dm->ops->destroy             = DMDestroy_Redundant;
  dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
  dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
  dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
  dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
  dm->ops->refine              = DMRefine_Redundant;
  dm->ops->coarsen             = DMCoarsen_Redundant;
  dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
  dm->ops->getcoloring         = DMCreateColoring_Redundant;

  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@C
    DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables

    Collective

    Input Parameters:
+   comm - the processors that will share the global vector
.   rank - rank to own the redundant values
-   N - total number of degrees of freedom

    Output Parameter:
.   dm - the redundant DM

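    Example Usage (a minimal sketch, error checking omitted; three redundant dofs owned by rank 0):
.vb
    DM  red;
    Vec g,l;
    DMRedundantCreate(PETSC_COMM_WORLD,0,3,&red);
    DMCreateGlobalVector(red,&g);                 /* length 3 on rank 0, length 0 elsewhere */
    DMCreateLocalVector(red,&l);                  /* length 3 on every rank */
    DMGlobalToLocalBegin(red,g,INSERT_VALUES,l);  /* broadcast the owner's values */
    DMGlobalToLocalEnd(red,g,INSERT_VALUES,l);
    VecDestroy(&l);
    VecDestroy(&g);
    DMDestroy(&red);
.ve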
    Level: advanced

.seealso: DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()

@*/
PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidPointer(dm,4);
  ierr = DMCreate(comm,dm);CHKERRQ(ierr);
  ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
  ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
  ierr = DMSetUp(*dm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}