xref: /petsc/src/dm/impls/redundant/dmredundant.c (revision 19e8fbdabb07dcd9104f2d10b4dec626a2e1a063)
1 #include <petsc/private/dmimpl.h>
2 #include <petscdmredundant.h>   /*I      "petscdmredundant.h" I*/
3 
4 typedef struct  {
5   PetscMPIInt rank;                /* owner */
6   PetscInt    N;                   /* total number of dofs */
7   PetscInt    n;                   /* owned number of dofs, n=N on owner, n=0 on non-owners */
8 } DM_Redundant;
9 
10 static PetscErrorCode DMCreateMatrix_Redundant(DM dm,Mat *J)
11 {
12   DM_Redundant           *red = (DM_Redundant*)dm->data;
13   PetscErrorCode         ierr;
14   ISLocalToGlobalMapping ltog;
15   PetscInt               i,rstart,rend,*cols;
16   PetscScalar            *vals;
17 
18   PetscFunctionBegin;
19   ierr = MatCreate(PetscObjectComm((PetscObject)dm),J);CHKERRQ(ierr);
20   ierr = MatSetSizes(*J,red->n,red->n,red->N,red->N);CHKERRQ(ierr);
21   ierr = MatSetType(*J,dm->mattype);CHKERRQ(ierr);
22   ierr = MatSeqAIJSetPreallocation(*J,red->n,NULL);CHKERRQ(ierr);
23   ierr = MatSeqBAIJSetPreallocation(*J,1,red->n,NULL);CHKERRQ(ierr);
24   ierr = MatMPIAIJSetPreallocation(*J,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
25   ierr = MatMPIBAIJSetPreallocation(*J,1,red->n,NULL,red->N-red->n,NULL);CHKERRQ(ierr);
26 
27   ierr = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
28   ierr = MatSetLocalToGlobalMapping(*J,ltog,ltog);CHKERRQ(ierr);
29 
30   ierr = PetscMalloc2(red->N,&cols,red->N,&vals);CHKERRQ(ierr);
31   for (i=0; i<red->N; i++) {
32     cols[i] = i;
33     vals[i] = 0.0;
34   }
35   ierr = MatGetOwnershipRange(*J,&rstart,&rend);CHKERRQ(ierr);
36   for (i=rstart; i<rend; i++) {
37     ierr = MatSetValues(*J,1,&i,red->N,cols,vals,INSERT_VALUES);CHKERRQ(ierr);
38   }
39   ierr = PetscFree2(cols,vals);CHKERRQ(ierr);
40   ierr = MatAssemblyBegin(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
41   ierr = MatAssemblyEnd(*J,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
42   PetscFunctionReturn(0);
43 }
44 
45 static PetscErrorCode DMDestroy_Redundant(DM dm)
46 {
47   PetscErrorCode ierr;
48 
49   PetscFunctionBegin;
50   ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",NULL);CHKERRQ(ierr);
51   ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",NULL);CHKERRQ(ierr);
52   ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",NULL);CHKERRQ(ierr);
53   /* This was originally freed in DMDestroy(), but that prevents reference counting of backend objects */
54   ierr = PetscFree(dm->data);CHKERRQ(ierr);
55   PetscFunctionReturn(0);
56 }
57 
58 static PetscErrorCode DMCreateGlobalVector_Redundant(DM dm,Vec *gvec)
59 {
60   PetscErrorCode         ierr;
61   DM_Redundant           *red = (DM_Redundant*)dm->data;
62   ISLocalToGlobalMapping ltog;
63 
64   PetscFunctionBegin;
65   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
66   PetscValidPointer(gvec,2);
67   *gvec = 0;
68   ierr  = VecCreate(PetscObjectComm((PetscObject)dm),gvec);CHKERRQ(ierr);
69   ierr  = VecSetSizes(*gvec,red->n,red->N);CHKERRQ(ierr);
70   ierr  = VecSetType(*gvec,dm->vectype);CHKERRQ(ierr);
71   ierr  = DMGetLocalToGlobalMapping(dm,&ltog);CHKERRQ(ierr);
72   ierr  = VecSetLocalToGlobalMapping(*gvec,ltog);CHKERRQ(ierr);
73   ierr  = VecSetDM(*gvec,dm);CHKERRQ(ierr);
74   PetscFunctionReturn(0);
75 }
76 
77 static PetscErrorCode DMCreateLocalVector_Redundant(DM dm,Vec *lvec)
78 {
79   PetscErrorCode ierr;
80   DM_Redundant   *red = (DM_Redundant*)dm->data;
81 
82   PetscFunctionBegin;
83   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
84   PetscValidPointer(lvec,2);
85   *lvec = 0;
86   ierr  = VecCreate(PETSC_COMM_SELF,lvec);CHKERRQ(ierr);
87   ierr  = VecSetSizes(*lvec,red->N,red->N);CHKERRQ(ierr);
88   ierr  = VecSetType(*lvec,dm->vectype);CHKERRQ(ierr);
89   ierr  = VecSetDM(*lvec,dm);CHKERRQ(ierr);
90   PetscFunctionReturn(0);
91 }
92 
93 static PetscErrorCode DMLocalToGlobalBegin_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
94 {
95   PetscErrorCode    ierr;
96   DM_Redundant      *red = (DM_Redundant*)dm->data;
97   const PetscScalar *lv;
98   PetscScalar       *gv;
99   PetscMPIInt       rank;
100 
101   PetscFunctionBegin;
102   ierr = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&rank);CHKERRQ(ierr);
103   ierr = VecGetArrayRead(l,&lv);CHKERRQ(ierr);
104   ierr = VecGetArray(g,&gv);CHKERRQ(ierr);
105   switch (imode) {
106   case ADD_VALUES:
107   case MAX_VALUES:
108   {
109     void        *source;
110     PetscScalar *buffer;
111     PetscInt    i;
112     if (rank == red->rank) {
113 #if defined(PETSC_HAVE_MPI_IN_PLACE)
114       buffer = gv;
115       source = MPI_IN_PLACE;
116 #else
117       ierr   = PetscMalloc1(red->N,&buffer);CHKERRQ(ierr);
118       source = buffer;
119 #endif
120       if (imode == ADD_VALUES) for (i=0; i<red->N; i++) buffer[i] = gv[i] + lv[i];
121 #if !defined(PETSC_USE_COMPLEX)
122       if (imode == MAX_VALUES) for (i=0; i<red->N; i++) buffer[i] = PetscMax(gv[i],lv[i]);
123 #endif
124     } else source = (void*)lv;
125     ierr = MPI_Reduce(source,gv,red->N,MPIU_SCALAR,(imode == ADD_VALUES) ? MPIU_SUM : MPIU_MAX,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
126 #if !defined(PETSC_HAVE_MPI_IN_PLACE)
127     if (rank == red->rank) {ierr = PetscFree(buffer);CHKERRQ(ierr);}
128 #endif
129   } break;
130   case INSERT_VALUES:
131     ierr = PetscMemcpy(gv,lv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);
132     break;
133   default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
134   }
135   ierr = VecRestoreArrayRead(l,&lv);CHKERRQ(ierr);
136   ierr = VecRestoreArray(g,&gv);CHKERRQ(ierr);
137   PetscFunctionReturn(0);
138 }
139 
140 static PetscErrorCode DMLocalToGlobalEnd_Redundant(DM dm,Vec l,InsertMode imode,Vec g)
141 {
142   PetscFunctionBegin;
143   PetscFunctionReturn(0);
144 }
145 
146 static PetscErrorCode DMGlobalToLocalBegin_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
147 {
148   PetscErrorCode    ierr;
149   DM_Redundant      *red = (DM_Redundant*)dm->data;
150   const PetscScalar *gv;
151   PetscScalar       *lv;
152 
153   PetscFunctionBegin;
154   ierr = VecGetArrayRead(g,&gv);CHKERRQ(ierr);
155   ierr = VecGetArray(l,&lv);CHKERRQ(ierr);
156   switch (imode) {
157   case INSERT_VALUES:
158     if (red->n) {ierr = PetscMemcpy(lv,gv,red->n*sizeof(PetscScalar));CHKERRQ(ierr);}
159     ierr = MPI_Bcast(lv,red->N,MPIU_SCALAR,red->rank,PetscObjectComm((PetscObject)dm));CHKERRQ(ierr);
160     break;
161   default: SETERRQ(PetscObjectComm((PetscObject)dm),PETSC_ERR_SUP,"InsertMode not supported");
162   }
163   ierr = VecRestoreArrayRead(g,&gv);CHKERRQ(ierr);
164   ierr = VecRestoreArray(l,&lv);CHKERRQ(ierr);
165   PetscFunctionReturn(0);
166 }
167 
168 static PetscErrorCode DMGlobalToLocalEnd_Redundant(DM dm,Vec g,InsertMode imode,Vec l)
169 {
170   PetscFunctionBegin;
171   PetscFunctionReturn(0);
172 }
173 
174 static PetscErrorCode DMSetUp_Redundant(DM dm)
175 {
176   PetscErrorCode ierr;
177   DM_Redundant   *red = (DM_Redundant*)dm->data;
178   PetscInt       i,*globals;
179 
180   PetscFunctionBegin;
181   ierr = PetscMalloc1(red->N,&globals);CHKERRQ(ierr);
182   for (i=0; i<red->N; i++) globals[i] = i;
183   ierr = ISLocalToGlobalMappingCreate(PETSC_COMM_SELF,1,red->N,globals,PETSC_OWN_POINTER,&dm->ltogmap);CHKERRQ(ierr);
184   PetscFunctionReturn(0);
185 }
186 
187 static PetscErrorCode DMView_Redundant(DM dm,PetscViewer viewer)
188 {
189   PetscErrorCode ierr;
190   DM_Redundant   *red = (DM_Redundant*)dm->data;
191   PetscBool      iascii;
192 
193   PetscFunctionBegin;
194   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
195   if (iascii) {
196     ierr = PetscViewerASCIIPrintf(viewer,"redundant: rank=%D N=%D\n",red->rank,red->N);CHKERRQ(ierr);
197   }
198   PetscFunctionReturn(0);
199 }
200 
201 static PetscErrorCode DMCreateColoring_Redundant(DM dm,ISColoringType ctype,ISColoring *coloring)
202 {
203   DM_Redundant    *red = (DM_Redundant*)dm->data;
204   PetscErrorCode  ierr;
205   PetscInt        i,nloc;
206   ISColoringValue *colors;
207 
208   PetscFunctionBegin;
209   switch (ctype) {
210   case IS_COLORING_GLOBAL:
211     nloc = red->n;
212     break;
213   case IS_COLORING_LOCAL:
214     nloc = red->N;
215     break;
216   default: SETERRQ1(PetscObjectComm((PetscObject)dm),PETSC_ERR_ARG_WRONG,"Unknown ISColoringType %d",(int)ctype);
217   }
218   ierr = PetscMalloc1(nloc,&colors);CHKERRQ(ierr);
219   for (i=0; i<nloc; i++) colors[i] = i;
220   ierr = ISColoringCreate(PetscObjectComm((PetscObject)dm),red->N,nloc,colors,PETSC_OWN_POINTER,coloring);CHKERRQ(ierr);
221   ierr = ISColoringSetType(*coloring,ctype);CHKERRQ(ierr);
222   PetscFunctionReturn(0);
223 }
224 
225 static PetscErrorCode DMRefine_Redundant(DM dmc,MPI_Comm comm,DM *dmf)
226 {
227   PetscErrorCode ierr;
228   PetscMPIInt    flag;
229   DM_Redundant   *redc = (DM_Redundant*)dmc->data;
230 
231   PetscFunctionBegin;
232   if (comm == MPI_COMM_NULL) {
233     ierr = PetscObjectGetComm((PetscObject)dmc,&comm);CHKERRQ(ierr);
234   }
235   ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),comm,&flag);CHKERRQ(ierr);
236   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmc),PETSC_ERR_SUP,"cannot change communicators");
237   ierr = DMRedundantCreate(comm,redc->rank,redc->N,dmf);CHKERRQ(ierr);
238   PetscFunctionReturn(0);
239 }
240 
241 static PetscErrorCode DMCoarsen_Redundant(DM dmf,MPI_Comm comm,DM *dmc)
242 {
243   PetscErrorCode ierr;
244   PetscMPIInt    flag;
245   DM_Redundant   *redf = (DM_Redundant*)dmf->data;
246 
247   PetscFunctionBegin;
248   if (comm == MPI_COMM_NULL) {
249     ierr = PetscObjectGetComm((PetscObject)dmf,&comm);CHKERRQ(ierr);
250   }
251   ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmf),comm,&flag);CHKERRQ(ierr);
252   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
253   ierr = DMRedundantCreate(comm,redf->rank,redf->N,dmc);CHKERRQ(ierr);
254   PetscFunctionReturn(0);
255 }
256 
257 static PetscErrorCode DMCreateInterpolation_Redundant(DM dmc,DM dmf,Mat *P,Vec *scale)
258 {
259   PetscErrorCode ierr;
260   DM_Redundant   *redc = (DM_Redundant*)dmc->data;
261   DM_Redundant   *redf = (DM_Redundant*)dmf->data;
262   PetscMPIInt    flag;
263   PetscInt       i,rstart,rend;
264 
265   PetscFunctionBegin;
266   ierr = MPI_Comm_compare(PetscObjectComm((PetscObject)dmc),PetscObjectComm((PetscObject)dmf),&flag);CHKERRQ(ierr);
267   if (flag != MPI_CONGRUENT && flag != MPI_IDENT) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_SUP,"cannot change communicators");
268   if (redc->rank != redf->rank) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Owning rank does not match");
269   if (redc->N != redf->N) SETERRQ(PetscObjectComm((PetscObject)dmf),PETSC_ERR_ARG_INCOMP,"Global size does not match");
270   ierr = MatCreate(PetscObjectComm((PetscObject)dmc),P);CHKERRQ(ierr);
271   ierr = MatSetSizes(*P,redc->n,redc->n,redc->N,redc->N);CHKERRQ(ierr);
272   ierr = MatSetType(*P,MATAIJ);CHKERRQ(ierr);
273   ierr = MatSeqAIJSetPreallocation(*P,1,0);CHKERRQ(ierr);
274   ierr = MatMPIAIJSetPreallocation(*P,1,0,0,0);CHKERRQ(ierr);
275   ierr = MatGetOwnershipRange(*P,&rstart,&rend);CHKERRQ(ierr);
276   for (i=rstart; i<rend; i++) {ierr = MatSetValue(*P,i,i,1.0,INSERT_VALUES);CHKERRQ(ierr);}
277   ierr = MatAssemblyBegin(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
278   ierr = MatAssemblyEnd(*P,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
279   if (scale) {ierr = DMCreateInterpolationScale(dmc,dmf,*P,scale);CHKERRQ(ierr);}
280   PetscFunctionReturn(0);
281 }
282 
283 /*@
284     DMRedundantSetSize - Sets the size of a densely coupled redundant object
285 
286     Collective on DM
287 
288     Input Parameter:
289 +   dm - redundant DM
290 .   rank - rank of process to own redundant degrees of freedom
291 -   N - total number of redundant degrees of freedom
292 
293     Level: advanced
294 
295 .seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantGetSize()
296 @*/
297 PetscErrorCode DMRedundantSetSize(DM dm,PetscMPIInt rank,PetscInt N)
298 {
299   PetscErrorCode ierr;
300 
301   PetscFunctionBegin;
302   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
303   PetscValidType(dm,1);
304   PetscValidLogicalCollectiveMPIInt(dm,rank,2);
305   PetscValidLogicalCollectiveInt(dm,N,3);
306   ierr = PetscTryMethod(dm,"DMRedundantSetSize_C",(DM,PetscMPIInt,PetscInt),(dm,rank,N));CHKERRQ(ierr);
307   PetscFunctionReturn(0);
308 }
309 
310 /*@
311     DMRedundantGetSize - Gets the size of a densely coupled redundant object
312 
313     Not Collective
314 
315     Input Parameter:
316 +   dm - redundant DM
317 
318     Output Parameters:
319 +   rank - rank of process to own redundant degrees of freedom (or NULL)
320 -   N - total number of redundant degrees of freedom (or NULL)
321 
322     Level: advanced
323 
324 .seealso DMDestroy(), DMCreateGlobalVector(), DMRedundantCreate(), DMRedundantSetSize()
325 @*/
326 PetscErrorCode DMRedundantGetSize(DM dm,PetscMPIInt *rank,PetscInt *N)
327 {
328   PetscErrorCode ierr;
329 
330   PetscFunctionBegin;
331   PetscValidHeaderSpecific(dm,DM_CLASSID,1);
332   PetscValidType(dm,1);
333   ierr = PetscUseMethod(dm,"DMRedundantGetSize_C",(DM,PetscMPIInt*,PetscInt*),(dm,rank,N));CHKERRQ(ierr);
334   PetscFunctionReturn(0);
335 }
336 
337 static PetscErrorCode DMRedundantSetSize_Redundant(DM dm,PetscMPIInt rank,PetscInt N)
338 {
339   DM_Redundant   *red = (DM_Redundant*)dm->data;
340   PetscErrorCode ierr;
341   PetscMPIInt    myrank;
342 
343   PetscFunctionBegin;
344   ierr      = MPI_Comm_rank(PetscObjectComm((PetscObject)dm),&myrank);CHKERRQ(ierr);
345   red->rank = rank;
346   red->N    = N;
347   red->n    = (myrank == rank) ? N : 0;
348   PetscFunctionReturn(0);
349 }
350 
351 static PetscErrorCode DMRedundantGetSize_Redundant(DM dm,PetscInt *rank,PetscInt *N)
352 {
353   DM_Redundant *red = (DM_Redundant*)dm->data;
354 
355   PetscFunctionBegin;
356   if (rank) *rank = red->rank;
357   if (N)    *N = red->N;
358   PetscFunctionReturn(0);
359 }
360 
361 static PetscErrorCode DMSetUpGLVisViewer_Redundant(PetscObject odm, PetscViewer viewer)
362 {
363   PetscFunctionBegin;
364   PetscFunctionReturn(0);
365 }
366 
367 /*MC
368    DMREDUNDANT = "redundant" - A DM object that is used to manage data for a small set of dense globally coupled variables.
369          In the global representation of the vector the variables are all stored on a single MPI process (all the other MPI processes have
370          no variables) in the local representation all the variables are stored on ALL the MPI processes (because they are all needed for each
371          processes local computations).
372 
373          This DM is generally used inside a DMCOMPOSITE object. For example, it may be used to store continuation parameters for a bifurcation problem.
374 
375 
376   Level: intermediate
377 
378 .seealso: DMType, DMCOMPOSITE,  DMCreate(), DMRedundantSetSize(), DMRedundantGetSize()
379 M*/
380 
381 PETSC_EXTERN PetscErrorCode DMCreate_Redundant(DM dm)
382 {
383   PetscErrorCode ierr;
384   DM_Redundant   *red;
385 
386   PetscFunctionBegin;
387   ierr     = PetscNewLog(dm,&red);CHKERRQ(ierr);
388   dm->data = red;
389 
390   ierr = PetscObjectChangeTypeName((PetscObject)dm,DMREDUNDANT);CHKERRQ(ierr);
391 
392   dm->ops->setup               = DMSetUp_Redundant;
393   dm->ops->view                = DMView_Redundant;
394   dm->ops->createglobalvector  = DMCreateGlobalVector_Redundant;
395   dm->ops->createlocalvector   = DMCreateLocalVector_Redundant;
396   dm->ops->creatematrix        = DMCreateMatrix_Redundant;
397   dm->ops->destroy             = DMDestroy_Redundant;
398   dm->ops->globaltolocalbegin  = DMGlobalToLocalBegin_Redundant;
399   dm->ops->globaltolocalend    = DMGlobalToLocalEnd_Redundant;
400   dm->ops->localtoglobalbegin  = DMLocalToGlobalBegin_Redundant;
401   dm->ops->localtoglobalend    = DMLocalToGlobalEnd_Redundant;
402   dm->ops->refine              = DMRefine_Redundant;
403   dm->ops->coarsen             = DMCoarsen_Redundant;
404   dm->ops->createinterpolation = DMCreateInterpolation_Redundant;
405   dm->ops->getcoloring         = DMCreateColoring_Redundant;
406 
407   ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantSetSize_C",DMRedundantSetSize_Redundant);CHKERRQ(ierr);
408   ierr = PetscObjectComposeFunction((PetscObject)dm,"DMRedundantGetSize_C",DMRedundantGetSize_Redundant);CHKERRQ(ierr);
409   ierr = PetscObjectComposeFunction((PetscObject)dm,"DMSetUpGLVisViewer_C",DMSetUpGLVisViewer_Redundant);CHKERRQ(ierr);
410   PetscFunctionReturn(0);
411 }
412 
413 /*@C
414     DMRedundantCreate - Creates a DM object, used to manage data for dense globally coupled variables
415 
416     Collective on MPI_Comm
417 
418     Input Parameter:
419 +   comm - the processors that will share the global vector
420 .   rank - rank to own the redundant values
421 -   N - total number of degrees of freedom
422 
423     Output Parameters:
424 .   dm - the redundant DM
425 
426     Level: advanced
427 
428 .seealso DMDestroy(), DMCreateGlobalVector(), DMCreateMatrix(), DMCompositeAddDM(), DMREDUNDANT, DMSetType(), DMRedundantSetSize(), DMRedundantGetSize()
429 
430 @*/
431 PetscErrorCode DMRedundantCreate(MPI_Comm comm,PetscMPIInt rank,PetscInt N,DM *dm)
432 {
433   PetscErrorCode ierr;
434 
435   PetscFunctionBegin;
436   PetscValidPointer(dm,2);
437   ierr = DMCreate(comm,dm);CHKERRQ(ierr);
438   ierr = DMSetType(*dm,DMREDUNDANT);CHKERRQ(ierr);
439   ierr = DMRedundantSetSize(*dm,rank,N);CHKERRQ(ierr);
440   ierr = DMSetUp(*dm);CHKERRQ(ierr);
441   PetscFunctionReturn(0);
442 }
443