xref: /petsc/src/dm/impls/da/dagtona.c (revision 58d68138c660dfb4e9f5b03334792cd4f2ffd7cc)
1 
/*
     Tools to help solve the coarse grid problem redundantly.
  Provides two scatter contexts that (1) map from the usual global vector
  to the entire vector, in NATURAL numbering, on every process, and (2)
  extract from the entire vector (held in natural numbering on each process)
  this process's piece in GLOBAL numbering.
*/
9 
10 #include <petsc/private/dmdaimpl.h> /*I   "petscdmda.h"   I*/
11 
12 /*@
   DMDAGlobalToNaturalAllCreate - Creates a scatter context that maps from the
     usual global vector to a copy of the entire vector, in natural numbering, on each process
15 
16    Collective on da
17 
18    Input Parameter:
19 .  da - the distributed array context
20 
21    Output Parameter:
22 .  scatter - the scatter context
23 
24    Level: advanced
25 
26 .seealso: `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
27           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
28 @*/
29 PetscErrorCode DMDAGlobalToNaturalAllCreate(DM da, VecScatter *scatter) {
30   PetscInt N;
31   IS       from, to;
32   Vec      tmplocal, global;
33   AO       ao;
34   DM_DA   *dd = (DM_DA *)da->data;
35 
36   PetscFunctionBegin;
37   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
38   PetscValidPointer(scatter, 2);
39   PetscCall(DMDAGetAO(da, &ao));
40 
41   /* create the scatter context */
42   PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, dd->Nlocal, PETSC_DETERMINE, NULL, &global));
43   PetscCall(VecGetSize(global, &N));
44   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &to));
45   PetscCall(AOPetscToApplicationIS(ao, to));
46   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), N, 0, 1, &from));
47   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, N, NULL, &tmplocal));
48   PetscCall(VecScatterCreate(global, from, tmplocal, to, scatter));
49   PetscCall(VecDestroy(&tmplocal));
50   PetscCall(VecDestroy(&global));
51   PetscCall(ISDestroy(&from));
52   PetscCall(ISDestroy(&to));
53   PetscFunctionReturn(0);
54 }
55 
56 /*@
57    DMDANaturalAllToGlobalCreate - Creates a scatter context that maps from a copy
58      of the entire vector on each processor to its local part in the global vector.
59 
60    Collective on da
61 
62    Input Parameter:
63 .  da - the distributed array context
64 
65    Output Parameter:
66 .  scatter - the scatter context
67 
68    Level: advanced
69 
70 .seealso: `DMDAGlobalToNaturalEnd()`, `DMLocalToGlobalBegin()`, `DMDACreate2d()`,
71           `DMGlobalToLocalBegin()`, `DMGlobalToLocalEnd()`, `DMDACreateNaturalVector()`
72 @*/
73 PetscErrorCode DMDANaturalAllToGlobalCreate(DM da, VecScatter *scatter) {
74   DM_DA   *dd = (DM_DA *)da->data;
75   PetscInt M, m = dd->Nlocal, start;
76   IS       from, to;
77   Vec      tmplocal, global;
78   AO       ao;
79 
80   PetscFunctionBegin;
81   PetscValidHeaderSpecificType(da, DM_CLASSID, 1, DMDA);
82   PetscValidPointer(scatter, 2);
83   PetscCall(DMDAGetAO(da, &ao));
84 
85   /* create the scatter context */
86   PetscCall(MPIU_Allreduce(&m, &M, 1, MPIU_INT, MPI_SUM, PetscObjectComm((PetscObject)da)));
87   PetscCall(VecCreateMPIWithArray(PetscObjectComm((PetscObject)da), dd->w, m, PETSC_DETERMINE, NULL, &global));
88   PetscCall(VecGetOwnershipRange(global, &start, NULL));
89   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &from));
90   PetscCall(AOPetscToApplicationIS(ao, from));
91   PetscCall(ISCreateStride(PetscObjectComm((PetscObject)da), m, start, 1, &to));
92   PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, dd->w, M, NULL, &tmplocal));
93   PetscCall(VecScatterCreate(tmplocal, from, global, to, scatter));
94   PetscCall(VecDestroy(&tmplocal));
95   PetscCall(VecDestroy(&global));
96   PetscCall(ISDestroy(&from));
97   PetscCall(ISDestroy(&to));
98   PetscFunctionReturn(0);
99 }
100