xref: /petsc/src/sys/utils/mpiu.c (revision 74df5e01f481fb3fe90b32c3b4345ef0122eb3ce)
1 #include <petscsys.h> /*I  "petscsys.h"  I*/
2 #include <petsc/private/petscimpl.h>
3 /*
4     Note that tag of 0 is ok because comm is a private communicator
5   generated below just for these routines.
6 */
7 
8 PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng)
9 {
10   PetscMPIInt rank, size, tag = 0;
11   MPI_Status  status;
12 
13   PetscFunctionBegin;
14   PetscCallMPI(MPI_Comm_size(comm, &size));
15   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
16   PetscCallMPI(MPI_Comm_rank(comm, &rank));
17   if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
18   /* Send to the next process in the group unless we are the last process */
19   if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
20   PetscFunctionReturn(PETSC_SUCCESS);
21 }
22 
23 PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng)
24 {
25   PetscMPIInt rank, size, tag = 0;
26   MPI_Status  status;
27 
28   PetscFunctionBegin;
29   PetscCallMPI(MPI_Comm_rank(comm, &rank));
30   PetscCallMPI(MPI_Comm_size(comm, &size));
31   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
32 
33   /* Send to the first process in the next group */
34   if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
35   if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
36   PetscFunctionReturn(PETSC_SUCCESS);
37 }
38 
39 /* ---------------------------------------------------------------------*/
40 /*
41     The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
42   is attached to a communicator that manages the sequential phase code below.
43 */
44 PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
45 
46 /*@
47   PetscSequentialPhaseBegin - Begins a sequential section of code.
48 
49   Collective
50 
51   Input Parameters:
52 + comm - Communicator to sequentialize over
53 - ng   - Number in processor group.  This many processes are allowed to execute
54    at the same time (usually 1)
55 
56   Level: intermediate
57 
58   Notes:
59   `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
60   way to force a section of code to be executed by the processes in
61   rank order.  Typically, this is done with
62 .vb
63       PetscSequentialPhaseBegin(comm, 1);
64       <code to be executed sequentially>
65       PetscSequentialPhaseEnd(comm, 1);
66 .ve
67 
68   You should use `PetscSynchronizedPrintf()` to ensure output between MPI ranks is properly order and not these routines.
69 
70 .seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
71 @*/
72 PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng)
73 {
74   PetscMPIInt size;
75   MPI_Comm    local_comm, *addr_local_comm;
76 
77   PetscFunctionBegin;
78   PetscCall(PetscSysInitializePackage());
79   PetscCallMPI(MPI_Comm_size(comm, &size));
80   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
81 
82   /* Get the private communicator for the sequential operations */
83   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));
84 
85   PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
86   PetscCall(PetscMalloc1(1, &addr_local_comm));
87 
88   *addr_local_comm = local_comm;
89 
90   PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
91   PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
92   PetscFunctionReturn(PETSC_SUCCESS);
93 }
94 
95 /*@
96   PetscSequentialPhaseEnd - Ends a sequential section of code.
97 
98   Collective
99 
100   Input Parameters:
101 + comm - Communicator to sequentialize.
102 - ng   - Number in processor group.  This many processes are allowed to execute
103    at the same time (usually 1)
104 
105   Level: intermediate
106 
107   Note:
108   See `PetscSequentialPhaseBegin()` for more details.
109 
110 .seealso: `PetscSequentialPhaseBegin()`
111 @*/
112 PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng)
113 {
114   PetscMPIInt size, iflg;
115   MPI_Comm    local_comm, *addr_local_comm;
116 
117   PetscFunctionBegin;
118   PetscCallMPI(MPI_Comm_size(comm, &size));
119   if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
120 
121   PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &iflg));
122   PetscCheck(iflg, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
123   local_comm = *addr_local_comm;
124 
125   PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));
126 
127   PetscCall(PetscFree(addr_local_comm));
128   PetscCallMPI(MPI_Comm_free(&local_comm));
129   PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
130   PetscFunctionReturn(PETSC_SUCCESS);
131 }
132 
133 /*@
134   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
135 
136   Collective
137 
138   Input Parameters:
139 + comm      - The MPI communicator to reduce with
140 - minMaxVal - An array with the local min and max
141 
142   Output Parameter:
143 . minMaxValGlobal - An array with the global min and max
144 
145   Level: beginner
146 
147 .seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
148 @*/
149 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
150 {
151   PetscInt  sendbuf[3], recvbuf[3];
152   PetscBool hasminint = (PetscBool)(minMaxVal[0] == PETSC_MIN_INT);
153 
154   PetscFunctionBegin;
155   sendbuf[0] = hasminint ? PETSC_MIN_INT : -minMaxVal[0]; /* Note that -PETSC_INT_MIN = PETSC_INT_MIN: ternary to suppress sanitizer warnings */
156   sendbuf[1] = minMaxVal[1];
157   sendbuf[2] = hasminint ? 1 : 0; /* Are there PETSC_INT_MIN in minMaxVal[0]? */
158   PetscCallMPI(MPIU_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
159   minMaxValGlobal[0] = recvbuf[2] ? PETSC_INT_MIN : -recvbuf[0];
160   minMaxValGlobal[1] = recvbuf[1];
161   PetscFunctionReturn(PETSC_SUCCESS);
162 }
163 
164 /*@
165   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
166 
167   Collective
168 
169   Input Parameters:
170 + comm      - The MPI communicator to reduce with
171 - minMaxVal - An array with the local min and max
172 
173   Output Parameter:
174 . minMaxValGlobal - An array with the global min and max
175 
176   Level: beginner
177 
178 .seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
179 @*/
180 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
181 {
182   PetscReal sendbuf[2];
183 
184   PetscFunctionBegin;
185   sendbuf[0] = -minMaxVal[0];
186   sendbuf[1] = minMaxVal[1];
187   PetscCallMPI(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
188   minMaxValGlobal[0] = -minMaxValGlobal[0];
189   PetscFunctionReturn(PETSC_SUCCESS);
190 }
191