
#include <petscsys.h> /*I  "petscsys.h"  I*/
#include <petsc/private/petscimpl.h>
/*
    Note that a tag of 0 is OK because comm is a private communicator
  generated below just for these routines.
*/

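/*
   The two routines below implement the sequential phase by relaying a zero-length message
   around the ranks of comm. In Begin, every rank except rank 0 waits for the message from
   the previous rank before returning, and then forwards it to the next rank unless it is
   the last rank of its group (or the last rank overall). In End, the last rank of each
   group sends the message on to the first rank of the next group; the final send wraps
   around to rank 0, which completes the phase.
*/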
PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
  /* Send to the next process unless we are the last process in our group or the last process overall */
  if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Send to the first process in the next group */
  if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
  if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval identifies an MPI attribute, attached to the user's communicator,
  that stores the duplicated communicator used by the sequential phase code below.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;

/*@
  PetscSequentialPhaseBegin - Begins a sequential section of code.

  Collective

  Input Parameters:
+ comm - Communicator to sequentialize over
- ng   - Number of processes in a group.  This many processes are allowed to execute
   at the same time (usually 1)

  Level: intermediate

  Notes:
  `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
  way to force a section of code to be executed by the processes in
  rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

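  For example, a section in which every rank appends to the same file can be serialized as
  follows (the file name and the data written are only illustrative):
.vb
      FILE *fd;

      PetscCall(PetscSequentialPhaseBegin(PETSC_COMM_WORLD, 1));
      fd = fopen("shared.log", "a");
      fprintf(fd, "rank-local data written one rank at a time\n");
      fclose(fd);
      PetscCall(PetscSequentialPhaseEnd(PETSC_COMM_WORLD, 1));
.ve
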
  Use `PetscSynchronizedPrintf()`, not these routines, to ensure that output from different MPI ranks is properly ordered.

.seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
@*/
PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng)
{
  PetscMPIInt size;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCall(PetscSysInitializePackage());
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Get the private communicator for the sequential operations */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));

  PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
  PetscCall(PetscMalloc1(1, &addr_local_comm));

  *addr_local_comm = local_comm;

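  /* Stash the duplicated communicator as an attribute of comm so that PetscSequentialPhaseEnd() can retrieve and free it */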
  PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
  PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PetscSequentialPhaseEnd - Ends a sequential section of code.

  Collective

  Input Parameters:
+ comm - Communicator to sequentialize over
- ng   - Number of processes in a group.  This many processes are allowed to execute
   at the same time (usually 1)

  Level: intermediate

  Note:
  See `PetscSequentialPhaseBegin()` for more details.

.seealso: `PetscSequentialPhaseBegin()`
@*/
PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng)
{
  PetscMPIInt size, flag;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));

  PetscCall(PetscFree(addr_local_comm));
  PetscCallMPI(MPI_Comm_free(&local_comm));
  PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PetscGlobalMinMaxInt - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce with
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

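  Example Usage:
  A minimal sketch of computing the global range of rank-local values (the local values below are only illustrative):
.vb
      PetscInt       i, minMax[2] = {PETSC_MAX_INT, PETSC_MIN_INT}, minMaxGlobal[2];
      const PetscInt nlocal = 3, vals[3] = {2, -7, 5}; /* values owned by this rank */

      for (i = 0; i < nlocal; i++) {
        minMax[0] = PetscMin(minMax[0], vals[i]);
        minMax[1] = PetscMax(minMax[1], vals[i]);
      }
      PetscCall(PetscGlobalMinMaxInt(PETSC_COMM_WORLD, minMax, minMaxGlobal));
      /* minMaxGlobal[0] now holds the smallest and minMaxGlobal[1] the largest value over all ranks */
.ve
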
.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
@*/
PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
{
  PetscInt sendbuf[3], recvbuf[3];

  PetscFunctionBegin;
  sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
  sendbuf[1] = minMaxVal[1];
  sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Flag whether the local min is PETSC_MIN_INT, whose negation overflows */
  PetscCall(MPIU_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
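  /* If any rank's local minimum was PETSC_MIN_INT the negation overflowed, so restore PETSC_MIN_INT directly; otherwise undo the negation */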
  minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
  minMaxValGlobal[1] = recvbuf[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce with
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

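  Note:
  The global minimum and maximum are obtained with a single maximum reduction by negating the local minimum before the reduction and negating the corresponding entry of the result afterwards.
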
.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
{
  PetscReal sendbuf[2];

  PetscFunctionBegin;
  sendbuf[0] = -minMaxVal[0];
  sendbuf[1] = minMaxVal[1];
  PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
  minMaxValGlobal[0] = -minMaxValGlobal[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}