xref: /petsc/src/sys/utils/mpiu.c (revision 9371c9d470a9602b6d10a8bf50c9b2280a79e45a)
1 
2 #include <petscsys.h> /*I  "petscsys.h"  I*/
3 #include <petsc/private/petscimpl.h>
4 /*
5     Note that tag of 0 is ok because comm is a private communicator
6   generated below just for these routines.
7 */
8 
9 PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng) {
10   PetscMPIInt rank, size, tag = 0;
11   MPI_Status  status;
12 
13   PetscFunctionBegin;
14   PetscCallMPI(MPI_Comm_size(comm, &size));
15   if (size == 1) PetscFunctionReturn(0);
16   PetscCallMPI(MPI_Comm_rank(comm, &rank));
17   if (rank) { PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status)); }
18   /* Send to the next process in the group unless we are the last process */
19   if ((rank % ng) < ng - 1 && rank != size - 1) { PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm)); }
20   PetscFunctionReturn(0);
21 }
22 
23 PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng) {
24   PetscMPIInt rank, size, tag = 0;
25   MPI_Status  status;
26 
27   PetscFunctionBegin;
28   PetscCallMPI(MPI_Comm_rank(comm, &rank));
29   PetscCallMPI(MPI_Comm_size(comm, &size));
30   if (size == 1) PetscFunctionReturn(0);
31 
32   /* Send to the first process in the next group */
33   if ((rank % ng) == ng - 1 || rank == size - 1) { PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm)); }
34   if (rank == 0) { PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status)); }
35   PetscFunctionReturn(0);
36 }
37 
38 /* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.

    It is created lazily by the first call to PetscSequentialPhaseBegin() and
  holds (per communicator) a heap-allocated MPI_Comm* pointing at the private
  duplicated communicator used for the token passing; PetscSequentialPhaseEnd()
  frees that storage and deletes the attribute.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
44 
45 /*@
46    PetscSequentialPhaseBegin - Begins a sequential section of code.
47 
48    Collective
49 
50    Input Parameters:
51 +  comm - Communicator to sequentialize.
52 -  ng   - Number in processor group.  This many processes are allowed to execute
53    at the same time (usually 1)
54 
55    Level: intermediate
56 
57    Notes:
58    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
59    way to force a section of code to be executed by the processes in
60    rank order.  Typically, this is done with
61 .vb
62       PetscSequentialPhaseBegin(comm, 1);
63       <code to be executed sequentially>
64       PetscSequentialPhaseEnd(comm, 1);
65 .ve
66 
67    Often, the sequential code contains output statements (e.g., printf) to
68    be executed.  Note that you may need to flush the I/O buffers before
69    calling PetscSequentialPhaseEnd().  Also, note that some systems do
70    not propagate I/O in any order to the controling terminal (in other words,
71    even if you flush the output, you may not get the data in the order
72    that you want).
73 
74 .seealso: `PetscSequentialPhaseEnd()`
75 
76 @*/
77 PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng) {
78   PetscMPIInt size;
79   MPI_Comm    local_comm, *addr_local_comm;
80 
81   PetscFunctionBegin;
82   PetscCall(PetscSysInitializePackage());
83   PetscCallMPI(MPI_Comm_size(comm, &size));
84   if (size == 1) PetscFunctionReturn(0);
85 
86   /* Get the private communicator for the sequential operations */
87   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) { PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL)); }
88 
89   PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
90   PetscCall(PetscMalloc1(1, &addr_local_comm));
91 
92   *addr_local_comm = local_comm;
93 
94   PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
95   PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
96   PetscFunctionReturn(0);
97 }
98 
99 /*@
100    PetscSequentialPhaseEnd - Ends a sequential section of code.
101 
102    Collective
103 
104    Input Parameters:
105 +  comm - Communicator to sequentialize.
106 -  ng   - Number in processor group.  This many processes are allowed to execute
107    at the same time (usually 1)
108 
109    Level: intermediate
110 
111    Notes:
112    See PetscSequentialPhaseBegin() for more details.
113 
114 .seealso: `PetscSequentialPhaseBegin()`
115 
116 @*/
117 PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng) {
118   PetscMPIInt size, flag;
119   MPI_Comm    local_comm, *addr_local_comm;
120 
121   PetscFunctionBegin;
122   PetscCallMPI(MPI_Comm_size(comm, &size));
123   if (size == 1) PetscFunctionReturn(0);
124 
125   PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
126   PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
127   local_comm = *addr_local_comm;
128 
129   PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));
130 
131   PetscCall(PetscFree(addr_local_comm));
132   PetscCallMPI(MPI_Comm_free(&local_comm));
133   PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
134   PetscFunctionReturn(0);
135 }
136 
137 /*@C
138   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
139 
140   Collective
141 
142   Input Parameter:
143 . minMaxVal - An array with the local min and max
144 
145   Output Parameter:
146 . minMaxValGlobal - An array with the global min and max
147 
148   Level: beginner
149 
150 .seealso: `PetscSplitOwnership()`
151 @*/
152 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2]) {
153   PetscInt sendbuf[3], recvbuf[3];
154 
155   PetscFunctionBegin;
156   sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
157   sendbuf[1] = minMaxVal[1];
158   sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Are there PETSC_MIN_INT in minMaxVal[0]? */
159   PetscCallMPI(MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
160   minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
161   minMaxValGlobal[1] = recvbuf[1];
162   PetscFunctionReturn(0);
163 }
164 
165 /*@C
166   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
167 
168   Collective
169 
170   Input Parameter:
171 . minMaxVal - An array with the local min and max
172 
173   Output Parameter:
174 . minMaxValGlobal - An array with the global min and max
175 
176   Level: beginner
177 
178 .seealso: `PetscSplitOwnership()`
179 @*/
180 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2]) {
181   PetscReal sendbuf[2];
182 
183   PetscFunctionBegin;
184   sendbuf[0] = -minMaxVal[0];
185   sendbuf[1] = minMaxVal[1];
186   PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
187   minMaxValGlobal[0] = -minMaxValGlobal[0];
188   PetscFunctionReturn(0);
189 }
190