xref: /petsc/src/sys/utils/mpiu.c (revision 0ea77eda237b1068ff0d8bfa28c3463dc2087695)
1 
2 #include <petscsys.h> /*I  "petscsys.h"  I*/
3 #include <petsc/private/petscimpl.h>
4 /*
5     Note that tag of 0 is ok because comm is a private communicator
6   generated below just for these routines.
7 */
8 
9 PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng) {
10   PetscMPIInt rank, size, tag = 0;
11   MPI_Status  status;
12 
13   PetscFunctionBegin;
14   PetscCallMPI(MPI_Comm_size(comm, &size));
15   if (size == 1) PetscFunctionReturn(0);
16   PetscCallMPI(MPI_Comm_rank(comm, &rank));
17   if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
18   /* Send to the next process in the group unless we are the last process */
19   if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
20   PetscFunctionReturn(0);
21 }
22 
23 PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng) {
24   PetscMPIInt rank, size, tag = 0;
25   MPI_Status  status;
26 
27   PetscFunctionBegin;
28   PetscCallMPI(MPI_Comm_rank(comm, &rank));
29   PetscCallMPI(MPI_Comm_size(comm, &size));
30   if (size == 1) PetscFunctionReturn(0);
31 
32   /* Send to the first process in the next group */
33   if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
34   if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
35   PetscFunctionReturn(0);
36 }
37 
/* ---------------------------------------------------------------------*/
/*
    Petsc_Seq_keyval is the MPI attribute keyval under which the private
  duplicated communicator used by the sequential-phase routines below is
  cached on the user's communicator. MPI_KEYVAL_INVALID means the keyval
  has not yet been created (it is created lazily in PetscSequentialPhaseBegin()).
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
44 
/*@
   PetscSequentialPhaseBegin - Begins a sequential section of code.

   Collective

   Input Parameters:
+  comm - Communicator to sequentialize.
-  ng   - Number in processor group.  This many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Notes:
   `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
   way to force a section of code to be executed by the processes in
   rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

   You should use `PetscSynchronizedPrintf()` to ensure output between MPI ranks is properly ordered and not these routines.

.seealso: `PetscSequentialPhaseEnd()`,`PetscSynchronizedPrintf()`
@*/
PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng) {
  PetscMPIInt size;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCall(PetscSysInitializePackage());
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* A single process needs no sequencing */
  if (size == 1) PetscFunctionReturn(0);

  /* Lazily create the keyval used to cache the private communicator on comm */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));

  /* Duplicate comm so our zero-tag messages cannot collide with user traffic;
     the heap copy of the handle is what gets stored as the attribute value */
  PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
  PetscCall(PetscMalloc1(1, &addr_local_comm));

  *addr_local_comm = local_comm;

  /* NOTE(review): if the attribute is already set (nested Begin without a
     matching End) this overwrites it and the earlier dup'ed comm leaks —
     TODO confirm nested use is disallowed by the callers */
  PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
  PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
  PetscFunctionReturn(0);
}
92 
/*@
   PetscSequentialPhaseEnd - Ends a sequential section of code.

   Collective

   Input Parameters:
+  comm - Communicator to sequentialize.
-  ng   - Number in processor group.  This many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Note:
   See `PetscSequentialPhaseBegin()` for more details. `comm` and `ng` must
   match the values passed to the corresponding `PetscSequentialPhaseBegin()`.

.seealso: `PetscSequentialPhaseBegin()`
@*/
PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng) {
  PetscMPIInt size, flag;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  /* A single process needs no sequencing (and Begin stored no attribute) */
  if (size == 1) PetscFunctionReturn(0);

  /* Retrieve the private communicator cached by PetscSequentialPhaseBegin() */
  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));

  /* Release everything Begin allocated: the heap handle, the dup'ed comm,
     and the attribute entry on the user's communicator */
  PetscCall(PetscFree(addr_local_comm));
  PetscCallMPI(MPI_Comm_free(&local_comm));
  PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
  PetscFunctionReturn(0);
}
129 
130 /*@C
131   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
132 
133   Collective
134 
135   Input Parameter:
136 . minMaxVal - An array with the local min and max
137 
138   Output Parameter:
139 . minMaxValGlobal - An array with the global min and max
140 
141   Level: beginner
142 
143 .seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
144 @*/
145 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2]) {
146   PetscInt sendbuf[3], recvbuf[3];
147 
148   PetscFunctionBegin;
149   sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
150   sendbuf[1] = minMaxVal[1];
151   sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Are there PETSC_MIN_INT in minMaxVal[0]? */
152   PetscCallMPI(MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
153   minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
154   minMaxValGlobal[1] = recvbuf[1];
155   PetscFunctionReturn(0);
156 }
157 
158 /*@C
159   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
160 
161   Collective
162 
163   Input Parameter:
164 . minMaxVal - An array with the local min and max
165 
166   Output Parameter:
167 . minMaxValGlobal - An array with the global min and max
168 
169   Level: beginner
170 
171 .seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
172 @*/
173 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2]) {
174   PetscReal sendbuf[2];
175 
176   PetscFunctionBegin;
177   sendbuf[0] = -minMaxVal[0];
178   sendbuf[1] = minMaxVal[1];
179   PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
180   minMaxValGlobal[0] = -minMaxValGlobal[0];
181   PetscFunctionReturn(0);
182 }
183