xref: /petsc/src/sys/utils/mpiu.c (revision e84e3fd21fa5912dca3017339ab4b3699e3a9c51)
1 
2 #include <petscsys.h>        /*I  "petscsys.h"  I*/
3 #include <petsc/private/petscimpl.h>
4 /*
5     Note that tag of 0 is ok because comm is a private communicator
6   generated below just for these routines.
7 */
8 
9 PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
10 {
11   PetscMPIInt    rank,size,tag = 0;
12   MPI_Status     status;
13 
14   PetscFunctionBegin;
15   PetscCallMPI(MPI_Comm_size(comm,&size));
16   if (size == 1) PetscFunctionReturn(0);
17   PetscCallMPI(MPI_Comm_rank(comm,&rank));
18   if (rank) {
19     PetscCallMPI(MPI_Recv(NULL,0,MPI_INT,rank-1,tag,comm,&status));
20   }
21   /* Send to the next process in the group unless we are the last process */
22   if ((rank % ng) < ng - 1 && rank != size - 1) {
23     PetscCallMPI(MPI_Send(NULL,0,MPI_INT,rank + 1,tag,comm));
24   }
25   PetscFunctionReturn(0);
26 }
27 
28 PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
29 {
30   PetscMPIInt    rank,size,tag = 0;
31   MPI_Status     status;
32 
33   PetscFunctionBegin;
34   PetscCallMPI(MPI_Comm_rank(comm,&rank));
35   PetscCallMPI(MPI_Comm_size(comm,&size));
36   if (size == 1) PetscFunctionReturn(0);
37 
38   /* Send to the first process in the next group */
39   if ((rank % ng) == ng - 1 || rank == size - 1) {
40     PetscCallMPI(MPI_Send(NULL,0,MPI_INT,(rank + 1) % size,tag,comm));
41   }
42   if (rank == 0) {
43     PetscCallMPI(MPI_Recv(NULL,0,MPI_INT,size-1,tag,comm,&status));
44   }
45   PetscFunctionReturn(0);
46 }
47 
48 /* ---------------------------------------------------------------------*/
49 /*
50     The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
51   is attached to a communicator that manages the sequential phase code below.
52 */
53 PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
54 
55 /*@
56    PetscSequentialPhaseBegin - Begins a sequential section of code.
57 
58    Collective
59 
60    Input Parameters:
61 +  comm - Communicator to sequentialize.
62 -  ng   - Number in processor group.  This many processes are allowed to execute
63    at the same time (usually 1)
64 
65    Level: intermediate
66 
67    Notes:
68    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
69    way to force a section of code to be executed by the processes in
70    rank order.  Typically, this is done with
71 .vb
72       PetscSequentialPhaseBegin(comm, 1);
73       <code to be executed sequentially>
74       PetscSequentialPhaseEnd(comm, 1);
75 .ve
76 
77    Often, the sequential code contains output statements (e.g., printf) to
78    be executed.  Note that you may need to flush the I/O buffers before
79    calling PetscSequentialPhaseEnd().  Also, note that some systems do
   not propagate I/O in any order to the controlling terminal (in other words,
81    even if you flush the output, you may not get the data in the order
82    that you want).
83 
84 .seealso: PetscSequentialPhaseEnd()
85 
86 @*/
87 PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
88 {
89   PetscMPIInt    size;
90   MPI_Comm       local_comm,*addr_local_comm;
91 
92   PetscFunctionBegin;
93   PetscCall(PetscSysInitializePackage());
94   PetscCallMPI(MPI_Comm_size(comm,&size));
95   if (size == 1) PetscFunctionReturn(0);
96 
97   /* Get the private communicator for the sequential operations */
98   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
99     PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,NULL));
100   }
101 
102   PetscCallMPI(MPI_Comm_dup(comm,&local_comm));
103   PetscCall(PetscMalloc1(1,&addr_local_comm));
104 
105   *addr_local_comm = local_comm;
106 
107   PetscCallMPI(MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm));
108   PetscCall(PetscSequentialPhaseBegin_Private(local_comm,ng));
109   PetscFunctionReturn(0);
110 }
111 
112 /*@
113    PetscSequentialPhaseEnd - Ends a sequential section of code.
114 
115    Collective
116 
117    Input Parameters:
118 +  comm - Communicator to sequentialize.
119 -  ng   - Number in processor group.  This many processes are allowed to execute
120    at the same time (usually 1)
121 
122    Level: intermediate
123 
124    Notes:
125    See PetscSequentialPhaseBegin() for more details.
126 
127 .seealso: PetscSequentialPhaseBegin()
128 
129 @*/
130 PetscErrorCode  PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
131 {
132   PetscMPIInt    size,flag;
133   MPI_Comm       local_comm,*addr_local_comm;
134 
135   PetscFunctionBegin;
136   PetscCallMPI(MPI_Comm_size(comm,&size));
137   if (size == 1) PetscFunctionReturn(0);
138 
139   PetscCallMPI(MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag));
140   PetscCheck(flag,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
141   local_comm = *addr_local_comm;
142 
143   PetscCall(PetscSequentialPhaseEnd_Private(local_comm,ng));
144 
145   PetscCall(PetscFree(addr_local_comm));
146   PetscCallMPI(MPI_Comm_free(&local_comm));
147   PetscCallMPI(MPI_Comm_delete_attr(comm,Petsc_Seq_keyval));
148   PetscFunctionReturn(0);
149 }
150 
151 /*@C
152   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
153 
154   Collective
155 
156   Input Parameter:
157 . minMaxVal - An array with the local min and max
158 
159   Output Parameter:
160 . minMaxValGlobal - An array with the global min and max
161 
162   Level: beginner
163 
164 .seealso: PetscSplitOwnership()
165 @*/
166 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
167 {
168   PetscInt       sendbuf[3],recvbuf[3];
169 
170   PetscFunctionBegin;
171   sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
172   sendbuf[1] = minMaxVal[1];
173   sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Are there PETSC_MIN_INT in minMaxVal[0]? */
174   PetscCallMPI(MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
175   minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
176   minMaxValGlobal[1] = recvbuf[1];
177   PetscFunctionReturn(0);
178 }
179 
180 /*@C
181   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
182 
183   Collective
184 
185   Input Parameter:
186 . minMaxVal - An array with the local min and max
187 
188   Output Parameter:
189 . minMaxValGlobal - An array with the global min and max
190 
191   Level: beginner
192 
193 .seealso: PetscSplitOwnership()
194 @*/
195 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
196 {
197   PetscReal      sendbuf[2];
198 
199   PetscFunctionBegin;
200   sendbuf[0] = -minMaxVal[0];
201   sendbuf[1] = minMaxVal[1];
202   PetscCall(MPIU_Allreduce(sendbuf,minMaxValGlobal,2,MPIU_REAL,MPIU_MAX,comm));
203   minMaxValGlobal[0] = -minMaxValGlobal[0];
204   PetscFunctionReturn(0);
205 }
206