xref: /petsc/src/sys/utils/mpiu.c (revision 658e5547a7c77d7aff7b975a33da61ff852241a7)
1 
2 #include <petscsys.h>        /*I  "petscsys.h"  I*/
3 #include <petsc/private/petscimpl.h>
4 /*
5     Note that tag of 0 is ok because comm is a private communicator
6   generated below just for these routines.
7 */
8 
9 PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
10 {
11   PetscErrorCode ierr;
12   PetscMPIInt    rank,size,tag = 0;
13   MPI_Status     status;
14 
15   PetscFunctionBegin;
16   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
17   if (size == 1) PetscFunctionReturn(0);
18   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
19   if (rank) {
20     ierr = MPI_Recv(NULL,0,MPI_INT,rank-1,tag,comm,&status);CHKERRMPI(ierr);
21   }
22   /* Send to the next process in the group unless we are the last process */
23   if ((rank % ng) < ng - 1 && rank != size - 1) {
24     ierr = MPI_Send(NULL,0,MPI_INT,rank + 1,tag,comm);CHKERRMPI(ierr);
25   }
26   PetscFunctionReturn(0);
27 }
28 
29 PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
30 {
31   PetscErrorCode ierr;
32   PetscMPIInt    rank,size,tag = 0;
33   MPI_Status     status;
34 
35   PetscFunctionBegin;
36   ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
37   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
38   if (size == 1) PetscFunctionReturn(0);
39 
40   /* Send to the first process in the next group */
41   if ((rank % ng) == ng - 1 || rank == size - 1) {
42     ierr = MPI_Send(NULL,0,MPI_INT,(rank + 1) % size,tag,comm);CHKERRMPI(ierr);
43   }
44   if (rank == 0) {
45     ierr = MPI_Recv(NULL,0,MPI_INT,size-1,tag,comm,&status);CHKERRMPI(ierr);
46   }
47   PetscFunctionReturn(0);
48 }
49 
/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.

    It is created lazily (on first use) in PetscSequentialPhaseBegin(); the
  attribute value stored under it is a PetscMalloc1()'ed MPI_Comm that is
  freed, along with the duplicated communicator, in PetscSequentialPhaseEnd().
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
56 
57 /*@
58    PetscSequentialPhaseBegin - Begins a sequential section of code.
59 
60    Collective
61 
62    Input Parameters:
63 +  comm - Communicator to sequentialize.
64 -  ng   - Number in processor group.  This many processes are allowed to execute
65    at the same time (usually 1)
66 
67    Level: intermediate
68 
69    Notes:
70    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
71    way to force a section of code to be executed by the processes in
72    rank order.  Typically, this is done with
73 .vb
74       PetscSequentialPhaseBegin(comm, 1);
75       <code to be executed sequentially>
76       PetscSequentialPhaseEnd(comm, 1);
77 .ve
78 
79    Often, the sequential code contains output statements (e.g., printf) to
80    be executed.  Note that you may need to flush the I/O buffers before
81    calling PetscSequentialPhaseEnd().  Also, note that some systems do
82    not propagate I/O in any order to the controling terminal (in other words,
83    even if you flush the output, you may not get the data in the order
84    that you want).
85 
86 .seealso: PetscSequentialPhaseEnd()
87 
88 @*/
89 PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
90 {
91   PetscErrorCode ierr;
92   PetscMPIInt    size;
93   MPI_Comm       local_comm,*addr_local_comm;
94 
95   PetscFunctionBegin;
96   ierr = PetscSysInitializePackage();CHKERRQ(ierr);
97   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
98   if (size == 1) PetscFunctionReturn(0);
99 
100   /* Get the private communicator for the sequential operations */
101   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
102     ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,NULL);CHKERRMPI(ierr);
103   }
104 
105   ierr = MPI_Comm_dup(comm,&local_comm);CHKERRMPI(ierr);
106   ierr = PetscMalloc1(1,&addr_local_comm);CHKERRQ(ierr);
107 
108   *addr_local_comm = local_comm;
109 
110   ierr = MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRMPI(ierr);
111   ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
112   PetscFunctionReturn(0);
113 }
114 
115 /*@
116    PetscSequentialPhaseEnd - Ends a sequential section of code.
117 
118    Collective
119 
120    Input Parameters:
121 +  comm - Communicator to sequentialize.
122 -  ng   - Number in processor group.  This many processes are allowed to execute
123    at the same time (usually 1)
124 
125    Level: intermediate
126 
127    Notes:
128    See PetscSequentialPhaseBegin() for more details.
129 
130 .seealso: PetscSequentialPhaseBegin()
131 
132 @*/
133 PetscErrorCode  PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
134 {
135   PetscErrorCode ierr;
136   PetscMPIInt    size,flag;
137   MPI_Comm       local_comm,*addr_local_comm;
138 
139   PetscFunctionBegin;
140   ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
141   if (size == 1) PetscFunctionReturn(0);
142 
143   ierr = MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag);CHKERRMPI(ierr);
144   PetscCheckFalse(!flag,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
145   local_comm = *addr_local_comm;
146 
147   ierr = PetscSequentialPhaseEnd_Private(local_comm,ng);CHKERRQ(ierr);
148 
149   ierr = PetscFree(addr_local_comm);CHKERRQ(ierr);
150   ierr = MPI_Comm_free(&local_comm);CHKERRMPI(ierr);
151   ierr = MPI_Comm_delete_attr(comm,Petsc_Seq_keyval);CHKERRMPI(ierr);
152   PetscFunctionReturn(0);
153 }
154 
155 /*@C
156   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
157 
158   Collective
159 
160   Input Parameter:
161 . minMaxVal - An array with the local min and max
162 
163   Output Parameter:
164 . minMaxValGlobal - An array with the global min and max
165 
166   Level: beginner
167 
168 .seealso: PetscSplitOwnership()
169 @*/
170 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
171 {
172   PetscErrorCode ierr;
173   PetscInt       sendbuf[3],recvbuf[3];
174 
175   PetscFunctionBegin;
176   sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
177   sendbuf[1] = minMaxVal[1];
178   sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Are there PETSC_MIN_INT in minMaxVal[0]? */
179   ierr = MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm);CHKERRMPI(ierr);
180   minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
181   minMaxValGlobal[1] = recvbuf[1];
182   PetscFunctionReturn(0);
183 }
184 
185 /*@C
186   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
187 
188   Collective
189 
190   Input Parameter:
191 . minMaxVal - An array with the local min and max
192 
193   Output Parameter:
194 . minMaxValGlobal - An array with the global min and max
195 
196   Level: beginner
197 
198 .seealso: PetscSplitOwnership()
199 @*/
200 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
201 {
202   PetscReal      sendbuf[2];
203   PetscErrorCode ierr;
204 
205   PetscFunctionBegin;
206   sendbuf[0] = -minMaxVal[0];
207   sendbuf[1] = minMaxVal[1];
208   ierr = MPIU_Allreduce(sendbuf,minMaxValGlobal,2,MPIU_REAL,MPIU_MAX,comm);CHKERRMPI(ierr);
209   minMaxValGlobal[0] = -minMaxValGlobal[0];
210   PetscFunctionReturn(0);
211 }
212