xref: /petsc/src/sys/utils/mpiu.c (revision 5b6bfdb9644f185dbf5e5a09b808ec241507e1e7)
1 
2 #include <petscsys.h>        /*I  "petscsys.h"  I*/
3 #include <petsc/private/petscimpl.h>
4 /*
5     Note that tag of 0 is ok because comm is a private communicator
6   generated below just for these routines.
7 */
8 
9 PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
10 {
11   PetscErrorCode ierr;
12   PetscMPIInt    rank,size,tag = 0;
13   MPI_Status     status;
14 
15   PetscFunctionBegin;
16   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
17   if (size == 1) PetscFunctionReturn(0);
18   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
19   if (rank) {
20     ierr = MPI_Recv(0,0,MPI_INT,rank-1,tag,comm,&status);CHKERRQ(ierr);
21   }
22   /* Send to the next process in the group unless we are the last process */
23   if ((rank % ng) < ng - 1 && rank != size - 1) {
24     ierr = MPI_Send(0,0,MPI_INT,rank + 1,tag,comm);CHKERRQ(ierr);
25   }
26   PetscFunctionReturn(0);
27 }
28 
29 PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
30 {
31   PetscErrorCode ierr;
32   PetscMPIInt    rank,size,tag = 0;
33   MPI_Status     status;
34 
35   PetscFunctionBegin;
36   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
37   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
38   if (size == 1) PetscFunctionReturn(0);
39 
40   /* Send to the first process in the next group */
41   if ((rank % ng) == ng - 1 || rank == size - 1) {
42     ierr = MPI_Send(0,0,MPI_INT,(rank + 1) % size,tag,comm);CHKERRQ(ierr);
43   }
44   if (!rank) {
45     ierr = MPI_Recv(0,0,MPI_INT,size-1,tag,comm,&status);CHKERRQ(ierr);
46   }
47   PetscFunctionReturn(0);
48 }
49 
50 /* ---------------------------------------------------------------------*/
51 /*
52     The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
53   is attached to a communicator that manages the sequential phase code below.
54 */
55 PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
56 
57 /*@
58    PetscSequentialPhaseBegin - Begins a sequential section of code.
59 
60    Collective on MPI_Comm
61 
62    Input Parameters:
63 +  comm - Communicator to sequentialize.
64 -  ng   - Number in processor group.  This many processes are allowed to execute
65    at the same time (usually 1)
66 
67    Level: intermediate
68 
69    Notes:
70    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
71    way to force a section of code to be executed by the processes in
72    rank order.  Typically, this is done with
73 .vb
74       PetscSequentialPhaseBegin(comm, 1);
75       <code to be executed sequentially>
76       PetscSequentialPhaseEnd(comm, 1);
77 .ve
78 
79    Often, the sequential code contains output statements (e.g., printf) to
80    be executed.  Note that you may need to flush the I/O buffers before
81    calling PetscSequentialPhaseEnd().  Also, note that some systems do
   not propagate I/O in any order to the controlling terminal (in other words,
83    even if you flush the output, you may not get the data in the order
84    that you want).
85 
86 .seealso: PetscSequentialPhaseEnd()
87 
88    Concepts: sequential stage
89 
90 @*/
91 PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
92 {
93   PetscErrorCode ierr;
94   PetscMPIInt    size;
95   MPI_Comm       local_comm,*addr_local_comm;
96 
97   PetscFunctionBegin;
98   ierr = PetscSysInitializePackage();CHKERRQ(ierr);
99   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
100   if (size == 1) PetscFunctionReturn(0);
101 
102   /* Get the private communicator for the sequential operations */
103   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
104     ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,0);CHKERRQ(ierr);
105   }
106 
107   ierr = MPI_Comm_dup(comm,&local_comm);CHKERRQ(ierr);
108   ierr = PetscMalloc1(1,&addr_local_comm);CHKERRQ(ierr);
109 
110   *addr_local_comm = local_comm;
111 
112   ierr = MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRQ(ierr);
113   ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
114   PetscFunctionReturn(0);
115 }
116 
117 /*@
118    PetscSequentialPhaseEnd - Ends a sequential section of code.
119 
120    Collective on MPI_Comm
121 
122    Input Parameters:
123 +  comm - Communicator to sequentialize.
124 -  ng   - Number in processor group.  This many processes are allowed to execute
125    at the same time (usually 1)
126 
127    Level: intermediate
128 
129    Notes:
130    See PetscSequentialPhaseBegin() for more details.
131 
132 .seealso: PetscSequentialPhaseBegin()
133 
134    Concepts: sequential stage
135 
136 @*/
137 PetscErrorCode  PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
138 {
139   PetscErrorCode ierr;
140   PetscMPIInt    size,flag;
141   MPI_Comm       local_comm,*addr_local_comm;
142 
143   PetscFunctionBegin;
144   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
145   if (size == 1) PetscFunctionReturn(0);
146 
147   ierr = MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag);CHKERRQ(ierr);
148   if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
149   local_comm = *addr_local_comm;
150 
151   ierr = PetscSequentialPhaseEnd_Private(local_comm,ng);CHKERRQ(ierr);
152 
153   ierr = PetscFree(addr_local_comm);CHKERRQ(ierr);
154   ierr = MPI_Comm_free(&local_comm);CHKERRQ(ierr);
155   ierr = MPI_Comm_delete_attr(comm,Petsc_Seq_keyval);CHKERRQ(ierr);
156   PetscFunctionReturn(0);
157 }
158 
159 /*@C
160   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
161 
162   Collective on comm
163 
164   Input Parameter:
165 . minMaxVal - An array with the local min and max
166 
167   Output Parameter:
168 . minMaxValGlobal - An array with the global min and max
169 
170   Level: beginner
171 
172 .keywords: minimum, maximum
173 .seealso: PetscSplitOwnership()
174 @*/
175 PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
176 {
177   PetscErrorCode ierr;
178 
179   PetscFunctionBegin;
180   minMaxVal[1] = -minMaxVal[1];
181   ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_INT, MPI_MIN, comm);CHKERRQ(ierr);
182   minMaxValGlobal[1] = -minMaxValGlobal[1];
183   PetscFunctionReturn(0);
184 }
185 
186 /*@C
187   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
188 
189   Collective on comm
190 
191   Input Parameter:
192 . minMaxVal - An array with the local min and max
193 
194   Output Parameter:
195 . minMaxValGlobal - An array with the global min and max
196 
197   Level: beginner
198 
199 .keywords: minimum, maximum
200 .seealso: PetscSplitOwnership()
201 @*/
202 PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
203 {
204   PetscErrorCode ierr;
205 
206   PetscFunctionBegin;
207   minMaxVal[1] = -minMaxVal[1];
208   ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_REAL, MPI_MIN, comm);CHKERRQ(ierr);
209   minMaxValGlobal[1] = -minMaxValGlobal[1];
210   PetscFunctionReturn(0);
211 }
212