xref: /petsc/src/sys/utils/mpiu.c (revision 1690c2ae071c7584458d4e437df7b47bc4686b3c)
#include <petscsys.h> /*I  "petscsys.h"  I*/
#include <petsc/private/petscimpl.h>
/*
    Note that tag of 0 is ok because comm is a private communicator
  generated below just for these routines.
*/

PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  if (rank) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, rank - 1, tag, comm, &status));
  /* Send to the next process in the group unless we are the last process */
  if ((rank % ng) < ng - 1 && rank != size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, rank + 1, tag, comm));
  PetscFunctionReturn(PETSC_SUCCESS);
}

PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm, int ng)
{
  PetscMPIInt rank, size, tag = 0;
  MPI_Status  status;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_rank(comm, &rank));
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Send to the first process in the next group */
  if ((rank % ng) == ng - 1 || rank == size - 1) PetscCallMPI(MPI_Send(NULL, 0, MPI_INT, (rank + 1) % size, tag, comm));
  if (rank == 0) PetscCallMPI(MPI_Recv(NULL, 0, MPI_INT, size - 1, tag, comm, &status));
  PetscFunctionReturn(PETSC_SUCCESS);
}
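
/*
   Together the two routines above pass a zero-length "token" message around comm:
   PetscSequentialPhaseBegin_Private() blocks every rank except rank 0 until the previous
   rank has sent, and forwards the token within a group of ng consecutive ranks so that
   up to ng ranks execute concurrently.  PetscSequentialPhaseEnd_Private() has the last
   rank of each group release the first rank of the next group, and the final rank wraps
   the token back to rank 0, which waits for it before returning.
*/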

/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
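
/*
   PetscSequentialPhaseBegin()/End() rely on the standard MPI attribute-caching pattern to
   stash a private duplicated communicator on the user's communicator.  A minimal sketch of
   that pattern (the names keyval and payload are illustrative, not part of PETSc):

     static int keyval = MPI_KEYVAL_INVALID;
     if (keyval == MPI_KEYVAL_INVALID) MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &keyval, NULL);
     MPI_Comm_set_attr(comm, keyval, (void *)payload);          // attach a pointer to comm
     MPI_Comm_get_attr(comm, keyval, (void **)&payload, &flag); // later: retrieve it; flag is 0 if absent
     MPI_Comm_delete_attr(comm, keyval);                        // detach when finished
*/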

/*@
  PetscSequentialPhaseBegin - Begins a sequential section of code.

  Collective

  Input Parameters:
+ comm - Communicator to sequentialize over
- ng   - Number of processes per group; this many processes are allowed to execute
   at the same time (usually 1)

  Level: intermediate

  Notes:
  `PetscSequentialPhaseBegin()` and `PetscSequentialPhaseEnd()` provide a
  way to force a section of code to be executed by the processes in
  rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

  Use `PetscSynchronizedPrintf()`, not these routines, to ensure that output from different MPI ranks is properly ordered.

.seealso: `PetscSequentialPhaseEnd()`, `PetscSynchronizedPrintf()`
@*/
PetscErrorCode PetscSequentialPhaseBegin(MPI_Comm comm, int ng)
{
  PetscMPIInt size;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCall(PetscSysInitializePackage());
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  /* Get the private communicator for the sequential operations */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) PetscCallMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN, MPI_COMM_NULL_DELETE_FN, &Petsc_Seq_keyval, NULL));

  PetscCallMPI(MPI_Comm_dup(comm, &local_comm));
  PetscCall(PetscMalloc1(1, &addr_local_comm));

  *addr_local_comm = local_comm;

  PetscCallMPI(MPI_Comm_set_attr(comm, Petsc_Seq_keyval, (void *)addr_local_comm));
  PetscCall(PetscSequentialPhaseBegin_Private(local_comm, ng));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PetscSequentialPhaseEnd - Ends a sequential section of code.

  Collective

  Input Parameters:
+ comm - Communicator to sequentialize.
- ng   - Number of processes per group; this many processes are allowed to execute
   at the same time (usually 1)

  Level: intermediate

  Note:
  See `PetscSequentialPhaseBegin()` for more details.

.seealso: `PetscSequentialPhaseBegin()`
@*/
PetscErrorCode PetscSequentialPhaseEnd(MPI_Comm comm, int ng)
{
  PetscMPIInt size, flag;
  MPI_Comm    local_comm, *addr_local_comm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(comm, &size));
  if (size == 1) PetscFunctionReturn(PETSC_SUCCESS);

  PetscCallMPI(MPI_Comm_get_attr(comm, Petsc_Seq_keyval, (void **)&addr_local_comm, &flag));
  PetscCheck(flag, PETSC_COMM_SELF, PETSC_ERR_ARG_INCOMP, "Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  PetscCall(PetscSequentialPhaseEnd_Private(local_comm, ng));

  PetscCall(PetscFree(addr_local_comm));
  PetscCallMPI(MPI_Comm_free(&local_comm));
  PetscCallMPI(MPI_Comm_delete_attr(comm, Petsc_Seq_keyval));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PetscGlobalMinMaxInt - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce with
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxReal()`
@*/
PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
{
  PetscInt sendbuf[3], recvbuf[3];

  PetscFunctionBegin;
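  /* Compute the minimum as the maximum of the negated values, so a single MPI_MAX
     reduction produces both extrema; the third entry flags ranks whose minimum is
     PETSC_INT_MIN, since that value cannot be negated without overflow */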
  sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_INT_MIN = PETSC_INT_MIN */
  sendbuf[1] = minMaxVal[1];
  sendbuf[2] = (minMaxVal[0] == PETSC_INT_MIN) ? 1 : 0; /* Flag whether minMaxVal[0] is PETSC_INT_MIN */
  PetscCall(MPIU_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
  minMaxValGlobal[0] = recvbuf[2] ? PETSC_INT_MIN : -recvbuf[0];
  minMaxValGlobal[1] = recvbuf[1];
  PetscFunctionReturn(PETSC_SUCCESS);
}
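
/*
   Typical use of PetscGlobalMinMaxInt() is to combine per-rank extrema with a single
   reduction.  A minimal sketch (error handling abbreviated; the variable and
   communicator choices below are illustrative only):

     PetscInt localMinMax[2], globalMinMax[2];
     localMinMax[0] = localMin; // smallest value owned by this rank
     localMinMax[1] = localMax; // largest value owned by this rank
     PetscCall(PetscGlobalMinMaxInt(PETSC_COMM_WORLD, localMinMax, globalMinMax));
     // globalMinMax[0] is now the global minimum and globalMinMax[1] the global maximum
*/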

/*@
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective

  Input Parameters:
+ comm      - The MPI communicator to reduce with
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.seealso: `PetscSplitOwnership()`, `PetscGlobalMinMaxInt()`
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
{
  PetscReal sendbuf[2];

  PetscFunctionBegin;
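  /* Same negate-and-reduce trick as PetscGlobalMinMaxInt(); no overflow flag is needed
     because the negation of any representable PetscReal is itself representable */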
  sendbuf[0] = -minMaxVal[0];
  sendbuf[1] = minMaxVal[1];
  PetscCall(MPIU_Allreduce(sendbuf, minMaxValGlobal, 2, MPIU_REAL, MPIU_MAX, comm));
  minMaxValGlobal[0] = -minMaxValGlobal[0];
  PetscFunctionReturn(PETSC_SUCCESS);
}