xref: /petsc/src/sys/utils/mpiu.c (revision 28b400f66ebc7ae0049166a2294dfcd3df27e64b)
1e5c89e4eSSatish Balay 
2c6db04a5SJed Brown #include <petscsys.h>        /*I  "petscsys.h"  I*/
38ae1f613SSatish Balay #include <petsc/private/petscimpl.h>
4e5c89e4eSSatish Balay /*
5e5c89e4eSSatish Balay     Note that tag of 0 is ok because comm is a private communicator
6e5c89e4eSSatish Balay   generated below just for these routines.
7e5c89e4eSSatish Balay */
8e5c89e4eSSatish Balay 
995c0884eSLisandro Dalcin PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
10e5c89e4eSSatish Balay {
11e5c89e4eSSatish Balay   PetscMPIInt    rank,size,tag = 0;
12e5c89e4eSSatish Balay   MPI_Status     status;
13e5c89e4eSSatish Balay 
14e5c89e4eSSatish Balay   PetscFunctionBegin;
155f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_size(comm,&size));
16e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
175f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_rank(comm,&rank));
18e5c89e4eSSatish Balay   if (rank) {
195f80ce2aSJacob Faibussowitsch     CHKERRMPI(MPI_Recv(NULL,0,MPI_INT,rank-1,tag,comm,&status));
20e5c89e4eSSatish Balay   }
21e5c89e4eSSatish Balay   /* Send to the next process in the group unless we are the last process */
22e5c89e4eSSatish Balay   if ((rank % ng) < ng - 1 && rank != size - 1) {
235f80ce2aSJacob Faibussowitsch     CHKERRMPI(MPI_Send(NULL,0,MPI_INT,rank + 1,tag,comm));
24e5c89e4eSSatish Balay   }
25e5c89e4eSSatish Balay   PetscFunctionReturn(0);
26e5c89e4eSSatish Balay }
27e5c89e4eSSatish Balay 
2895c0884eSLisandro Dalcin PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
29e5c89e4eSSatish Balay {
30e5c89e4eSSatish Balay   PetscMPIInt    rank,size,tag = 0;
31e5c89e4eSSatish Balay   MPI_Status     status;
32e5c89e4eSSatish Balay 
33e5c89e4eSSatish Balay   PetscFunctionBegin;
345f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_rank(comm,&rank));
355f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_size(comm,&size));
36e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
37e5c89e4eSSatish Balay 
38e5c89e4eSSatish Balay   /* Send to the first process in the next group */
39e5c89e4eSSatish Balay   if ((rank % ng) == ng - 1 || rank == size - 1) {
405f80ce2aSJacob Faibussowitsch     CHKERRMPI(MPI_Send(NULL,0,MPI_INT,(rank + 1) % size,tag,comm));
41e5c89e4eSSatish Balay   }
42dd400576SPatrick Sanan   if (rank == 0) {
435f80ce2aSJacob Faibussowitsch     CHKERRMPI(MPI_Recv(NULL,0,MPI_INT,size-1,tag,comm,&status));
44e5c89e4eSSatish Balay   }
45e5c89e4eSSatish Balay   PetscFunctionReturn(0);
46e5c89e4eSSatish Balay }
47e5c89e4eSSatish Balay 
48e5c89e4eSSatish Balay /* ---------------------------------------------------------------------*/
/*
    Petsc_Seq_keyval is the MPI attribute keyval used to attach, to a user
  communicator, the private duplicated communicator that carries the token
  messages for the sequential-phase routines below.  It is created lazily by
  PetscSequentialPhaseBegin() on first use.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
54e5c89e4eSSatish Balay 
55e30d2299SSatish Balay /*@
56e5c89e4eSSatish Balay    PetscSequentialPhaseBegin - Begins a sequential section of code.
57e5c89e4eSSatish Balay 
58d083f849SBarry Smith    Collective
59e5c89e4eSSatish Balay 
60e5c89e4eSSatish Balay    Input Parameters:
61e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
62e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
63e5c89e4eSSatish Balay    at the same time (usually 1)
64e5c89e4eSSatish Balay 
65e5c89e4eSSatish Balay    Level: intermediate
66e5c89e4eSSatish Balay 
67e5c89e4eSSatish Balay    Notes:
68e5c89e4eSSatish Balay    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
69e5c89e4eSSatish Balay    way to force a section of code to be executed by the processes in
70e5c89e4eSSatish Balay    rank order.  Typically, this is done with
71e5c89e4eSSatish Balay .vb
72e5c89e4eSSatish Balay       PetscSequentialPhaseBegin(comm, 1);
73e5c89e4eSSatish Balay       <code to be executed sequentially>
74e5c89e4eSSatish Balay       PetscSequentialPhaseEnd(comm, 1);
75e5c89e4eSSatish Balay .ve
76e5c89e4eSSatish Balay 
77e5c89e4eSSatish Balay    Often, the sequential code contains output statements (e.g., printf) to
78e5c89e4eSSatish Balay    be executed.  Note that you may need to flush the I/O buffers before
79e5c89e4eSSatish Balay    calling PetscSequentialPhaseEnd().  Also, note that some systems do
80e5c89e4eSSatish Balay    not propagate I/O in any order to the controling terminal (in other words,
81e5c89e4eSSatish Balay    even if you flush the output, you may not get the data in the order
82e5c89e4eSSatish Balay    that you want).
83e5c89e4eSSatish Balay 
84e5c89e4eSSatish Balay .seealso: PetscSequentialPhaseEnd()
85e5c89e4eSSatish Balay 
86e5c89e4eSSatish Balay @*/
877087cfbeSBarry Smith PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
88e5c89e4eSSatish Balay {
89e5c89e4eSSatish Balay   PetscMPIInt    size;
90e5c89e4eSSatish Balay   MPI_Comm       local_comm,*addr_local_comm;
91e5c89e4eSSatish Balay 
92e5c89e4eSSatish Balay   PetscFunctionBegin;
935f80ce2aSJacob Faibussowitsch   CHKERRQ(PetscSysInitializePackage());
945f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_size(comm,&size));
95e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
96e5c89e4eSSatish Balay 
97e5c89e4eSSatish Balay   /* Get the private communicator for the sequential operations */
98e5c89e4eSSatish Balay   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
995f80ce2aSJacob Faibussowitsch     CHKERRMPI(MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,NULL));
100e5c89e4eSSatish Balay   }
101e5c89e4eSSatish Balay 
1025f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_dup(comm,&local_comm));
1035f80ce2aSJacob Faibussowitsch   CHKERRQ(PetscMalloc1(1,&addr_local_comm));
104a297a907SKarl Rupp 
105e5c89e4eSSatish Balay   *addr_local_comm = local_comm;
106a297a907SKarl Rupp 
1075f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm));
1085f80ce2aSJacob Faibussowitsch   CHKERRQ(PetscSequentialPhaseBegin_Private(local_comm,ng));
109e5c89e4eSSatish Balay   PetscFunctionReturn(0);
110e5c89e4eSSatish Balay }
111e5c89e4eSSatish Balay 
112e30d2299SSatish Balay /*@
113e5c89e4eSSatish Balay    PetscSequentialPhaseEnd - Ends a sequential section of code.
114e5c89e4eSSatish Balay 
115d083f849SBarry Smith    Collective
116e5c89e4eSSatish Balay 
117e5c89e4eSSatish Balay    Input Parameters:
118e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
119e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
120e5c89e4eSSatish Balay    at the same time (usually 1)
121e5c89e4eSSatish Balay 
122e5c89e4eSSatish Balay    Level: intermediate
123e5c89e4eSSatish Balay 
124e5c89e4eSSatish Balay    Notes:
125e5c89e4eSSatish Balay    See PetscSequentialPhaseBegin() for more details.
126e5c89e4eSSatish Balay 
127e5c89e4eSSatish Balay .seealso: PetscSequentialPhaseBegin()
128e5c89e4eSSatish Balay 
129e5c89e4eSSatish Balay @*/
1307087cfbeSBarry Smith PetscErrorCode  PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
131e5c89e4eSSatish Balay {
132e5c89e4eSSatish Balay   PetscMPIInt    size,flag;
133e5c89e4eSSatish Balay   MPI_Comm       local_comm,*addr_local_comm;
134e5c89e4eSSatish Balay 
135e5c89e4eSSatish Balay   PetscFunctionBegin;
1365f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_size(comm,&size));
137e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
138e5c89e4eSSatish Balay 
1395f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag));
140*28b400f6SJacob Faibussowitsch   PetscCheck(flag,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
141e5c89e4eSSatish Balay   local_comm = *addr_local_comm;
142e5c89e4eSSatish Balay 
1435f80ce2aSJacob Faibussowitsch   CHKERRQ(PetscSequentialPhaseEnd_Private(local_comm,ng));
144e5c89e4eSSatish Balay 
1455f80ce2aSJacob Faibussowitsch   CHKERRQ(PetscFree(addr_local_comm));
1465f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_free(&local_comm));
1475f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Comm_delete_attr(comm,Petsc_Seq_keyval));
148e5c89e4eSSatish Balay   PetscFunctionReturn(0);
149e5c89e4eSSatish Balay }
1506ba4152bSMatthew G. Knepley 
1516ba4152bSMatthew G. Knepley /*@C
1526ba4152bSMatthew G. Knepley   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
1536ba4152bSMatthew G. Knepley 
154d083f849SBarry Smith   Collective
1556ba4152bSMatthew G. Knepley 
1566ba4152bSMatthew G. Knepley   Input Parameter:
1576ba4152bSMatthew G. Knepley . minMaxVal - An array with the local min and max
1586ba4152bSMatthew G. Knepley 
1596ba4152bSMatthew G. Knepley   Output Parameter:
1606ba4152bSMatthew G. Knepley . minMaxValGlobal - An array with the global min and max
1616ba4152bSMatthew G. Knepley 
1626ba4152bSMatthew G. Knepley   Level: beginner
1636ba4152bSMatthew G. Knepley 
1646ba4152bSMatthew G. Knepley .seealso: PetscSplitOwnership()
1656ba4152bSMatthew G. Knepley @*/
16658b5cd2aSSatish Balay PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, const PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
1676ba4152bSMatthew G. Knepley {
16858b5cd2aSSatish Balay   PetscInt       sendbuf[3],recvbuf[3];
1696ba4152bSMatthew G. Knepley 
1706ba4152bSMatthew G. Knepley   PetscFunctionBegin;
17158b5cd2aSSatish Balay   sendbuf[0] = -minMaxVal[0]; /* Note that -PETSC_MIN_INT = PETSC_MIN_INT */
17258b5cd2aSSatish Balay   sendbuf[1] = minMaxVal[1];
17358b5cd2aSSatish Balay   sendbuf[2] = (minMaxVal[0] == PETSC_MIN_INT) ? 1 : 0; /* Are there PETSC_MIN_INT in minMaxVal[0]? */
1745f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPI_Allreduce(sendbuf, recvbuf, 3, MPIU_INT, MPI_MAX, comm));
17558b5cd2aSSatish Balay   minMaxValGlobal[0] = recvbuf[2] ? PETSC_MIN_INT : -recvbuf[0];
17658b5cd2aSSatish Balay   minMaxValGlobal[1] = recvbuf[1];
1776ba4152bSMatthew G. Knepley   PetscFunctionReturn(0);
1786ba4152bSMatthew G. Knepley }
1796ba4152bSMatthew G. Knepley 
1806ba4152bSMatthew G. Knepley /*@C
1816ba4152bSMatthew G. Knepley   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
1826ba4152bSMatthew G. Knepley 
183d083f849SBarry Smith   Collective
1846ba4152bSMatthew G. Knepley 
1856ba4152bSMatthew G. Knepley   Input Parameter:
1866ba4152bSMatthew G. Knepley . minMaxVal - An array with the local min and max
1876ba4152bSMatthew G. Knepley 
1886ba4152bSMatthew G. Knepley   Output Parameter:
1896ba4152bSMatthew G. Knepley . minMaxValGlobal - An array with the global min and max
1906ba4152bSMatthew G. Knepley 
1916ba4152bSMatthew G. Knepley   Level: beginner
1926ba4152bSMatthew G. Knepley 
1936ba4152bSMatthew G. Knepley .seealso: PetscSplitOwnership()
1946ba4152bSMatthew G. Knepley @*/
19558b5cd2aSSatish Balay PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, const PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
1966ba4152bSMatthew G. Knepley {
19758b5cd2aSSatish Balay   PetscReal      sendbuf[2];
1986ba4152bSMatthew G. Knepley 
1996ba4152bSMatthew G. Knepley   PetscFunctionBegin;
20058b5cd2aSSatish Balay   sendbuf[0] = -minMaxVal[0];
20158b5cd2aSSatish Balay   sendbuf[1] = minMaxVal[1];
2025f80ce2aSJacob Faibussowitsch   CHKERRMPI(MPIU_Allreduce(sendbuf,minMaxValGlobal,2,MPIU_REAL,MPIU_MAX,comm));
20358b5cd2aSSatish Balay   minMaxValGlobal[0] = -minMaxValGlobal[0];
2046ba4152bSMatthew G. Knepley   PetscFunctionReturn(0);
2056ba4152bSMatthew G. Knepley }
206