xref: /petsc/src/sys/utils/mpiu.c (revision d083f849a86f1f43e18d534ee43954e2786cb29a) !
1e5c89e4eSSatish Balay 
2c6db04a5SJed Brown #include <petscsys.h>        /*I  "petscsys.h"  I*/
38ae1f613SSatish Balay #include <petsc/private/petscimpl.h>
4e5c89e4eSSatish Balay /*
5e5c89e4eSSatish Balay     Note that tag of 0 is ok because comm is a private communicator
6e5c89e4eSSatish Balay   generated below just for these routines.
7e5c89e4eSSatish Balay */
8e5c89e4eSSatish Balay 
995c0884eSLisandro Dalcin PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
10e5c89e4eSSatish Balay {
11e5c89e4eSSatish Balay   PetscErrorCode ierr;
12e5c89e4eSSatish Balay   PetscMPIInt    rank,size,tag = 0;
13e5c89e4eSSatish Balay   MPI_Status     status;
14e5c89e4eSSatish Balay 
15e5c89e4eSSatish Balay   PetscFunctionBegin;
16e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
17e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
18e5c89e4eSSatish Balay   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
19e5c89e4eSSatish Balay   if (rank) {
20e5c89e4eSSatish Balay     ierr = MPI_Recv(0,0,MPI_INT,rank-1,tag,comm,&status);CHKERRQ(ierr);
21e5c89e4eSSatish Balay   }
22e5c89e4eSSatish Balay   /* Send to the next process in the group unless we are the last process */
23e5c89e4eSSatish Balay   if ((rank % ng) < ng - 1 && rank != size - 1) {
24e5c89e4eSSatish Balay     ierr = MPI_Send(0,0,MPI_INT,rank + 1,tag,comm);CHKERRQ(ierr);
25e5c89e4eSSatish Balay   }
26e5c89e4eSSatish Balay   PetscFunctionReturn(0);
27e5c89e4eSSatish Balay }
28e5c89e4eSSatish Balay 
2995c0884eSLisandro Dalcin PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
30e5c89e4eSSatish Balay {
31e5c89e4eSSatish Balay   PetscErrorCode ierr;
32e5c89e4eSSatish Balay   PetscMPIInt    rank,size,tag = 0;
33e5c89e4eSSatish Balay   MPI_Status     status;
34e5c89e4eSSatish Balay 
35e5c89e4eSSatish Balay   PetscFunctionBegin;
36e5c89e4eSSatish Balay   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
37e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
38e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
39e5c89e4eSSatish Balay 
40e5c89e4eSSatish Balay   /* Send to the first process in the next group */
41e5c89e4eSSatish Balay   if ((rank % ng) == ng - 1 || rank == size - 1) {
42e5c89e4eSSatish Balay     ierr = MPI_Send(0,0,MPI_INT,(rank + 1) % size,tag,comm);CHKERRQ(ierr);
43e5c89e4eSSatish Balay   }
44e5c89e4eSSatish Balay   if (!rank) {
45e5c89e4eSSatish Balay     ierr = MPI_Recv(0,0,MPI_INT,size-1,tag,comm,&status);CHKERRQ(ierr);
46e5c89e4eSSatish Balay   }
47e5c89e4eSSatish Balay   PetscFunctionReturn(0);
48e5c89e4eSSatish Balay }
49e5c89e4eSSatish Balay 
/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.
  It stays MPI_KEYVAL_INVALID until first use; the keyval is created lazily
  in PetscSequentialPhaseBegin().
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
56e5c89e4eSSatish Balay 
57e30d2299SSatish Balay /*@
58e5c89e4eSSatish Balay    PetscSequentialPhaseBegin - Begins a sequential section of code.
59e5c89e4eSSatish Balay 
60*d083f849SBarry Smith    Collective
61e5c89e4eSSatish Balay 
62e5c89e4eSSatish Balay    Input Parameters:
63e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
64e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
65e5c89e4eSSatish Balay    at the same time (usually 1)
66e5c89e4eSSatish Balay 
67e5c89e4eSSatish Balay    Level: intermediate
68e5c89e4eSSatish Balay 
69e5c89e4eSSatish Balay    Notes:
70e5c89e4eSSatish Balay    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
71e5c89e4eSSatish Balay    way to force a section of code to be executed by the processes in
72e5c89e4eSSatish Balay    rank order.  Typically, this is done with
73e5c89e4eSSatish Balay .vb
74e5c89e4eSSatish Balay       PetscSequentialPhaseBegin(comm, 1);
75e5c89e4eSSatish Balay       <code to be executed sequentially>
76e5c89e4eSSatish Balay       PetscSequentialPhaseEnd(comm, 1);
77e5c89e4eSSatish Balay .ve
78e5c89e4eSSatish Balay 
79e5c89e4eSSatish Balay    Often, the sequential code contains output statements (e.g., printf) to
80e5c89e4eSSatish Balay    be executed.  Note that you may need to flush the I/O buffers before
81e5c89e4eSSatish Balay    calling PetscSequentialPhaseEnd().  Also, note that some systems do
82e5c89e4eSSatish Balay    not propagate I/O in any order to the controling terminal (in other words,
83e5c89e4eSSatish Balay    even if you flush the output, you may not get the data in the order
84e5c89e4eSSatish Balay    that you want).
85e5c89e4eSSatish Balay 
86e5c89e4eSSatish Balay .seealso: PetscSequentialPhaseEnd()
87e5c89e4eSSatish Balay 
88e5c89e4eSSatish Balay @*/
897087cfbeSBarry Smith PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
90e5c89e4eSSatish Balay {
91e5c89e4eSSatish Balay   PetscErrorCode ierr;
92e5c89e4eSSatish Balay   PetscMPIInt    size;
93e5c89e4eSSatish Balay   MPI_Comm       local_comm,*addr_local_comm;
94e5c89e4eSSatish Balay 
95e5c89e4eSSatish Balay   PetscFunctionBegin;
96a70dbfdaSBarry Smith   ierr = PetscSysInitializePackage();CHKERRQ(ierr);
97e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
98e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
99e5c89e4eSSatish Balay 
100e5c89e4eSSatish Balay   /* Get the private communicator for the sequential operations */
101e5c89e4eSSatish Balay   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
10212801b39SBarry Smith     ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,0);CHKERRQ(ierr);
103e5c89e4eSSatish Balay   }
104e5c89e4eSSatish Balay 
105e5c89e4eSSatish Balay   ierr = MPI_Comm_dup(comm,&local_comm);CHKERRQ(ierr);
106854ce69bSBarry Smith   ierr = PetscMalloc1(1,&addr_local_comm);CHKERRQ(ierr);
107a297a907SKarl Rupp 
108e5c89e4eSSatish Balay   *addr_local_comm = local_comm;
109a297a907SKarl Rupp 
11047435625SJed Brown   ierr = MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRQ(ierr);
111e5c89e4eSSatish Balay   ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
112e5c89e4eSSatish Balay   PetscFunctionReturn(0);
113e5c89e4eSSatish Balay }
114e5c89e4eSSatish Balay 
115e30d2299SSatish Balay /*@
116e5c89e4eSSatish Balay    PetscSequentialPhaseEnd - Ends a sequential section of code.
117e5c89e4eSSatish Balay 
118*d083f849SBarry Smith    Collective
119e5c89e4eSSatish Balay 
120e5c89e4eSSatish Balay    Input Parameters:
121e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
122e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
123e5c89e4eSSatish Balay    at the same time (usually 1)
124e5c89e4eSSatish Balay 
125e5c89e4eSSatish Balay    Level: intermediate
126e5c89e4eSSatish Balay 
127e5c89e4eSSatish Balay    Notes:
128e5c89e4eSSatish Balay    See PetscSequentialPhaseBegin() for more details.
129e5c89e4eSSatish Balay 
130e5c89e4eSSatish Balay .seealso: PetscSequentialPhaseBegin()
131e5c89e4eSSatish Balay 
132e5c89e4eSSatish Balay @*/
1337087cfbeSBarry Smith PetscErrorCode  PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
134e5c89e4eSSatish Balay {
135e5c89e4eSSatish Balay   PetscErrorCode ierr;
136e5c89e4eSSatish Balay   PetscMPIInt    size,flag;
137e5c89e4eSSatish Balay   MPI_Comm       local_comm,*addr_local_comm;
138e5c89e4eSSatish Balay 
139e5c89e4eSSatish Balay   PetscFunctionBegin;
140e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
141e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
142e5c89e4eSSatish Balay 
14347435625SJed Brown   ierr = MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag);CHKERRQ(ierr);
14417186662SBarry Smith   if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
145e5c89e4eSSatish Balay   local_comm = *addr_local_comm;
146e5c89e4eSSatish Balay 
147e5c89e4eSSatish Balay   ierr = PetscSequentialPhaseEnd_Private(local_comm,ng);CHKERRQ(ierr);
148e5c89e4eSSatish Balay 
149e5c89e4eSSatish Balay   ierr = PetscFree(addr_local_comm);CHKERRQ(ierr);
150e5c89e4eSSatish Balay   ierr = MPI_Comm_free(&local_comm);CHKERRQ(ierr);
15147435625SJed Brown   ierr = MPI_Comm_delete_attr(comm,Petsc_Seq_keyval);CHKERRQ(ierr);
152e5c89e4eSSatish Balay   PetscFunctionReturn(0);
153e5c89e4eSSatish Balay }
1546ba4152bSMatthew G. Knepley 
1556ba4152bSMatthew G. Knepley /*@C
1566ba4152bSMatthew G. Knepley   PetscGlobalMinMaxInt - Get the global min/max from local min/max input
1576ba4152bSMatthew G. Knepley 
158*d083f849SBarry Smith   Collective
1596ba4152bSMatthew G. Knepley 
1606ba4152bSMatthew G. Knepley   Input Parameter:
1616ba4152bSMatthew G. Knepley . minMaxVal - An array with the local min and max
1626ba4152bSMatthew G. Knepley 
1636ba4152bSMatthew G. Knepley   Output Parameter:
1646ba4152bSMatthew G. Knepley . minMaxValGlobal - An array with the global min and max
1656ba4152bSMatthew G. Knepley 
1666ba4152bSMatthew G. Knepley   Level: beginner
1676ba4152bSMatthew G. Knepley 
1686ba4152bSMatthew G. Knepley .seealso: PetscSplitOwnership()
1696ba4152bSMatthew G. Knepley @*/
1706ba4152bSMatthew G. Knepley PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
1716ba4152bSMatthew G. Knepley {
1726ba4152bSMatthew G. Knepley   PetscErrorCode ierr;
1736ba4152bSMatthew G. Knepley 
1746ba4152bSMatthew G. Knepley   PetscFunctionBegin;
1756ba4152bSMatthew G. Knepley   minMaxVal[1] = -minMaxVal[1];
1766ba4152bSMatthew G. Knepley   ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_INT, MPI_MIN, comm);CHKERRQ(ierr);
1776ba4152bSMatthew G. Knepley   minMaxValGlobal[1] = -minMaxValGlobal[1];
1786ba4152bSMatthew G. Knepley   PetscFunctionReturn(0);
1796ba4152bSMatthew G. Knepley }
1806ba4152bSMatthew G. Knepley 
1816ba4152bSMatthew G. Knepley /*@C
1826ba4152bSMatthew G. Knepley   PetscGlobalMinMaxReal - Get the global min/max from local min/max input
1836ba4152bSMatthew G. Knepley 
184*d083f849SBarry Smith   Collective
1856ba4152bSMatthew G. Knepley 
1866ba4152bSMatthew G. Knepley   Input Parameter:
1876ba4152bSMatthew G. Knepley . minMaxVal - An array with the local min and max
1886ba4152bSMatthew G. Knepley 
1896ba4152bSMatthew G. Knepley   Output Parameter:
1906ba4152bSMatthew G. Knepley . minMaxValGlobal - An array with the global min and max
1916ba4152bSMatthew G. Knepley 
1926ba4152bSMatthew G. Knepley   Level: beginner
1936ba4152bSMatthew G. Knepley 
1946ba4152bSMatthew G. Knepley .seealso: PetscSplitOwnership()
1956ba4152bSMatthew G. Knepley @*/
1966ba4152bSMatthew G. Knepley PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
1976ba4152bSMatthew G. Knepley {
1986ba4152bSMatthew G. Knepley   PetscErrorCode ierr;
1996ba4152bSMatthew G. Knepley 
2006ba4152bSMatthew G. Knepley   PetscFunctionBegin;
2016ba4152bSMatthew G. Knepley   minMaxVal[1] = -minMaxVal[1];
2026ba4152bSMatthew G. Knepley   ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_REAL, MPI_MIN, comm);CHKERRQ(ierr);
2036ba4152bSMatthew G. Knepley   minMaxValGlobal[1] = -minMaxValGlobal[1];
2046ba4152bSMatthew G. Knepley   PetscFunctionReturn(0);
2056ba4152bSMatthew G. Knepley }
206