/* petsc/src/sys/utils/mpiu.c */

#include <petscsys.h>        /*I  "petscsys.h"  I*/
#include <petsc/private/petscimpl.h>
/*
    Note that a tag of 0 is fine here because comm is a private communicator
  generated below just for these routines.
*/

PETSC_INTERN PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = 0;
  MPI_Status     status;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* Block until the previous process passes along the zero-length token */
  if (rank) {
    ierr = MPI_Recv(NULL,0,MPI_INT,rank-1,tag,comm,&status);CHKERRQ(ierr);
  }
  /* Send to the next process in the group unless we are the last process */
  if ((rank % ng) < ng - 1 && rank != size - 1) {
    ierr = MPI_Send(NULL,0,MPI_INT,rank + 1,tag,comm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

PETSC_INTERN PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = 0;
  MPI_Status     status;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);

  /* Send to the first process in the next group */
  if ((rank % ng) == ng - 1 || rank == size - 1) {
    ierr = MPI_Send(NULL,0,MPI_INT,(rank + 1) % size,tag,comm);CHKERRQ(ierr);
  }
  /* Process 0 waits for the token to come all the way around */
  if (!rank) {
    ierr = MPI_Recv(NULL,0,MPI_INT,size-1,tag,comm,&status);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

/* ---------------------------------------------------------------------*/
/*
    The variable Petsc_Seq_keyval is used to indicate an MPI attribute that
  is attached to a communicator that manages the sequential phase code below.
*/
PetscMPIInt Petsc_Seq_keyval = MPI_KEYVAL_INVALID;

/*@
   PetscSequentialPhaseBegin - Begins a sequential section of code.

   Collective on MPI_Comm

   Input Parameters:
+  comm - Communicator to sequentialize.
-  ng   - Number of processes per group.  This many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Notes:
   PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
   way to force a section of code to be executed by the processes in
   rank order.  Typically, this is done with
.vb
      PetscSequentialPhaseBegin(comm, 1);
      <code to be executed sequentially>
      PetscSequentialPhaseEnd(comm, 1);
.ve

   Often, the sequential code contains output statements (e.g., printf) to
   be executed.  Note that you may need to flush the I/O buffers before
   calling PetscSequentialPhaseEnd().  Also, note that some systems do
   not propagate I/O in any order to the controlling terminal (in other words,
   even if you flush the output, you may not get the data in the order
   that you want).

.seealso: PetscSequentialPhaseEnd()

   Concepts: sequential stage

@*/
PetscErrorCode  PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  MPI_Comm       local_comm,*addr_local_comm;

  PetscFunctionBegin;
  ierr = PetscSysInitializePackage();CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);

  /* Get the private communicator for the sequential operations */
  if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
    ierr = MPI_Comm_create_keyval(MPI_COMM_NULL_COPY_FN,MPI_COMM_NULL_DELETE_FN,&Petsc_Seq_keyval,0);CHKERRQ(ierr);
  }

  ierr = MPI_Comm_dup(comm,&local_comm);CHKERRQ(ierr);
  ierr = PetscMalloc1(1,&addr_local_comm);CHKERRQ(ierr);

  *addr_local_comm = local_comm;

  ierr = MPI_Comm_set_attr(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRQ(ierr);
  ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PetscSequentialPhaseEnd - Ends a sequential section of code.

   Collective on MPI_Comm

   Input Parameters:
+  comm - Communicator to sequentialize.
-  ng   - Number of processes per group.  This many processes are allowed to execute
   at the same time (usually 1)

   Level: intermediate

   Notes:
   See PetscSequentialPhaseBegin() for more details.

.seealso: PetscSequentialPhaseBegin()

   Concepts: sequential stage

@*/
PetscErrorCode  PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,flag;
  MPI_Comm       local_comm,*addr_local_comm;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size == 1) PetscFunctionReturn(0);

  /* Retrieve the private communicator stashed on comm by PetscSequentialPhaseBegin() */
  ierr = MPI_Comm_get_attr(comm,Petsc_Seq_keyval,(void**)&addr_local_comm,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
  local_comm = *addr_local_comm;

  ierr = PetscSequentialPhaseEnd_Private(local_comm,ng);CHKERRQ(ierr);

  ierr = PetscFree(addr_local_comm);CHKERRQ(ierr);
  ierr = MPI_Comm_free(&local_comm);CHKERRQ(ierr);
  ierr = MPI_Comm_delete_attr(comm,Petsc_Seq_keyval);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

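/*
   A minimal usage sketch (not compiled into the library; a hypothetical driver
   assuming PETSc was built with MPI): each rank takes its turn printing, in
   rank order, flushing before it releases the phase as the notes above advise.

      #include <petscsys.h>

      int main(int argc,char **argv)
      {
        PetscErrorCode ierr;
        PetscMPIInt    rank;

        ierr = PetscInitialize(&argc,&argv,NULL,NULL);if (ierr) return ierr;
        ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
        ierr = PetscSequentialPhaseBegin(PETSC_COMM_WORLD,1);CHKERRQ(ierr);
        printf("Hello from rank %d\n",(int)rank);
        fflush(stdout);   // flush so the output leaves this rank before the token does
        ierr = PetscSequentialPhaseEnd(PETSC_COMM_WORLD,1);CHKERRQ(ierr);
        ierr = PetscFinalize();
        return ierr;
      }
*/
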
/*@C
  PetscGlobalMinMaxInt - Get the global min/max from local min/max input

  Collective on comm

  Input Parameters:
+ comm      - The MPI communicator
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.keywords: minimum, maximum
.seealso: PetscGlobalMinMaxReal(), PetscSplitOwnership()
@*/
PetscErrorCode PetscGlobalMinMaxInt(MPI_Comm comm, PetscInt minMaxVal[2], PetscInt minMaxValGlobal[2])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Negate the local max so a single MPI_MIN reduction yields both the global min and the (negated) global max */
  minMaxVal[1] = -minMaxVal[1];
  ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_INT, MPI_MIN, comm);CHKERRQ(ierr);
  minMaxVal[1] = -minMaxVal[1]; /* undo the negation so the caller's input array is left unchanged */
  minMaxValGlobal[1] = -minMaxValGlobal[1];
  PetscFunctionReturn(0);
}
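
/*
   A minimal usage sketch for PetscGlobalMinMaxInt (illustrative only; the
   variable names n, local, and global are hypothetical): compute the global
   extrema of a per-rank quantity n in one reduction.

      PetscInt local[2], global[2];
      local[0] = n;   // local minimum candidate
      local[1] = n;   // local maximum candidate
      ierr = PetscGlobalMinMaxInt(PETSC_COMM_WORLD,local,global);CHKERRQ(ierr);
      // global[0] now holds the global min of n, global[1] the global max
*/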

/*@C
  PetscGlobalMinMaxReal - Get the global min/max from local min/max input

  Collective on comm

  Input Parameters:
+ comm      - The MPI communicator
- minMaxVal - An array with the local min and max

  Output Parameter:
. minMaxValGlobal - An array with the global min and max

  Level: beginner

.keywords: minimum, maximum
.seealso: PetscGlobalMinMaxInt(), PetscSplitOwnership()
@*/
PetscErrorCode PetscGlobalMinMaxReal(MPI_Comm comm, PetscReal minMaxVal[2], PetscReal minMaxValGlobal[2])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Same negation trick as PetscGlobalMinMaxInt() */
  minMaxVal[1] = -minMaxVal[1];
  ierr = MPI_Allreduce(minMaxVal, minMaxValGlobal, 2, MPIU_REAL, MPI_MIN, comm);CHKERRQ(ierr);
  minMaxVal[1] = -minMaxVal[1]; /* undo the negation so the caller's input array is left unchanged */
  minMaxValGlobal[1] = -minMaxValGlobal[1];
  PetscFunctionReturn(0);
}
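
/*
   Design note: since min_i(-x_i) = -max_i(x_i), negating the max entry lets a
   single MPI_MIN reduction over two entries deliver both extrema at once.  The
   equivalent, but twice-the-latency, formulation would be two reductions
   (localMin/localMax and globalMin/globalMax are hypothetical names):

      ierr = MPI_Allreduce(&localMin,&globalMin,1,MPIU_REAL,MPI_MIN,comm);CHKERRQ(ierr);
      ierr = MPI_Allreduce(&localMax,&globalMax,1,MPIU_REAL,MPI_MAX,comm);CHKERRQ(ierr);
*/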