xref: /petsc/src/sys/utils/mpiu.c (revision e30d229923a696673d75fd4bbec7dc9405e48f2f)
1e5c89e4eSSatish Balay #define PETSC_DLL
2e5c89e4eSSatish Balay 
3e5c89e4eSSatish Balay #include "petsc.h"        /*I  "petsc.h"  I*/
4e5c89e4eSSatish Balay /*
5e5c89e4eSSatish Balay     Note that tag of 0 is ok because comm is a private communicator
6e5c89e4eSSatish Balay   generated below just for these routines.
7e5c89e4eSSatish Balay */
8e5c89e4eSSatish Balay 
9e5c89e4eSSatish Balay #undef __FUNCT__
10e5c89e4eSSatish Balay #define __FUNCT__ "PetscSequentialPhaseBegin_Private"
11e5c89e4eSSatish Balay PetscErrorCode PetscSequentialPhaseBegin_Private(MPI_Comm comm,int ng)
12e5c89e4eSSatish Balay {
13e5c89e4eSSatish Balay   PetscErrorCode ierr;
14e5c89e4eSSatish Balay   PetscMPIInt    rank,size,tag = 0;
15e5c89e4eSSatish Balay   MPI_Status     status;
16e5c89e4eSSatish Balay 
17e5c89e4eSSatish Balay   PetscFunctionBegin;
18e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
19e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
20e5c89e4eSSatish Balay   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
21e5c89e4eSSatish Balay   if (rank) {
22e5c89e4eSSatish Balay     ierr = MPI_Recv(0,0,MPI_INT,rank-1,tag,comm,&status);CHKERRQ(ierr);
23e5c89e4eSSatish Balay   }
24e5c89e4eSSatish Balay   /* Send to the next process in the group unless we are the last process */
25e5c89e4eSSatish Balay   if ((rank % ng) < ng - 1 && rank != size - 1) {
26e5c89e4eSSatish Balay     ierr = MPI_Send(0,0,MPI_INT,rank + 1,tag,comm);CHKERRQ(ierr);
27e5c89e4eSSatish Balay   }
28e5c89e4eSSatish Balay   PetscFunctionReturn(0);
29e5c89e4eSSatish Balay }
30e5c89e4eSSatish Balay 
31e5c89e4eSSatish Balay #undef __FUNCT__
32e5c89e4eSSatish Balay #define __FUNCT__ "PetscSequentialPhaseEnd_Private"
33e5c89e4eSSatish Balay PetscErrorCode PetscSequentialPhaseEnd_Private(MPI_Comm comm,int ng)
34e5c89e4eSSatish Balay {
35e5c89e4eSSatish Balay   PetscErrorCode ierr;
36e5c89e4eSSatish Balay   PetscMPIInt    rank,size,tag = 0;
37e5c89e4eSSatish Balay   MPI_Status     status;
38e5c89e4eSSatish Balay 
39e5c89e4eSSatish Balay   PetscFunctionBegin;
40e5c89e4eSSatish Balay   ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
41e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
42e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
43e5c89e4eSSatish Balay 
44e5c89e4eSSatish Balay   /* Send to the first process in the next group */
45e5c89e4eSSatish Balay   if ((rank % ng) == ng - 1 || rank == size - 1) {
46e5c89e4eSSatish Balay     ierr = MPI_Send(0,0,MPI_INT,(rank + 1) % size,tag,comm);CHKERRQ(ierr);
47e5c89e4eSSatish Balay   }
48e5c89e4eSSatish Balay   if (!rank) {
49e5c89e4eSSatish Balay     ierr = MPI_Recv(0,0,MPI_INT,size-1,tag,comm,&status);CHKERRQ(ierr);
50e5c89e4eSSatish Balay   }
51e5c89e4eSSatish Balay   PetscFunctionReturn(0);
52e5c89e4eSSatish Balay }
53e5c89e4eSSatish Balay 
/* ---------------------------------------------------------------------*/
/*
    Petsc_Seq_keyval is the MPI attribute key under which the private
  duplicated communicator used by the sequential-phase routines below is
  attached to the user's communicator.  Created lazily on first use of
  PetscSequentialPhaseBegin(); MPI_KEYVAL_INVALID means "not yet created".
*/
static int Petsc_Seq_keyval = MPI_KEYVAL_INVALID;
60e5c89e4eSSatish Balay 
61e5c89e4eSSatish Balay #undef __FUNCT__
62e5c89e4eSSatish Balay #define __FUNCT__ "PetscSequentialPhaseBegin"
63*e30d2299SSatish Balay /*@
64e5c89e4eSSatish Balay    PetscSequentialPhaseBegin - Begins a sequential section of code.
65e5c89e4eSSatish Balay 
66e5c89e4eSSatish Balay    Collective on MPI_Comm
67e5c89e4eSSatish Balay 
68e5c89e4eSSatish Balay    Input Parameters:
69e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
70e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
71e5c89e4eSSatish Balay    at the same time (usually 1)
72e5c89e4eSSatish Balay 
73e5c89e4eSSatish Balay    Level: intermediate
74e5c89e4eSSatish Balay 
75e5c89e4eSSatish Balay    Notes:
76e5c89e4eSSatish Balay    PetscSequentialPhaseBegin() and PetscSequentialPhaseEnd() provide a
77e5c89e4eSSatish Balay    way to force a section of code to be executed by the processes in
78e5c89e4eSSatish Balay    rank order.  Typically, this is done with
79e5c89e4eSSatish Balay .vb
80e5c89e4eSSatish Balay       PetscSequentialPhaseBegin(comm, 1);
81e5c89e4eSSatish Balay       <code to be executed sequentially>
82e5c89e4eSSatish Balay       PetscSequentialPhaseEnd(comm, 1);
83e5c89e4eSSatish Balay .ve
84e5c89e4eSSatish Balay 
85e5c89e4eSSatish Balay    Often, the sequential code contains output statements (e.g., printf) to
86e5c89e4eSSatish Balay    be executed.  Note that you may need to flush the I/O buffers before
87e5c89e4eSSatish Balay    calling PetscSequentialPhaseEnd().  Also, note that some systems do
88e5c89e4eSSatish Balay    not propagate I/O in any order to the controling terminal (in other words,
89e5c89e4eSSatish Balay    even if you flush the output, you may not get the data in the order
90e5c89e4eSSatish Balay    that you want).
91e5c89e4eSSatish Balay 
92e5c89e4eSSatish Balay .seealso: PetscSequentialPhaseEnd()
93e5c89e4eSSatish Balay 
94e5c89e4eSSatish Balay    Concepts: sequential stage
95e5c89e4eSSatish Balay 
96e5c89e4eSSatish Balay @*/
97e5c89e4eSSatish Balay PetscErrorCode PETSC_DLLEXPORT PetscSequentialPhaseBegin(MPI_Comm comm,int ng)
98e5c89e4eSSatish Balay {
99e5c89e4eSSatish Balay   PetscErrorCode ierr;
100e5c89e4eSSatish Balay   PetscMPIInt    size;
101e5c89e4eSSatish Balay   MPI_Comm       local_comm,*addr_local_comm;
102e5c89e4eSSatish Balay 
103e5c89e4eSSatish Balay   PetscFunctionBegin;
104e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
105e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
106e5c89e4eSSatish Balay 
107e5c89e4eSSatish Balay   /* Get the private communicator for the sequential operations */
108e5c89e4eSSatish Balay   if (Petsc_Seq_keyval == MPI_KEYVAL_INVALID) {
109e5c89e4eSSatish Balay     ierr = MPI_Keyval_create(MPI_NULL_COPY_FN,MPI_NULL_DELETE_FN,&Petsc_Seq_keyval,0);CHKERRQ(ierr);
110e5c89e4eSSatish Balay   }
111e5c89e4eSSatish Balay 
112e5c89e4eSSatish Balay   ierr = MPI_Comm_dup(comm,&local_comm);CHKERRQ(ierr);
113e5c89e4eSSatish Balay   ierr = PetscMalloc(sizeof(MPI_Comm),&addr_local_comm);CHKERRQ(ierr);
114e5c89e4eSSatish Balay   *addr_local_comm = local_comm;
115e5c89e4eSSatish Balay   ierr = MPI_Attr_put(comm,Petsc_Seq_keyval,(void*)addr_local_comm);CHKERRQ(ierr);
116e5c89e4eSSatish Balay   ierr = PetscSequentialPhaseBegin_Private(local_comm,ng);CHKERRQ(ierr);
117e5c89e4eSSatish Balay   PetscFunctionReturn(0);
118e5c89e4eSSatish Balay }
119e5c89e4eSSatish Balay 
120e5c89e4eSSatish Balay #undef __FUNCT__
121e5c89e4eSSatish Balay #define __FUNCT__ "PetscSequentialPhaseEnd"
122*e30d2299SSatish Balay /*@
123e5c89e4eSSatish Balay    PetscSequentialPhaseEnd - Ends a sequential section of code.
124e5c89e4eSSatish Balay 
125e5c89e4eSSatish Balay    Collective on MPI_Comm
126e5c89e4eSSatish Balay 
127e5c89e4eSSatish Balay    Input Parameters:
128e5c89e4eSSatish Balay +  comm - Communicator to sequentialize.
129e5c89e4eSSatish Balay -  ng   - Number in processor group.  This many processes are allowed to execute
130e5c89e4eSSatish Balay    at the same time (usually 1)
131e5c89e4eSSatish Balay 
132e5c89e4eSSatish Balay    Level: intermediate
133e5c89e4eSSatish Balay 
134e5c89e4eSSatish Balay    Notes:
135e5c89e4eSSatish Balay    See PetscSequentialPhaseBegin() for more details.
136e5c89e4eSSatish Balay 
137e5c89e4eSSatish Balay .seealso: PetscSequentialPhaseBegin()
138e5c89e4eSSatish Balay 
139e5c89e4eSSatish Balay    Concepts: sequential stage
140e5c89e4eSSatish Balay 
141e5c89e4eSSatish Balay @*/
142e5c89e4eSSatish Balay PetscErrorCode PETSC_DLLEXPORT PetscSequentialPhaseEnd(MPI_Comm comm,int ng)
143e5c89e4eSSatish Balay {
144e5c89e4eSSatish Balay   PetscErrorCode ierr;
145e5c89e4eSSatish Balay   PetscMPIInt    size,flag;
146e5c89e4eSSatish Balay   MPI_Comm       local_comm,*addr_local_comm;
147e5c89e4eSSatish Balay 
148e5c89e4eSSatish Balay   PetscFunctionBegin;
149e5c89e4eSSatish Balay   ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
150e5c89e4eSSatish Balay   if (size == 1) PetscFunctionReturn(0);
151e5c89e4eSSatish Balay 
152e5c89e4eSSatish Balay   ierr = MPI_Attr_get(comm,Petsc_Seq_keyval,(void **)&addr_local_comm,&flag);CHKERRQ(ierr);
153e5c89e4eSSatish Balay   if (!flag) {
154e5c89e4eSSatish Balay     SETERRQ(PETSC_ERR_ARG_INCOMP,"Wrong MPI communicator; must pass in one used with PetscSequentialPhaseBegin()");
155e5c89e4eSSatish Balay   }
156e5c89e4eSSatish Balay   local_comm = *addr_local_comm;
157e5c89e4eSSatish Balay 
158e5c89e4eSSatish Balay   ierr = PetscSequentialPhaseEnd_Private(local_comm,ng);CHKERRQ(ierr);
159e5c89e4eSSatish Balay 
160e5c89e4eSSatish Balay   ierr = PetscFree(addr_local_comm);CHKERRQ(ierr);
161e5c89e4eSSatish Balay   ierr = MPI_Comm_free(&local_comm);CHKERRQ(ierr);
162e5c89e4eSSatish Balay   ierr = MPI_Attr_delete(comm,Petsc_Seq_keyval);CHKERRQ(ierr);
163e5c89e4eSSatish Balay   PetscFunctionReturn(0);
164e5c89e4eSSatish Balay }
165