xref: /petsc/src/sys/utils/pbarrier.c (revision 8f5db7ef98c3d4d9d2da0477efdba2d5ce6b17ce)
1e5c89e4eSSatish Balay 
2af0996ceSBarry Smith #include <petsc/private/petscimpl.h>              /*I "petscsys.h" I*/
3e5c89e4eSSatish Balay 
4e5c89e4eSSatish Balay /* Logging support */
597486a04SSatish Balay PetscLogEvent PETSC_Barrier=0;
6e5c89e4eSSatish Balay 
/*
   hash - djb2 string hash (Bernstein), used to reduce a function name to a
   single integer so it can be compared across MPI ranks cheaply.

   Accumulates in unsigned arithmetic: the original signed `int` accumulator
   overflowed for long strings, which is undefined behavior in C; unsigned
   wraps modulo 2^32 by definition and yields identical values for the
   ASCII inputs this is used on. Only equality of the result matters.
*/
static int hash(const char *str)
{
  unsigned int c,h = 5381;

  /* h = h*33 + c for each byte; read via unsigned char so high-bit
     characters cannot inject negative values */
  while ((c = (unsigned char)*str++)) h = ((h << 5) + h) + c;
  return (int)h;
}
14b2566f29SBarry Smith 
15b2566f29SBarry Smith PetscErrorCode PetscAllreduceBarrierCheck(MPI_Comm comm,PetscMPIInt ctn,int line,const char *func,const char *file)
16b2566f29SBarry Smith {
17b2566f29SBarry Smith   PetscMPIInt err;
18*8f5db7efSBarry Smith   PetscMPIInt b1[6],b2[6];
19*8f5db7efSBarry Smith 
20*8f5db7efSBarry Smith   b1[0] = -(PetscMPIInt)line;       b1[1] = -b1[0];
21*8f5db7efSBarry Smith   b1[2] = -(PetscMPIInt)hash(func); b1[3] = -b1[2];
22*8f5db7efSBarry Smith   b1[4] = -(PetscMPIInt)ctn;        b1[5] = -b1[4];
23*8f5db7efSBarry Smith   err = MPI_Allreduce(b1,b2,6,MPI_INT,MPI_MAX,comm);
24*8f5db7efSBarry Smith   if (err) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() failed");
25*8f5db7efSBarry Smith   if (-b2[0] != b2[1]) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() called in different locations (code lines) on different processors");
26*8f5db7efSBarry Smith   if (-b2[2] != b2[3]) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() called in different locations (functions) on different processors");
27*8f5db7efSBarry Smith   if (-b2[4] != b2[5]) return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduce() called with different counts %d on different processors",ctn);
28b2566f29SBarry Smith   return 0;
29b2566f29SBarry Smith }
30b2566f29SBarry Smith 
31e5c89e4eSSatish Balay #undef __FUNCT__
32e5c89e4eSSatish Balay #define __FUNCT__ "PetscBarrier"
335075c446SSatish Balay /*@C
34e5c89e4eSSatish Balay     PetscBarrier - Blocks until this routine is executed by all
35e5c89e4eSSatish Balay                    processors owning the object A.
36e5c89e4eSSatish Balay 
37e5c89e4eSSatish Balay    Input Parameters:
38e5c89e4eSSatish Balay .  A - PETSc object  (Mat, Vec, IS, SNES etc...)
390298fd71SBarry Smith         Must be cast with a (PetscObject); NULL may be used (implies MPI_COMM_WORLD)
40e5c89e4eSSatish Balay 
41e5c89e4eSSatish Balay   Level: intermediate
42e5c89e4eSSatish Balay 
43e5c89e4eSSatish Balay   Notes:
44e5c89e4eSSatish Balay   This routine calls MPI_Barrier with the communicator of the PETSc Object "A".
45e5c89e4eSSatish Balay 
460298fd71SBarry Smith   In Fortran, use NULL_OBJECT (instead of NULL)
475075c446SSatish Balay 
48e5c89e4eSSatish Balay    Concepts: barrier
49e5c89e4eSSatish Balay 
50e5c89e4eSSatish Balay @*/
517087cfbeSBarry Smith PetscErrorCode  PetscBarrier(PetscObject obj)
52e5c89e4eSSatish Balay {
53e5c89e4eSSatish Balay   PetscErrorCode ierr;
54e5c89e4eSSatish Balay   MPI_Comm       comm;
55e5c89e4eSSatish Balay 
56e5c89e4eSSatish Balay   PetscFunctionBegin;
57e5c89e4eSSatish Balay   if (obj) PetscValidHeader(obj,1);
58e5c89e4eSSatish Balay   ierr = PetscLogEventBegin(PETSC_Barrier,obj,0,0,0);CHKERRQ(ierr);
59e5c89e4eSSatish Balay   if (obj) {
60e5c89e4eSSatish Balay     ierr = PetscObjectGetComm(obj,&comm);CHKERRQ(ierr);
61a297a907SKarl Rupp   } else comm = PETSC_COMM_WORLD;
62e5c89e4eSSatish Balay   ierr = MPI_Barrier(comm);CHKERRQ(ierr);
63e5c89e4eSSatish Balay   ierr = PetscLogEventEnd(PETSC_Barrier,obj,0,0,0);CHKERRQ(ierr);
64e5c89e4eSSatish Balay   PetscFunctionReturn(0);
65e5c89e4eSSatish Balay }
66e5c89e4eSSatish Balay 
67