xref: /petsc/src/sys/utils/pbarrier.c (revision b2566f29c2b6470df769aa9f7deb9e2726b0959e)
1 
2 #include <petsc/private/petscimpl.h>              /*I "petscsys.h" I*/
3 
4 /* Logging support */
5 PetscLogEvent PETSC_Barrier=0;
6 
/*
   hash - djb2 string hash (Dan Bernstein): h = h*33 + c for each character.

   Computed in unsigned arithmetic: the previous signed-int version overflowed
   for all but the shortest strings, which is undefined behavior in C. Unsigned
   wraparound is well-defined; the final conversion back to int preserves the
   old bit pattern on two's-complement targets, so callers see the same values.
*/
static int hash(const char *str)
{
  unsigned int c,hash = 5381;

  /* cast via unsigned char so bytes >= 0x80 hash consistently regardless of char signedness */
  while ((c = (unsigned char)*str++)) hash = ((hash << 5) + hash) + c; /* hash * 33 + c */
  return (int)hash;
}
14 
15 PetscErrorCode PetscAllreduceBarrierCheck(MPI_Comm comm,PetscMPIInt ctn,int line,const char *func,const char *file)
16 {
17   PetscMPIInt err;
18   PetscMPIInt b1[2],b2[2];
19   b1[0] = -(PetscMPIInt)line; b1[1] = (PetscMPIInt)line;
20   err = MPI_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,comm);
21   if (err) {
22     return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() failed");
23   }
24   if (-b2[0] != b2[1]) {
25     return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() called in different locations on different processors");
26   }
27   b1[0] = -(PetscMPIInt)hash(func); b1[1] = -b1[0];
28   err = MPI_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,comm);
29   if (err) {
30     return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() failed");
31   }
32   if (-b2[0] != b2[1]) {
33     return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() called in different locations on different processors");
34   }
35   b1[0] = -(PetscMPIInt)ctn; b1[1] = (PetscMPIInt)ctn;
36   err = MPI_Allreduce(b1,b2,2,MPI_INT,MPI_MAX,comm);
37   if (err) {
38     return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() failed");
39   }
40   if (-b2[0] != b2[1]) {
41     return PetscError(PETSC_COMM_SELF,line,func,file,PETSC_ERR_PLIB,PETSC_ERROR_INITIAL,"MPI_Allreduced() called with different counts %d on different processors",ctn);
42   }
43   return 0;
44 }
45 
46 #undef __FUNCT__
47 #define __FUNCT__ "PetscBarrier"
48 /*@C
49     PetscBarrier - Blocks until this routine is executed by all
50                    processors owning the object A.
51 
52    Input Parameters:
53 .  A - PETSc object  (Mat, Vec, IS, SNES etc...)
        Must be cast with a (PetscObject); can use NULL (for MPI_COMM_WORLD)
55 
56   Level: intermediate
57 
58   Notes:
59   This routine calls MPI_Barrier with the communicator of the PETSc Object "A".
60 
  In Fortran, use NULL_OBJECT (instead of NULL)
62 
63    Concepts: barrier
64 
65 @*/
66 PetscErrorCode  PetscBarrier(PetscObject obj)
67 {
68   PetscErrorCode ierr;
69   MPI_Comm       comm;
70 
71   PetscFunctionBegin;
72   if (obj) PetscValidHeader(obj,1);
73   ierr = PetscLogEventBegin(PETSC_Barrier,obj,0,0,0);CHKERRQ(ierr);
74   if (obj) {
75     ierr = PetscObjectGetComm(obj,&comm);CHKERRQ(ierr);
76   } else comm = PETSC_COMM_WORLD;
77   ierr = MPI_Barrier(comm);CHKERRQ(ierr);
78   ierr = PetscLogEventEnd(PETSC_Barrier,obj,0,0,0);CHKERRQ(ierr);
79   PetscFunctionReturn(0);
80 }
81 
82