#include <petscsys.h> /*I "petscsys.h" I*/

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length determines a local
    (or global) length via a simple formula. Splits so each processor's local size
    is divisible by the block size.

    Collective (if N is PETSC_DECIDE)

    Input Parameters:
+   comm - MPI communicator that shares the object being divided
.   bs - block size
.   n - local length (or PETSC_DECIDE to have it set)
-   N - global length (or PETSC_DECIDE)

    Level: developer

    Notes:
    n and N cannot both be PETSC_DECIDE

    If one processor calls this with N of PETSC_DECIDE then all processors
    must; otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
  } else if (*n == PETSC_DECIDE) {
    PetscInt Nbs = *N/bs; /* number of blocks to distribute; N is expected to be divisible by bs */
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    /* the first Nbs % size ranks each receive one extra block of bs entries */
    *n = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  PetscFunctionReturn(0);
}
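/*
  A minimal usage sketch (illustrative only, not compiled as part of this file),
  assuming a standard PETSc main program where ierr has been declared: splitting
  N = 48 with bs = 4 on 5 ranks distributes the Nbs = 12 blocks as 3,3,2,2,2,
  i.e. local sizes 12,12,8,8,8.

    PetscInt n = PETSC_DECIDE, N = 48;
    ierr = PetscSplitOwnershipBlock(PETSC_COMM_WORLD,4,&n,&N);CHKERRQ(ierr);
    // every rank's n is now a multiple of the block size 4
*/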
/*@
    PetscSplitOwnership - Given a global (or local) length determines a local
    (or global) length via a simple formula

    Collective (if n or N is PETSC_DECIDE)

    Input Parameters:
+   comm - MPI communicator that shares the object being divided
.   n - local length (or PETSC_DECIDE to have it set)
-   N - global length (or PETSC_DECIDE)

    Level: developer

    Notes:
    n and N cannot both be PETSC_DECIDE

    If one processor calls this with n or N of PETSC_DECIDE then all processors
    must. Otherwise, an error is raised in debug mode, while the program will hang
    in optimized (i.e. configured using --with-debugging=0) mode.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2],g[2];
    /* verify that all ranks agree on which of n and N are PETSC_DECIDE */
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    /* sum the local lengths in 64-bit arithmetic so overflow of PetscInt can be detected */
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRMPI(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running",M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    /* the first N % size ranks each receive one extra entry */
    *n = *N/size + ((*N % size) > rank);
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
  }
  PetscFunctionReturn(0);
}
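/*
  A minimal usage sketch (illustrative only, not compiled as part of this file):
  splitting N = 50 across 6 ranks yields local sizes 9,9,8,8,8,8, since
  50/6 = 8 with remainder 2, so ranks 0 and 1 each receive one extra entry.

    PetscInt n = PETSC_DECIDE, N = 50;
    ierr = PetscSplitOwnership(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
*/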
/*@
    PetscSplitOwnershipEqual - Given a global (or local) length determines a local
    (or global) length via a simple formula, trying to have all local lengths equal

    Collective (if n or N is PETSC_DECIDE)

    Input Parameters:
+   comm - MPI communicator that shares the object being divided
.   n - local length (or PETSC_DECIDE to have it set)
-   N - global length (or PETSC_DECIDE)

    Level: developer

    Notes:
    This is intended to be used with MATSCALAPACK, where the local size must
    be equal in all processes (except possibly the last one). For instance,
    the local sizes when splitting N=50 with 6 processes are 9,9,9,9,9,5

    n and N cannot both be PETSC_DECIDE

    If one processor calls this with n or N of PETSC_DECIDE then all processors
    must. Otherwise, an error is raised in debug mode, while the program will hang
    in optimized (i.e. configured using --with-debugging=0) mode.

.seealso: PetscSplitOwnership(), PetscSplitOwnershipBlock()

@*/
PetscErrorCode PetscSplitOwnershipEqual(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");
  if (PetscDefined(USE_DEBUG)) {
    PetscMPIInt l[2],g[2];
    /* verify that all ranks agree on which of n and N are PETSC_DECIDE */
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }

  if (*N == PETSC_DECIDE) {
    PetscInt64 m = *n, M;
    /* sum the local lengths in 64-bit arithmetic so overflow of PetscInt can be detected */
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRMPI(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running",M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRMPI(ierr);
    *n = *N/size;
    if (*N % size) {
      /* ranks that fit entirely receive ceil(N/size) = n+1 entries; the first rank
         that does not fit receives the leftover, and any later ranks receive 0 */
      if ((rank+1)*(*n+1) <= *N) *n = *n+1;
      else if (rank*(*n+1) <= *N) *n = *N - rank*(*n+1);
      else *n = 0;
    }
  } else if (PetscDefined(USE_DEBUG)) {
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D",tmp,*N,*n);
  }
  PetscFunctionReturn(0);
}
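/*
  A minimal usage sketch (illustrative only, not compiled as part of this file):
  splitting N = 50 across 6 ranks yields local sizes 9,9,9,9,9,5. The first five
  ranks receive ceil(50/6) = 9 entries and the last rank the remaining 5, the
  equal-local-size layout described in the Notes above for MATSCALAPACK.

    PetscInt n = PETSC_DECIDE, N = 50;
    ierr = PetscSplitOwnershipEqual(PETSC_COMM_WORLD,&n,&N);CHKERRQ(ierr);
*/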