#include <petscsys.h> /*I "petscsys.h" I*/

/*@
    PetscSplitOwnershipBlock - Given a global (or local) length determines a local
    (or global) length via a simple formula. Splits so each processors local size
    is divisible by the block size.

   Collective (if N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    bs - block size
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot be both PETSC_DECIDE

     If one processor calls this with N of PETSC_DECIDE then all processors
     must, otherwise the program will hang.

.seealso: PetscSplitOwnership()

@*/
PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");

  if (*N == PETSC_DECIDE) {
    /* Local sizes given: each must be a whole number of blocks before they are summed (collectively) into N */
    if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
    ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
  } else if (*n == PETSC_DECIDE) {
    /* Distribute Nbs = N/bs blocks: every rank gets Nbs/size blocks and the
       first Nbs%size ranks get one extra, so local sizes differ by at most bs.
       NOTE(review): *N is not checked for divisibility by bs here; if *N % bs != 0
       the integer division drops the remainder and the local sizes sum to
       *N - (*N % bs), not *N -- confirm callers guarantee divisibility. */
    PetscInt Nbs = *N/bs;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n   = bs*(Nbs/size + ((Nbs % size) > rank));
  }
  PetscFunctionReturn(0);
}


/*@
    PetscSplitOwnership - Given a global (or local) length determines a local
    (or global) length via a simple formula

   Collective (if n or N is PETSC_DECIDE)

   Input Parameters:
+    comm - MPI communicator that shares the object being divided
.    n - local length (or PETSC_DECIDE to have it set)
-    N - global length (or PETSC_DECIDE)

  Level: developer

   Notes:
     n and N cannot be both PETSC_DECIDE

     If one processor calls this with n or N of PETSC_DECIDE then all processors
     must. Otherwise, an error is thrown in debug mode while the program will hang
     in optimized (i.e. configured --with-debugging=0) mode.

.seealso: PetscSplitOwnershipBlock()

@*/
PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
{
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split");
#if defined(PETSC_USE_DEBUG)
  {
    /* Verify every rank agrees on which of n/N is PETSC_DECIDE; a mismatch would
       otherwise deadlock in the collectives below, so fail with a clear error instead */
    PetscMPIInt l[2],g[2];
    l[0] = (*n == PETSC_DECIDE) ? 1 : 0;
    l[1] = (*N == PETSC_DECIDE) ? 1 : 0;
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(l,g,2,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (g[0] && g[0] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for local size");
    if (g[1] && g[1] != size) SETERRQ(comm,PETSC_ERR_ARG_INCOMP,"All processes must supply PETSC_DECIDE for global size");
  }
#endif

  if (*N == PETSC_DECIDE) {
    /* Sum the local sizes in 64-bit: the global total can overflow PetscInt
       when PETSc is configured with 32-bit indices, so check before narrowing */
    PetscInt64 m = *n, M;
    ierr = MPIU_Allreduce(&m,&M,1,MPIU_INT64,MPI_SUM,comm);CHKERRQ(ierr);
    if (M > PETSC_MAX_INT) SETERRQ1(comm,PETSC_ERR_INT_OVERFLOW,"Global size overflow %" PetscInt64_FMT ". You may consider ./configure PETSc with --with-64-bit-indices for the case you are running", M);
    else *N = (PetscInt)M;
  } else if (*n == PETSC_DECIDE) {
    /* Every rank gets N/size entries; the first N%size ranks get one extra */
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
    *n = *N/size + ((*N % size) > rank);
#if defined(PETSC_USE_DEBUG)
  } else {
    /* Both n and N supplied: in debug builds, collectively verify they are consistent */
    PetscInt tmp;
    ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee https://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
#endif
  }
  PetscFunctionReturn(0);
}