#include /*I "petscsys.h" I*/ /*@ PetscSplitOwnershipBlock - Given a global (or local) length determines a local (or global) length via a simple formula. Splits so each processors local size is divisible by the block size. Collective on MPI_Comm (if N is PETSC_DECIDE) Input Parameters: + comm - MPI communicator that shares the object being divided . bs - block size . n - local length (or PETSC_DECIDE to have it set) - N - global length (or PETSC_DECIDE) Level: developer Notes: n and N cannot be both PETSC_DECIDE If one processor calls this with N of PETSC_DECIDE then all processors must, otherwise the program will hang. .seealso: PetscSplitOwnership() @*/ PetscErrorCode PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N) { PetscErrorCode ierr; PetscMPIInt size,rank; PetscFunctionBegin; if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE"); if (*N == PETSC_DECIDE) { if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs); ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); } else if (*n == PETSC_DECIDE) { PetscInt Nbs = *N/bs; ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); *n = bs*(Nbs/size + ((Nbs % size) > rank)); } PetscFunctionReturn(0); } /*@ PetscSplitOwnership - Given a global (or local) length determines a local (or global) length via a simple formula Collective on MPI_Comm (if N is PETSC_DECIDE) Input Parameters: + comm - MPI communicator that shares the object being divided . n - local length (or PETSC_DECIDE to have it set) - N - global length (or PETSC_DECIDE) Level: developer Notes: n and N cannot be both PETSC_DECIDE If one processor calls this with N of PETSC_DECIDE then all processors must, otherwise the program will hang. .seealso: PetscSplitOwnershipBlock() @*/ PetscErrorCode PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N) { PetscErrorCode ierr; PetscMPIInt size,rank; PetscFunctionBegin; if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split"); if (*N == PETSC_DECIDE) { ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); } else if (*n == PETSC_DECIDE) { ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); *n = *N/size + ((*N % size) > rank); #if defined(PETSC_USE_DEBUG) } else { PetscInt tmp; ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n); #endif } PetscFunctionReturn(0); }