xref: /petsc/src/sys/utils/psplit.c (revision e611a964e9853b74d61a56642fe9d06a6e51780f)
1 
2 #include <petscsys.h>           /*I    "petscsys.h" I*/
3 
4 #undef __FUNCT__
5 #define __FUNCT__ "PetscSplitOwnershipBlock"
6 /*@
7     PetscSplitOwnershipBlock - Given a global (or local) length determines a local
8         (or global) length via a simple formula. Splits so each processor's local size
9         is divisible by the block size.
10 
11    Collective on MPI_Comm (if N is PETSC_DECIDE)
12 
13    Input Parameters:
14 +    comm - MPI communicator that shares the object being divided
15 .    bs - block size
16 .    n - local length (or PETSC_DECIDE to have it set)
17 -    N - global length (or PETSC_DECIDE)
18 
19   Level: developer
20 
21    Notes:
22      n and N cannot both be PETSC_DECIDE
23 
24      If one processor calls this with N of PETSC_DECIDE then all processors
25      must, otherwise the program will hang.
26 
27 .seealso: PetscSplitOwnership()
28 
29 @*/
30 PetscErrorCode  PetscSplitOwnershipBlock(MPI_Comm comm,PetscInt bs,PetscInt *n,PetscInt *N)
31 {
32   PetscErrorCode ierr;
33   PetscMPIInt    size,rank;
34 
35   PetscFunctionBegin;
36   if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE");
37 
38   if (*N == PETSC_DECIDE) {
39     if (*n % bs != 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"local size %D not divisible by block size %D",*n,bs);
40     ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
41   } else if (*n == PETSC_DECIDE) {
42     PetscInt Nbs = *N/bs;
43     ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
44     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
45     *n   = bs*(Nbs/size + ((Nbs % size) > rank));
46   }
47   PetscFunctionReturn(0);
48 }
49 
50 
51 #undef __FUNCT__
52 #define __FUNCT__ "PetscSplitOwnership"
53 /*@
54     PetscSplitOwnership - Given a global (or local) length determines a local
55         (or global) length via a simple formula
56 
57    Collective on MPI_Comm (if N is PETSC_DECIDE)
58 
59    Input Parameters:
60 +    comm - MPI communicator that shares the object being divided
61 .    n - local length (or PETSC_DECIDE to have it set)
62 -    N - global length (or PETSC_DECIDE)
63 
64   Level: developer
65 
66    Notes:
67      n and N cannot both be PETSC_DECIDE
68 
69      If one processor calls this with N of PETSC_DECIDE then all processors
70      must, otherwise the program will hang.
71 
72 .seealso: PetscSplitOwnershipBlock()
73 
74 @*/
75 PetscErrorCode  PetscSplitOwnership(MPI_Comm comm,PetscInt *n,PetscInt *N)
76 {
77   PetscErrorCode ierr;
78   PetscMPIInt    size,rank;
79 
80   PetscFunctionBegin;
81   if (*N == PETSC_DECIDE && *n == PETSC_DECIDE) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Both n and N cannot be PETSC_DECIDE\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split");
82 
83   if (*N == PETSC_DECIDE) {
84     ierr = MPIU_Allreduce(n,N,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
85   } else if (*n == PETSC_DECIDE) {
86     ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
87     ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
88     *n   = *N/size + ((*N % size) > rank);
89 #if defined(PETSC_USE_DEBUG)
90   } else {
91     PetscInt tmp;
92     ierr = MPIU_Allreduce(n,&tmp,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
93     if (tmp != *N) SETERRQ3(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Sum of local lengths %D does not equal global length %D, my local length %D\n  likely a call to VecSetSizes() or MatSetSizes() is wrong.\nSee http://www.mcs.anl.gov/petsc/documentation/faq.html#split",tmp,*N,*n);
94 #endif
95   }
96   PetscFunctionReturn(0);
97 }
98 
99