#if !defined(__MPIAIJ_H)
#define __MPIAIJ_H

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout rowmap;
  PetscInt    **buf_ri,**buf_rj;
  PetscMPIInt *len_s,*len_r,*id_r;    /* arrays of length comm->size storing the lengths and source ranks of sent/received matrix values */
  PetscMPIInt nsend,nrecv;
  PetscInt    *bi,*bj;    /* i and j arrays of the local portion of mpi C (matrix product) - rename to ci, cj! */
  PetscInt    *owners_co,*coi,*coj;    /* i and j arrays of (p->B)^T*A*P - used in the communication */
} Mat_Merge_SeqsToMPI;

typedef struct { /* used by MatPtAPXXX_MPIAIJ_MPIAIJ() and MatMatMultXXX_MPIAIJ_MPIAIJ() */
  PetscInt               *startsj_s,*startsj_r;    /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar            *bufa;                    /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat                     P_loc,P_oth;             /* partial B_seq -- intend to replace B_seq */
  PetscInt                *api,*apj;               /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar             *apv;
  MatReuse                reuse;                   /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in 1st call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar             *apa;                    /* tmp array to store a row of A*P, used in MatMatMult() */
  Mat                     A_loc;                   /* used by MatTransposeMatMult(), contains api and apj */
  ISLocalToGlobalMapping  ltog;                    /* mapping from local column indices to global column indices for A_loc */
  Mat                     Pt;                      /* used by MatTransposeMatMult(), Pt = P^T */
  Mat                     Rd,Ro,AP_loc,C_loc,C_oth;
  PetscInt                algType;                 /* implementation algorithm */
  PetscSF                 sf;                      /* use it to communicate remote part of C */
  PetscInt                *c_othi,*c_rmti;

  Mat_Merge_SeqsToMPI *merge;
} Mat_APMPI;

typedef struct {
  Mat A,B;                              /* local submatrices: A (diag part),
                                           B (off-diag part) */
  PetscMPIInt size;                     /* size of communicator */
  PetscMPIInt rank;                     /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscBool   donotstash;               /* PETSC_TRUE if off processor entries dropped */
  MPI_Request *send_waits;              /* array of send requests */
  MPI_Request *recv_waits;              /* array of receive requests */
  PetscInt    nsends,nrecvs;            /* numbers of sends and receives */
  PetscScalar *svalues,*rvalues;        /* sending and receiving data */
  PetscInt    rmax;                     /* maximum message length */
#if defined(PETSC_USE_CTABLE)
  PetscTable colmap;
#else
  PetscInt *colmap;                     /* local col number of off-diag col */
#endif
  PetscInt *garray;                     /* global index of all off-processor columns */

  /* The following variables are used for matrix-vector products */
  Vec        lvec;                 /* local vector */
  Vec        diag;
  VecScatter Mvctx;                /* scatter context for vector */
  PetscBool  roworiented;          /* if true, row-oriented input, default true */

  /* The following variables are for MatGetRow() */
  PetscInt    *rowindices;         /* column indices for row */
  PetscScalar *rowvalues;          /* nonzero values in row */
  PetscBool   getrowactive;        /* indicates MatGetRow() has been called and the row has not yet been restored */

  PetscInt *ld;                    /* number of entries per row left of diagonal block */

  /* Used by device classes */
  void * spptr;

  /* MatSetValuesCOO() related stuff */
  PetscCount   coo_n;  /* Number of COO entries passed to MatSetPreallocationCOO() */
  PetscSF      coo_sf; /* SF to send/recv remote values in MatSetValuesCOO() */
  PetscCount   Atot1,Annz1,Btot1,Bnnz1; /* Number of local entries (tot1) and number of unique local entries (nnz1) belonging to diag (A) and offdiag (B) */
  PetscCount   Atot2,Annz2,Btot2,Bnnz2; /* Number of received entries (tot2) and number of unique received entries (nnz2) belonging to diag (A) and offdiag (B) */
  PetscCount   *Aimap1,*Ajmap1,*Aperm1; /* Lengths: [Annz1], [Annz1+1], [Atot1]. Local entries to diag */
  PetscCount   *Bimap1,*Bjmap1,*Bperm1; /* Lengths: [Bnnz1], [Bnnz1+1], [Btot1]. Local entries to offdiag */
  PetscCount   *Aimap2,*Ajmap2,*Aperm2; /* Lengths: [Annz2], [Annz2+1], [Atot2]. Remote entries to diag */
  PetscCount   *Bimap2,*Bjmap2,*Bperm2; /* Lengths: [Bnnz2], [Bnnz2+1], [Btot2]. Remote entries to offdiag */
  PetscCount   *Cperm1; /* [sendlen] Permutation to fill MPI send buffer. 'C' for communication */
  PetscScalar  *sendbuf,*recvbuf; /* Buffers for remote values in MatSetValuesCOO() */
  PetscInt     sendlen,recvlen; /* Lengths (in units of PetscScalar) of send/recvbuf */
} Mat_MPIAIJ;
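
/*
   Illustrative sketch (not PETSc source; the local names v, a, imode and the pointer aij are
   hypothetical) of one common way the map/perm arrays above can be consumed when assembling the
   diagonal block in MatSetValuesCOO(): for each of the Annz1 unique local nonzeros, the duplicate
   user entries are gathered through Aperm1 over the range given by Ajmap1, summed, and deposited
   at position Aimap1[k] of the diagonal block's value array.

     for (PetscCount k = 0; k < aij->Annz1; k++) {                  // unique local entries that land in diag (A)
       PetscScalar sum = 0.0;
       for (PetscCount p = aij->Ajmap1[k]; p < aij->Ajmap1[k+1]; p++) {
         sum += v[aij->Aperm1[p]];                                  // v: COO values supplied by the caller
       }
       if (imode == INSERT_VALUES) a[aij->Aimap1[k]]  = sum;        // a: value array of the diag block
       else                        a[aij->Aimap1[k]] += sum;
     }

   The B* arrays play the same role for the off-diagonal block, and the *2 arrays for entries
   received from other ranks through coo_sf.
*/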

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat,MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat,MatDuplicateOption,Mat*);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat,PetscInt,IS [],PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat,PetscInt,IS [],PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat,ISColoring,MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat,ISColoring,MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat,MatCreateSubMatrixOption,MatReuse,Mat *[]);
PETSC_INTERN PetscErrorCode MatView_MPIAIJ(Mat,PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat,IS,IS,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat,IS,IS,PetscInt,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat,IS,IS,IS,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat,IS,IS,MatReuse,Mat*);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,MatReuse,Mat*);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat,PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ_Binary(Mat,PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_AB_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_MPIAIJ_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductNumeric_RARt_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,Mat);

PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat,Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat,Mat,Mat,Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat,Mat,Mat);

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat,Mat,PetscReal,Mat);
#endif
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_MPIDense(Mat,MatType,MatReuse,Mat*);
#if defined(PETSC_HAVE_SCALAPACK)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat,MatType,MatReuse,Mat*);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_PtAP(void*);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(void*);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar [],InsertMode);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Mat,const PetscInt[],const PetscInt[]);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat,MatOption,PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat,Mat,Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat,Mat,PetscReal,Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat,Mat*);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(PetscOptionItems*,Mat);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);

#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128) && !defined(PETSC_USE_REAL___FP16)
PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat,IS,IS,const MatFactorInfo*,Mat*);
#endif
PETSC_INTERN PetscErrorCode MatSolve_MPIAIJ(Mat,Vec,Vec);
PETSC_INTERN PetscErrorCode MatILUFactor_MPIAIJ(Mat,IS,IS,const MatFactorInfo*);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*,PetscInt*);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat,Mat*);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat,Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat,Mat*,Mat*);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat,IS,IS,IS,MatStructure,Mat,Mat);

PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat,PetscCount,const PetscInt[],const PetscInt[]);
PETSC_INTERN PetscErrorCode MatResetPreallocationCOO_MPIAIJ(Mat);

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i,ad,ao,p_loc,p_oth,api,apj,apa) \
{\
  PetscInt    _anz,_pnz,_j,_k,*_ai,*_aj,_row,*_pi,*_pj,_nextp,*_apJ;\
  PetscScalar *_aa,_valtmp,*_pa;\
  _apJ = apj + api[i];\
  /* diagonal portion of A */\
  _ai  = ad->i;\
  _anz = _ai[i+1] - _ai[i];\
  _aj  = ad->j + _ai[i];\
  _aa  = ad->a + _ai[i];\
  for (_j=0; _j<_anz; _j++) {\
    _row = _aj[_j]; \
    _pi  = p_loc->i;                             \
    _pnz = _pi[_row+1] - _pi[_row];              \
    _pj  = p_loc->j + _pi[_row];                 \
    _pa  = p_loc->a + _pi[_row];                 \
    /* perform sparse axpy */                    \
    _valtmp = _aa[_j];                           \
    _nextp  = 0; \
    for (_k=0; _nextp<_pnz; _k++) {                    \
      if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */\
        apa[_k] += _valtmp*_pa[_nextp++];                             \
      } \
    }                                           \
    (void)PetscLogFlops(2.0*_pnz);              \
  }                                             \
  /* off-diagonal portion of A */               \
  if (p_oth){ \
    _ai  = ao->i;\
    _anz = _ai[i+1] - _ai[i];                   \
    _aj  = ao->j + _ai[i];                      \
    _aa  = ao->a + _ai[i];                      \
    for (_j=0; _j<_anz; _j++) {                 \
      _row = _aj[_j];    \
      _pi  = p_oth->i;                         \
      _pnz = _pi[_row+1] - _pi[_row];          \
      _pj  = p_oth->j + _pi[_row];             \
      _pa  = p_oth->a + _pi[_row];             \
      /* perform sparse axpy */                \
      _valtmp = _aa[_j];                       \
      _nextp  = 0; \
      for (_k=0; _nextp<_pnz; _k++) {          \
        if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */\
          apa[_k] += _valtmp*_pa[_nextp++];    \
        }                                      \
      }                                        \
      (void)PetscLogFlops(2.0*_pnz);           \
    } \
  }\
}
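
/*
   Hypothetical usage sketch (not a PETSc routine; am, ad, ao, p_loc, p_oth are assumed locals):
   a numeric product kernel would typically loop over the locally owned rows of A, zero the
   compressed work array apa whose pattern for row i is apj[api[i]..api[i+1]), accumulate the row
   with AProw_scalable(), and then copy apa into the numeric values of the product matrix.

     for (i = 0; i < am; i++) {                                  // am: number of local rows of A
       PetscInt apnz = api[i+1] - api[i];
       PetscArrayzero(apa, apnz);                                // apa is indexed 0..apnz-1 (compressed row)
       AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa);
       // ... add apa into row i of the product's value array ...
     }
*/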

#define AProw_nonscalable(i,ad,ao,p_loc,p_oth,apa) \
{\
  PetscInt    _anz,_pnz,_j,_k,*_ai,*_aj,_row,*_pi,*_pj;\
  PetscScalar *_aa,_valtmp,*_pa;                       \
  /* diagonal portion of A */\
  _ai  = ad->i;\
  _anz = _ai[i+1] - _ai[i];\
  _aj  = ad->j + _ai[i];\
  _aa  = ad->a + _ai[i];\
  for (_j=0; _j<_anz; _j++) {\
    _row = _aj[_j]; \
    _pi  = p_loc->i;                        \
    _pnz = _pi[_row+1] - _pi[_row];         \
    _pj  = p_loc->j + _pi[_row];            \
    _pa  = p_loc->a + _pi[_row];            \
    /* perform dense axpy */                \
    _valtmp = _aa[_j];                      \
    for (_k=0; _k<_pnz; _k++) {             \
      apa[_pj[_k]] += _valtmp*_pa[_k];      \
    }                                       \
    (void)PetscLogFlops(2.0*_pnz);          \
  }                                         \
  /* off-diagonal portion of A */           \
  if (p_oth){ \
    _ai  = ao->i;\
    _anz = _ai[i+1] - _ai[i];               \
    _aj  = ao->j + _ai[i];                  \
    _aa  = ao->a + _ai[i];                  \
    for (_j=0; _j<_anz; _j++) {             \
      _row = _aj[_j];    \
      _pi  = p_oth->i;                      \
      _pnz = _pi[_row+1] - _pi[_row];       \
      _pj  = p_oth->j + _pi[_row];          \
      _pa  = p_oth->a + _pi[_row];          \
      /* perform dense axpy */              \
      _valtmp = _aa[_j];                    \
      for (_k=0; _k<_pnz; _k++) {           \
        apa[_pj[_k]] += _valtmp*_pa[_k];    \
      }                                     \
      (void)PetscLogFlops(2.0*_pnz);        \
    }                                       \
  }\
}
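
/*
   In contrast to AProw_scalable(), AProw_nonscalable() performs a dense axpy: apa is assumed to be
   a dense work array indexed directly by the column indices of P, so its length must match the
   number of columns of P.  This keeps the inner loop branch-free but makes the memory footprint
   grow with the matrix size, which is why the variant is labelled non-scalable.  A caller would
   typically zero only the apa entries listed in the symbolic pattern of row i after copying them
   out, rather than the whole array.
*/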

#endif