#ifndef __MPIAIJ_H
#define __MPIAIJ_H

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout  rowmap;
  PetscInt   **buf_ri, **buf_rj;
  PetscMPIInt *len_s, *len_r, *id_r; /* arrays of length comm->size; lengths and source ranks of the send/recv matrix values */
  PetscMPIInt  nsend, nrecv;
  PetscInt    *bi, *bj;               /* i and j arrays of the local portion of the MPI matrix product C - rename to ci, cj! */
  PetscInt    *owners_co, *coi, *coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
} Mat_Merge_SeqsToMPI;

typedef struct {                                /* used by MatPtAPXXX_MPIAIJ_MPIAIJ() and MatMatMultXXX_MPIAIJ_MPIAIJ() */
  PetscInt              *startsj_s, *startsj_r; /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar           *bufa;                  /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat                    P_loc, P_oth;          /* partial B_seq -- intended to replace B_seq */
  PetscInt              *api, *apj;             /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar           *apv;
  MatReuse               reuse; /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in the first call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar           *apa;   /* temporary array to store a row of A*P, used in MatMatMult() */
  Mat                    A_loc; /* used by MatTransposeMatMult(), contains api and apj */
  ISLocalToGlobalMapping ltog;  /* mapping from local column indices to global column indices for A_loc */
  Mat                    Pt;    /* used by MatTransposeMatMult(), Pt = P^T */
  Mat                    Rd, Ro, AP_loc, C_loc, C_oth;
  PetscInt               algType; /* implementation algorithm */
  PetscSF                sf;      /* used to communicate the remote part of C */
  PetscInt              *c_othi, *c_rmti;

  Mat_Merge_SeqsToMPI *merge;
} Mat_APMPI;
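
/* A hedged sketch (assumed names and control flow, not the exact implementation) of
   the reuse pattern the 'reuse' flag above enables: the symbolic phase has already
   fetched P_loc/P_oth, so the first numeric call skips re-fetching them, while later
   numeric calls refresh the values in place with MAT_REUSE_MATRIX: */
#if 0
if (ptap->reuse == MAT_REUSE_MATRIX) { /* not the first numeric call: refresh values only */
  PetscCall(MatGetBrowsOfAoCols_MPIAIJ(A, P, MAT_REUSE_MATRIX, &ptap->startsj_s, &ptap->startsj_r, &ptap->bufa, &ptap->P_oth));
  PetscCall(MatMPIAIJGetLocalMat(P, MAT_REUSE_MATRIX, &ptap->P_loc));
}
ptap->reuse = MAT_REUSE_MATRIX; /* make subsequent numeric calls take the branch above */
#endif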

typedef struct {
  Mat         A, B; /* local submatrices: A (diag part),
                                           B (off-diag part) */
  PetscMPIInt size; /* size of communicator */
  PetscMPIInt rank; /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscBool    donotstash;        /* PETSC_TRUE if off-processor entries are dropped */
  MPI_Request *send_waits;        /* array of send requests */
  MPI_Request *recv_waits;        /* array of receive requests */
  PetscInt     nsends, nrecvs;    /* numbers of sends and receives */
  PetscScalar *svalues, *rvalues; /* sending and receiving data */
  PetscInt     rmax;              /* maximum message length */
#if defined(PETSC_USE_CTABLE)
  PetscHMapI colmap;
#else
  PetscInt *colmap; /* local column numbers of off-diagonal columns */
#endif
  PetscInt *garray; /* global indices of all off-processor columns */

  /* The following variables are used for matrix-vector products */
  Vec        lvec; /* local vector */
  Vec        diag;
  VecScatter Mvctx;       /* scatter context for vector */
  PetscBool  roworiented; /* if true, row-oriented input, default true */

  /* The following variables are for MatGetRow() */
  PetscInt    *rowindices;   /* column indices for row */
  PetscScalar *rowvalues;    /* nonzero values in row */
  PetscBool    getrowactive; /* indicates a MatGetRow() is active (row not yet restored) */

  PetscInt *ld; /* number of entries per row left of diagonal block */

  /* Used by device classes */
  void *spptr;

  struct _MatOps cops;
} Mat_MPIAIJ;
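
/* A hedged sketch (not the exact implementation) of how the A/B split above is used:
   A holds the columns this rank owns, B holds the remaining columns compressed so
   that B's local column k corresponds to global column garray[k], and a MatMult is
   y = A*x + B*lvec, where lvec gathers the off-process entries of x through Mvctx: */
#if 0
static PetscErrorCode MatMult_MPIAIJ_sketch(Mat mat, Vec xx, Vec yy)
{
  Mat_MPIAIJ *a = (Mat_MPIAIJ *)mat->data;

  PetscFunctionBegin;
  /* start gathering the off-process entries of xx into a->lvec */
  PetscCall(VecScatterBegin(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
  /* diagonal block times the locally owned part of xx */
  PetscCall(MatMult(a->A, xx, yy));
  PetscCall(VecScatterEnd(a->Mvctx, xx, a->lvec, INSERT_VALUES, SCATTER_FORWARD));
  /* off-diagonal block times the gathered ghost values, added into yy */
  PetscCall(MatMultAdd(a->B, a->lvec, yy, yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}
#endif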

typedef struct {
  PetscCount   n;                          /* Number of COOs passed to MatSetPreallocationCOO() */
  PetscSF      sf;                         /* SF to send/recv remote values in MatSetValuesCOO() */
  PetscCount   Annz, Bnnz;                 /* Number of entries in diagonal A and off-diagonal B */
  PetscCount   Annz2, Bnnz2;               /* Number of unique remote entries belonging to A and B */
  PetscCount   Atot1, Atot2, Btot1, Btot2; /* Total local (tot1) and remote (tot2) entries (which might contain repeats) belonging to A and B */
  PetscCount  *Ajmap1, *Aperm1;            /* Lengths: [Annz+1], [Atot1]. Local entries to diag */
  PetscCount  *Bjmap1, *Bperm1;            /* Lengths: [Bnnz+1], [Btot1]. Local entries to offdiag */
  PetscCount  *Aimap2, *Ajmap2, *Aperm2;   /* Lengths: [Annz2], [Annz2+1], [Atot2]. Remote entries to diag */
  PetscCount  *Bimap2, *Bjmap2, *Bperm2;   /* Lengths: [Bnnz2], [Bnnz2+1], [Btot2]. Remote entries to offdiag */
  PetscCount  *Cperm1;                     /* [sendlen] Permutation to fill the MPI send buffer. 'C' for communication */
  PetscScalar *sendbuf, *recvbuf;          /* Buffers for remote values in MatSetValuesCOO() */
  PetscInt     sendlen, recvlen;           /* Lengths (in units of PetscScalar) of sendbuf/recvbuf */
} MatCOOStruct_MPIAIJ;
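
/* A hedged sketch (assumed variable names, not the exact implementation) of how the
   jmap/perm arrays above are consumed in MatSetValuesCOO() for the diagonal block A,
   with v[] the user's COO values, Aa[] the nonzero array of A, and coo a pointer to
   the struct above (INSERT_VALUES case): */
#if 0
for (PetscCount k = 0; k < coo->Annz; k++) { /* local contributions to nonzero k of A */
  PetscScalar sum = 0.0;
  for (PetscCount p = coo->Ajmap1[k]; p < coo->Ajmap1[k + 1]; p++) sum += v[coo->Aperm1[p]];
  Aa[k] = sum;
}
for (PetscCount k = 0; k < coo->Annz2; k++) { /* remote contributions, already received in recvbuf */
  for (PetscCount p = coo->Ajmap2[k]; p < coo->Ajmap2[k + 1]; p++) Aa[coo->Aimap2[k]] += coo->recvbuf[coo->Aperm2[p]];
}
#endif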

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat, MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat, MatDuplicateOption, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat, MatCreateSubMatrixOption, MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatView_MPIAIJ(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat, IS, IS, PetscInt, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat, IS, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat, MPI_Comm, MatReuse, Mat *);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_AB_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_MPIAIJ_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductNumeric_RARt_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, Mat);

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat, Mat, PetscReal, Mat);
#endif
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_MPIDense(Mat, MatType, MatReuse, Mat *);
#if defined(PETSC_HAVE_SCALAPACK)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(void *);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat, Mat, MatReuse, PetscInt **, PetscInt **, MatScalar **, Mat *);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat(Mat, const PetscInt[], const PetscInt[], const PetscScalar[]);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Mat, const PetscInt[], const PetscInt[]);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat, MatOption, PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat, Mat *);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(Mat, PetscOptionItems *);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]);

#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128) && !defined(PETSC_USE_REAL___FP16)
PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat, IS, IS, const MatFactorInfo *, Mat *);
#endif
PETSC_INTERN PetscErrorCode MatSolve_MPIAIJ(Mat, Vec, Vec);
PETSC_INTERN PetscErrorCode MatILUFactor_MPIAIJ(Mat, IS, IS, const MatFactorInfo *);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, PetscInt *);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat, Mat *);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat, Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat, Mat *, Mat *);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat, IS, IS, IS, MatStructure, Mat, Mat);

PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat, PetscCount, PetscInt[], PetscInt[]);
PETSC_INTERN PetscErrorCode MatResetPreallocationCOO_MPIAIJ(Mat);

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa) \
  { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj, _nextp, *_apJ; \
    PetscScalar *_aa, _valtmp, *_pa; \
    _apJ = apj + api[i]; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform sparse axpy */ \
      _valtmp = _aa[_j]; \
      _nextp  = 0; \
      for (_k = 0; _nextp < _pnz; _k++) { \
        if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
          apa[_k] += _valtmp * _pa[_nextp++]; \
        } \
      } \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform sparse axpy */ \
        _valtmp = _aa[_j]; \
        _nextp  = 0; \
        for (_k = 0; _nextp < _pnz; _k++) { \
          if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
            apa[_k] += _valtmp * _pa[_nextp++]; \
          } \
        } \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  }
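
/* A hedged usage sketch (assumed variable names): with the symbolic structure
   (api, apj) of A*P already computed, accumulate the numeric values of row i into a
   zeroed compressed scratch array apa of length api[i+1]-api[i], then copy that row
   into the value array apv: */
#if 0
Mat_SeqAIJ *ad    = (Mat_SeqAIJ *)a->A->data, *ao = (Mat_SeqAIJ *)a->B->data;
Mat_SeqAIJ *p_loc = (Mat_SeqAIJ *)ptap->P_loc->data;
Mat_SeqAIJ *p_oth = ptap->P_oth ? (Mat_SeqAIJ *)ptap->P_oth->data : NULL;

for (PetscInt i = 0; i < am; i++) { /* am = number of local rows of A */
  PetscInt apnz = api[i + 1] - api[i];
  PetscCall(PetscArrayzero(apa, apnz));              /* fresh accumulator for this row */
  AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa);
  PetscCall(PetscArraycpy(apv + api[i], apa, apnz)); /* store the computed row */
}
#endif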

#define AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa) \
  { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj; \
    PetscScalar *_aa, _valtmp, *_pa; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform dense axpy */ \
      _valtmp = _aa[_j]; \
      for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform dense axpy */ \
        _valtmp = _aa[_j]; \
        for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  }
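
/* A hedged contrast sketch (assumed names): unlike AProw_scalable, AProw_nonscalable
   scatters into a dense accumulator apa of length pN (the global number of columns
   of P), so after the call row i is gathered through its column indices and only
   the touched entries of apa are reset: */
#if 0
AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa); /* apa[] assumed zeroed, length pN */
for (PetscInt k = 0; k < api[i + 1] - api[i]; k++) { /* gather row i via its sparsity pattern */
  apv[api[i] + k] = apa[apj[api[i] + k]];
  apa[apj[api[i] + k]] = 0.0; /* cheap reset of only the touched entries */
}
#endif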

#endif