#ifndef MPIAIJ_H
#define MPIAIJ_H

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout  rowmap;
  PetscInt   **buf_ri, **buf_rj;
  PetscMPIInt *len_s, *len_r, *id_r; /* arrays of length comm->size; message lengths and source ranks for the send/recv of matrix values */
  PetscMPIInt  nsend, nrecv;
  PetscInt    *bi, *bj;               /* i and j arrays of the local portion of mpi C (matrix product) - rename to ci, cj! */
  PetscInt    *owners_co, *coi, *coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
} Mat_Merge_SeqsToMPI;
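
/*
   A hedged usage sketch (not from this file): this struct caches the parallel
   communication pattern so MatCreateMPIAIJSumSeqAIJ() can rebuild the merged
   MPI matrix cheaply when only the numerical values of the sequential pieces
   change. `comm`, `seqmat`, `m`, and `n` are placeholder names.

     Mat C;
     PetscCall(MatCreateMPIAIJSumSeqAIJ(comm, seqmat, m, n, MAT_INITIAL_MATRIX, &C)); // builds the merge data
     // ... update values of seqmat, keeping the same nonzero pattern ...
     PetscCall(MatCreateMPIAIJSumSeqAIJ(comm, seqmat, m, n, MAT_REUSE_MATRIX, &C));   // reuses the cached pattern
*/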

typedef struct {                                /* used by MatPtAPXXX_MPIAIJ_MPIAIJ() and MatMatMultXXX_MPIAIJ_MPIAIJ() */
  PetscInt              *startsj_s, *startsj_r; /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar           *bufa;                  /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat                    P_loc, P_oth;          /* partial B_seq -- intended to replace B_seq */
  PetscInt              *api, *apj;             /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar           *apv;
  MatReuse               reuse; /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in the first call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar           *apa;   /* tmp array for storing a row of A*P, used in MatMatMult() */
  Mat                    A_loc; /* used by MatTransposeMatMult(), contains api and apj */
  ISLocalToGlobalMapping ltog;  /* mapping from local column indices to global column indices for A_loc */
  Mat                    Pt;    /* used by MatTransposeMatMult(), Pt = P^T */
  Mat                    Rd, Ro, AP_loc, C_loc, C_oth;
  PetscInt               algType; /* implementation algorithm */
  PetscSF                sf;      /* used to communicate the remote part of C */
  PetscInt              *c_othi, *c_rmti;

  Mat_Merge_SeqsToMPI *merge;
} Mat_APMPI;
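
/*
   Context for Mat_APMPI: the symbolic phase of a product (e.g. PtAP) attaches
   this workspace to the result so the numeric phase can reuse it. A minimal
   sketch of the public entry point, with `A`, `P`, and `fill` as placeholders:

     Mat C;
     PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, fill, &C)); // symbolic + numeric; creates the workspace
     // ... change numerical values of A (same nonzero pattern) ...
     PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, fill, &C));   // numeric only, reusing the workspace
*/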

typedef struct {
  Mat         A, B; /* local submatrices: A (diagonal part), B (off-diagonal part) */
  PetscMPIInt size; /* size of communicator */
  PetscMPIInt rank; /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscBool    donotstash;        /* PETSC_TRUE if off-processor entries are dropped */
  MPI_Request *send_waits;        /* array of send requests */
  MPI_Request *recv_waits;        /* array of receive requests */
  PetscInt     nsends, nrecvs;    /* numbers of sends and receives */
  PetscScalar *svalues, *rvalues; /* sending and receiving data */
  PetscInt     rmax;              /* maximum message length */
#if defined(PETSC_USE_CTABLE)
  PetscHMapI colmap;
#else
  PetscInt *colmap; /* local column numbers of off-diagonal columns */
#endif
  PetscInt *garray; /* global indices of all off-processor columns */

  /* The following variables are used for matrix-vector products */
  Vec        lvec; /* local vector */
  Vec        diag;
  VecScatter Mvctx;       /* scatter context for vector */
  PetscBool  roworiented; /* if true, row-oriented input; default true */

  /* The following variables are for MatGetRow() */
  PetscInt    *rowindices;   /* column indices for row */
  PetscScalar *rowvalues;    /* nonzero values in row */
  PetscBool    getrowactive; /* indicates a MatGetRow() is active (has not been restored) */

  PetscInt *ld; /* number of entries per row left of diagonal block */

  /* Used by device classes */
  void *spptr;

  /* MatSetValuesCOO() related stuff */
  PetscCount   coo_n;                      /* Number of COO entries passed to MatSetPreallocationCOO() */
  PetscSF      coo_sf;                     /* SF to send/recv remote values in MatSetValuesCOO() */
  PetscCount   Annz, Bnnz;                 /* Number of entries in diagonal A and off-diagonal B */
  PetscCount   Annz2, Bnnz2;               /* Number of unique remote entries belonging to A and B */
  PetscCount   Atot1, Atot2, Btot1, Btot2; /* Total local (tot1) and remote (tot2) entries (which might contain repeats) belonging to A and B */
  PetscCount  *Ajmap1, *Aperm1;            /* Lengths: [Annz+1], [Atot1]. Local entries to diag */
  PetscCount  *Bjmap1, *Bperm1;            /* Lengths: [Bnnz+1], [Btot1]. Local entries to offdiag */
  PetscCount  *Aimap2, *Ajmap2, *Aperm2;   /* Lengths: [Annz2], [Annz2+1], [Atot2]. Remote entries to diag */
  PetscCount  *Bimap2, *Bjmap2, *Bperm2;   /* Lengths: [Bnnz2], [Bnnz2+1], [Btot2]. Remote entries to offdiag */
  PetscCount  *Cperm1;                     /* [sendlen] Permutation to fill the MPI send buffer. 'C' for communication */
  PetscScalar *sendbuf, *recvbuf;          /* Buffers for remote values in MatSetValuesCOO() */
  PetscInt     sendlen, recvlen;           /* Lengths (in units of PetscScalar) of send/recvbuf */

  struct _MatOps cops;
} Mat_MPIAIJ;
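
/*
   How the pieces above fit together (an illustrative sketch, not the actual
   implementation, which lives in mpiaij.c): a parallel product y = M*x applies
   the diagonal block A to the locally owned part of x, while Mvctx gathers the
   needed ghost entries into lvec for the off-diagonal block B.

     Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data;
     PetscCall(VecScatterBegin(aij->Mvctx, xx, aij->lvec, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall((*aij->A->ops->mult)(aij->A, xx, yy)); // yy = A * x_local, overlapping the communication
     PetscCall(VecScatterEnd(aij->Mvctx, xx, aij->lvec, INSERT_VALUES, SCATTER_FORWARD));
     PetscCall((*aij->B->ops->multadd)(aij->B, aij->lvec, yy, yy)); // yy += B * lvec
*/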

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat, MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat, MatDuplicateOption, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat, MatCreateSubMatrixOption, MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatView_MPIAIJ(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat, IS, IS, PetscInt, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat, IS, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat, MPI_Comm, MatReuse, Mat *);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_AB_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_MPIAIJ_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductNumeric_RARt_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, Mat);

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat, Mat, PetscReal, Mat);
#endif
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_MPIDense(Mat, MatType, MatReuse, Mat *);
#if defined(PETSC_HAVE_SCALAPACK)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(void *);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat, Mat, MatReuse, PetscInt **, PetscInt **, MatScalar **, Mat *);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat(Mat, const PetscInt[], const PetscInt[], const PetscScalar[]);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Mat, const PetscInt[], const PetscInt[]);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat, MatOption, PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat, Mat *);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(Mat, PetscOptionItems *);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]);

#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128) && !defined(PETSC_USE_REAL___FP16)
PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat, IS, IS, const MatFactorInfo *, Mat *);
#endif
PETSC_INTERN PetscErrorCode MatSolve_MPIAIJ(Mat, Vec, Vec);
PETSC_INTERN PetscErrorCode MatILUFactor_MPIAIJ(Mat, IS, IS, const MatFactorInfo *);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, PetscInt *);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat, Mat *);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat, Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat, Mat *, Mat *);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat, IS, IS, IS, MatStructure, Mat, Mat);

PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat, PetscCount, PetscInt[], PetscInt[]);
PETSC_INTERN PetscErrorCode MatResetPreallocationCOO_MPIAIJ(Mat);

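/*
   A minimal sketch (with placeholder values) of the COO assembly path that the
   coo_* fields in Mat_MPIAIJ and the two routines above support: the pattern
   is analyzed once, after which values can be reset repeatedly and cheaply.

     PetscInt    coo_i[] = {0, 0, 1}, coo_j[] = {0, 2, 1}; // global (row, col) of each entry; repeats allowed
     PetscScalar coo_v[] = {1.0, 2.0, 3.0};
     PetscCall(MatSetPreallocationCOO(A, 3, coo_i, coo_j)); // builds the maps, permutations, and SF
     PetscCall(MatSetValuesCOO(A, coo_v, ADD_VALUES));      // fast value-only assembly
*/
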
/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa) \
  { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj, _nextp, *_apJ; \
    PetscScalar *_aa, _valtmp, *_pa; \
    _apJ = apj + api[i]; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform sparse axpy */ \
      _valtmp = _aa[_j]; \
      _nextp  = 0; \
      for (_k = 0; _nextp < _pnz; _k++) { \
        if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
          apa[_k] += _valtmp * _pa[_nextp++]; \
        } \
      } \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform sparse axpy */ \
        _valtmp = _aa[_j]; \
        _nextp  = 0; \
        for (_k = 0; _nextp < _pnz; _k++) { \
          if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
            apa[_k] += _valtmp * _pa[_nextp++]; \
          } \
        } \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  }

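/*
   A hedged usage sketch for AProw_scalable: `ad`, `ao`, `p_loc`, `p_oth` are
   assumed to be Mat_SeqAIJ* (the diagonal/off-diagonal blocks of A and the
   local/gathered pieces of P), and api/apj give the precomputed symbolic rows
   of A*P. Row i is accumulated into a compressed slice whose k-th entry
   corresponds to column apj[api[i] + k].

     PetscScalar *apa = apv + api[i];          // compressed value slice for row i (assuming apv holds all values)
     PetscArrayzero(apa, api[i + 1] - api[i]); // clear before accumulating
     AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa);
*/
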
#define AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa) \
  { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj; \
    PetscScalar *_aa, _valtmp, *_pa; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform dense axpy */ \
      _valtmp = _aa[_j]; \
      for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform dense axpy */ \
        _valtmp = _aa[_j]; \
        for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  }

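/*
   AProw_nonscalable differs from the scalable variant only in the accumulator:
   apa is assumed to be a dense work array spanning all columns of P (in the
   index space used by p_loc/p_oth), zeroed beforehand and indexed directly by
   column. This avoids the merge against apj, but memory use grows with the
   column count of P, hence "nonscalable". Sketch with placeholder names:

     AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa); // apa[col] += row i of A times P
*/
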
#endif