#if !defined(__MPIAIJ_H)
#define __MPIAIJ_H

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout  rowmap;
  PetscInt   **buf_ri, **buf_rj;
  PetscMPIInt *len_s, *len_r, *id_r; /* arrays of length comm->size, storing the lengths and ranks of send/recv matrix values */
  PetscMPIInt  nsend, nrecv;
  PetscInt    *bi, *bj;               /* i and j arrays of the local portion of mpi C (matrix product) - rename to ci, cj! */
  PetscInt    *owners_co, *coi, *coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
} Mat_Merge_SeqsToMPI;
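/*
  A hedged usage sketch (the names seqmat, m, n are illustrative): the struct
  above caches the merge communication pattern attached to the result matrix,
  so MatCreateMPIAIJSumSeqAIJ() can be called again with MAT_REUSE_MATRIX and
  skip rebuilding the send/recv layout when only the numerical values change.

    Mat C = NULL;
    PetscCall(MatCreateMPIAIJSumSeqAIJ(PETSC_COMM_WORLD, seqmat, m, n, MAT_INITIAL_MATRIX, &C));
    ... update the values of seqmat, keeping the same nonzero pattern ...
    PetscCall(MatCreateMPIAIJSumSeqAIJ(PETSC_COMM_WORLD, seqmat, m, n, MAT_REUSE_MATRIX, &C));
*/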

typedef struct {                                /* used by MatPtAPXXX_MPIAIJ_MPIAIJ() and MatMatMultXXX_MPIAIJ_MPIAIJ() */
  PetscInt              *startsj_s, *startsj_r; /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar           *bufa;                  /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat                    P_loc, P_oth;          /* partial B_seq -- intended to replace B_seq */
  PetscInt              *api, *apj;             /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar           *apv;
  MatReuse               reuse; /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in 1st call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar           *apa;   /* tmp array to store a row of A*P, used in MatMatMult() */
  Mat                    A_loc; /* used by MatTransposeMatMult(), contains api and apj */
  ISLocalToGlobalMapping ltog;  /* mapping from local column indices to global column indices for A_loc */
  Mat                    Pt;    /* used by MatTransposeMatMult(), Pt = P^T */
  Mat                    Rd, Ro, AP_loc, C_loc, C_oth;
  PetscInt               algType; /* implementation algorithm */
  PetscSF                sf;      /* used to communicate the remote part of C */
  PetscInt              *c_othi, *c_rmti;

  Mat_Merge_SeqsToMPI *merge;
} Mat_APMPI;
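
/*
  A hedged sketch of the symbolic/numeric reuse pattern this struct supports:
  the first MatPtAP() call runs the symbolic phase and caches P_loc, P_oth,
  AP_loc, etc. in a Mat_APMPI attached to C; a second call with
  MAT_REUSE_MATRIX performs only the numeric phase on the cached pattern.

    Mat C;
    PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, 2.0, &C));  symbolic + numeric
    ... update the values of A, keeping the same nonzero pattern ...
    PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, 2.0, &C));    numeric only
*/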

typedef struct {
  Mat         A, B; /* local submatrices: A (diag part),
                                          B (off-diag part) */
  PetscMPIInt size; /* size of communicator */
  PetscMPIInt rank; /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */
  PetscBool    donotstash;        /* PETSC_TRUE if off-processor entries are dropped */
  MPI_Request *send_waits;        /* array of send requests */
  MPI_Request *recv_waits;        /* array of receive requests */
  PetscInt     nsends, nrecvs;    /* numbers of sends and receives */
  PetscScalar *svalues, *rvalues; /* sending and receiving data */
  PetscInt     rmax;              /* maximum message length */
#if defined(PETSC_USE_CTABLE)
  PetscTable colmap;
#else
  PetscInt *colmap; /* local col number of off-diag col */
#endif
  PetscInt *garray; /* global indices of all off-processor columns */

  /* The following variables are used for matrix-vector products */
  Vec        lvec; /* local vector */
  Vec        diag;
  VecScatter Mvctx;       /* scatter context for vector */
  PetscBool  roworiented; /* if true, row-oriented input, default true */

  /* The following variables are for MatGetRow() */
  PetscInt    *rowindices;   /* column indices for row */
  PetscScalar *rowvalues;    /* nonzero values in row */
  PetscBool    getrowactive; /* indicates a MatGetRow() is in progress and has not been restored */

  PetscInt *ld; /* number of entries per row left of diagonal block */

  /* Used by device classes */
  void *spptr;

  /* MatSetValuesCOO() related stuff */
  PetscCount   coo_n;                      /* Number of COO entries passed to MatSetPreallocationCOO() */
  PetscSF      coo_sf;                     /* SF to send/recv remote values in MatSetValuesCOO() */
  PetscCount   Annz, Bnnz;                 /* Number of entries in diagonal A and off-diagonal B */
  PetscCount   Annz2, Bnnz2;               /* Number of unique remote entries belonging to A and B */
  PetscCount   Atot1, Atot2, Btot1, Btot2; /* Total local (tot1) and remote (tot2) entries (which might contain repeats) belonging to A and B */
  PetscCount  *Ajmap1, *Aperm1;            /* Lengths: [Annz+1], [Atot1]. Local entries to diag */
  PetscCount  *Bjmap1, *Bperm1;            /* Lengths: [Bnnz+1], [Btot1]. Local entries to offdiag */
  PetscCount  *Aimap2, *Ajmap2, *Aperm2;   /* Lengths: [Annz2], [Annz2+1], [Atot2]. Remote entries to diag */
  PetscCount  *Bimap2, *Bjmap2, *Bperm2;   /* Lengths: [Bnnz2], [Bnnz2+1], [Btot2]. Remote entries to offdiag */
  PetscCount  *Cperm1;                     /* [sendlen] Permutation to fill the MPI send buffer. 'C' for communication */
  PetscScalar *sendbuf, *recvbuf;          /* Buffers for remote values in MatSetValuesCOO() */
  PetscInt     sendlen, recvlen;           /* Lengths (in units of PetscScalar) of send/recvbuf */
} Mat_MPIAIJ;
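
/*
  A hedged sketch (not the literal implementation) of how the A/B split above
  supports a parallel MatMult: y = A*x_local + B*lvec, where lvec holds the
  off-process entries of x gathered by Mvctx, and B stores compacted column
  indices whose global values are garray[j]. The scatter is started first so
  the communication overlaps the diagonal-block multiply.

    PetscCall(VecScatterBegin(aij->Mvctx, x, aij->lvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatMult(aij->A, x, y));                 diagonal block, local part of x
    PetscCall(VecScatterEnd(aij->Mvctx, x, aij->lvec, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(MatMultAdd(aij->B, aij->lvec, y, y));   off-diagonal block
*/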

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat, MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat, MatDuplicateOption, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat, MatCreateSubMatrixOption, MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatView_MPIAIJ(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat, IS, IS, PetscInt, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat, IS, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat, MPI_Comm, MatReuse, Mat *);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_AB_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_MPIAIJ_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductNumeric_RARt_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, Mat);

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat, Mat, PetscReal, Mat);
#endif
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_MPIDense(Mat, MatType, MatReuse, Mat *);
#if defined(PETSC_HAVE_SCALAPACK)
PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_PtAP(void *);
PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(void *);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat, Mat, MatReuse, PetscInt **, PetscInt **, MatScalar **, Mat *);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat(Mat, const PetscInt[], const PetscInt[], const PetscScalar[]);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Mat, const PetscInt[], const PetscInt[]);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat, MatOption, PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat, Mat *);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(Mat, PetscOptionItems *);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]);

#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128) && !defined(PETSC_USE_REAL___FP16)
PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat, IS, IS, const MatFactorInfo *, Mat *);
#endif
PETSC_INTERN PetscErrorCode MatSolve_MPIAIJ(Mat, Vec, Vec);
PETSC_INTERN PetscErrorCode MatILUFactor_MPIAIJ(Mat, IS, IS, const MatFactorInfo *);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, PetscInt *);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat, Mat *);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat, Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat, Mat *, Mat *);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat, IS, IS, IS, MatStructure, Mat, Mat);

PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat, PetscCount, PetscInt[], PetscInt[]);
PETSC_INTERN PetscErrorCode MatResetPreallocationCOO_MPIAIJ(Mat);
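
/*
  A hedged sketch of how the jmap/perm arrays in Mat_MPIAIJ drive the numeric
  phase of MatSetValuesCOO(): for each nonzero k of the diagonal block, Ajmap1
  delimits the run of local COO entries that fold into it, and Aperm1 maps each
  of them back into the user-provided value array. The names Aa (the SeqAIJ
  value array), v, and imode are illustrative, not part of this header.

    for (PetscCount k = 0; k < aij->Annz; k++) {
      PetscScalar sum = 0.0;
      for (PetscCount p = aij->Ajmap1[k]; p < aij->Ajmap1[k + 1]; p++) sum += v[aij->Aperm1[p]];
      Aa[k] = (imode == INSERT_VALUES) ? sum : Aa[k] + sum;
    }
*/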

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa) \
  { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj, _nextp, *_apJ; \
    PetscScalar *_aa, _valtmp, *_pa; \
    _apJ = apj + api[i]; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row    = _aj[_j]; \
      _pi     = p_loc->i; \
      _pnz    = _pi[_row + 1] - _pi[_row]; \
      _pj     = p_loc->j + _pi[_row]; \
      _pa     = p_loc->a + _pi[_row]; \
      /* perform sparse axpy */ \
      _valtmp = _aa[_j]; \
      _nextp  = 0; \
      for (_k = 0; _nextp < _pnz; _k++) { \
        if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
          apa[_k] += _valtmp * _pa[_nextp++]; \
        } \
      } \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row    = _aj[_j]; \
        _pi     = p_oth->i; \
        _pnz    = _pi[_row + 1] - _pi[_row]; \
        _pj     = p_oth->j + _pi[_row]; \
        _pa     = p_oth->a + _pi[_row]; \
        /* perform sparse axpy */ \
        _valtmp = _aa[_j]; \
        _nextp  = 0; \
        for (_k = 0; _nextp < _pnz; _k++) { \
          if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
            apa[_k] += _valtmp * _pa[_nextp++]; \
          } \
        } \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  }
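
/*
  Hedged usage sketch: in a numeric kernel, apa is a per-row work buffer aligned
  with the row's symbolic pattern (apj + api[i]), so after each row it is
  scattered into C and zeroed. The loop below is illustrative; am and the
  copy/reset step are assumptions, not names from this header.

    for (i = 0; i < am; i++) {
      AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa);
      ... copy apa[0 .. api[i+1]-api[i]-1] into the i-th row of C's value
          array, then reset those apa entries to zero for the next row ...
    }
*/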

#define AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa) \
  { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj; \
    PetscScalar *_aa, _valtmp, *_pa; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = ad->j + _ai[i]; \
    _aa  = ad->a + _ai[i]; \
    for (_j = 0; _j < _anz; _j++) { \
      _row    = _aj[_j]; \
      _pi     = p_loc->i; \
      _pnz    = _pi[_row + 1] - _pi[_row]; \
      _pj     = p_loc->j + _pi[_row]; \
      _pa     = p_loc->a + _pi[_row]; \
      /* perform dense axpy */ \
      _valtmp = _aa[_j]; \
      for (_k = 0; _k < _pnz; _k++) { apa[_pj[_k]] += _valtmp * _pa[_k]; } \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = ao->j + _ai[i]; \
      _aa  = ao->a + _ai[i]; \
      for (_j = 0; _j < _anz; _j++) { \
        _row    = _aj[_j]; \
        _pi     = p_oth->i; \
        _pnz    = _pi[_row + 1] - _pi[_row]; \
        _pj     = p_oth->j + _pi[_row]; \
        _pa     = p_oth->a + _pi[_row]; \
        /* perform dense axpy */ \
        _valtmp = _aa[_j]; \
        for (_k = 0; _k < _pnz; _k++) { apa[_pj[_k]] += _valtmp * _pa[_k]; } \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  }
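
/*
  Hedged contrast with AProw_scalable: here apa is a dense work array indexed
  directly by column of P, so its length must equal the number of columns of P
  (hence "nonscalable" in memory), but no merged-index matching is needed. A
  caller would typically gather the nonzeros listed in C's i-th row pattern out
  of apa and reset just those entries. The names am, ci, cj, ca below are
  illustrative, not from this header.

    for (i = 0; i < am; i++) {
      AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa);
      for (_k = ci[i]; _k < ci[i + 1]; _k++) {  gather into C, then reset
        ca[_k]      = apa[cj[_k]];
        apa[cj[_k]] = 0.0;
      }
    }
*/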

#endif