xref: /petsc/src/mat/impls/aij/seq/aij.h (revision b2ccae6bdc8edea944f1c160ca3b2eb32c69ecb2)
1 #pragma once
2 
3 #include <petsc/private/matimpl.h>
4 #include <petsc/private/hashmapi.h>
5 #include <petsc/private/hashmapijv.h>
6 
7 /*
8  Used by MatCreateSubMatrices_MPIXAIJ_Local()
9 */
/* Support data shared by MatCreateSubMatrices_MPIAIJ_SingleIS_Local() and MatCreateSubMatrices_MPIAIJ_Local() */
typedef struct {
  PetscInt     id; /* index of submats, only submats[0] is responsible for deleting some arrays below */
  PetscMPIInt  nrqs, nrqr;                                /* number of MPI requests sent (nrqs) and received (nrqr) */
  PetscInt   **rbuf1, **rbuf2, **rbuf3, **sbuf1, **sbuf2; /* receive and send buffers kept for reuse between calls */
  PetscInt   **ptr;                                       /* pointers into tmp workspace -- NOTE(review): per-process offsets, confirm against users */
  PetscInt    *tmp;                                       /* scratch workspace */
  PetscInt    *ctr;                                       /* counters -- NOTE(review): presumably one per process, confirm against users */
  PetscMPIInt *pa;                                        /* process array */
  PetscInt    *req_size;                                  /* size of each received request */
  PetscMPIInt *req_source1, *req_source2;                 /* MPI source ranks of the requests */
  PetscBool    allcolumns, allrows;                       /* the index sets cover all columns/rows, so no column/row maps are needed */
  PetscBool    singleis;                                  /* created by the single-IS (SingleIS) code path */
  PetscMPIInt *row2proc;                                  /* row to process (MPI rank) map */
  PetscInt     nstages;                                   /* number of stages the extraction is split into */
  /* global-to-local row/column maps: a hash table when PETSC_USE_CTABLE is set, dense arrays otherwise */
#if defined(PETSC_USE_CTABLE)
  PetscHMapI cmap, rmap;
  PetscInt  *cmap_loc, *rmap_loc; /* local variants of the maps -- NOTE(review): confirm exact usage against MatCreateSubMatrices_MPIAIJ_Local() */
#else
  PetscInt *cmap, *rmap;
#endif
  PetscErrorCode (*destroy)(Mat); /* destroy routine for the submatrix (see MatDestroySubMatrix_SeqAIJ/_Dummy below) */
} Mat_SubSppt;
32 
33 /* Operations provided by MATSEQAIJ and its subclasses */
/* Operations provided by MATSEQAIJ and its subclasses */
typedef struct {
  PetscErrorCode (*getarray)(Mat, PetscScalar **);               /* read/write access to the numerical values array */
  PetscErrorCode (*restorearray)(Mat, PetscScalar **);           /* return the array obtained with getarray() */
  PetscErrorCode (*getarrayread)(Mat, const PetscScalar **);     /* read-only access to the numerical values array */
  PetscErrorCode (*restorearrayread)(Mat, const PetscScalar **); /* return the array obtained with getarrayread() */
  PetscErrorCode (*getarraywrite)(Mat, PetscScalar **);          /* write-only access to the numerical values array */
  PetscErrorCode (*restorearraywrite)(Mat, PetscScalar **);      /* return the array obtained with getarraywrite() */
  PetscErrorCode (*getcsrandmemtype)(Mat, const PetscInt **, const PetscInt **, PetscScalar **, PetscMemType *); /* get the raw CSR arrays (i, j, a) and the memory type they reside in */
} Mat_SeqAIJOps;
43 
44 /*
45     Struct header shared by SeqAIJ, SeqBAIJ and SeqSBAIJ matrix formats
46 */
#define SEQAIJHEADER(datatype) \
  PetscBool         roworiented; /* if true, row-oriented input, default */ \
  PetscInt          nonew;       /* 1 don't add new nonzeros, -1 generate error on new */ \
  PetscInt          nounused;    /* -1 generate error on unused space */ \
  PetscInt          maxnz;       /* allocated nonzeros */ \
  PetscInt         *imax;        /* maximum space allocated for each row */ \
  PetscInt         *ilen;        /* actual length of each row */ \
  PetscInt         *ipre;        /* space preallocated for each row by user */ \
  PetscBool         free_imax_ilen; /* free imax[] and ilen[] when the matrix is destroyed */ \
  PetscInt          reallocs;           /* number of mallocs done during MatSetValues() \
                                        as more values are set than were prealloced */ \
  PetscInt          rmax;               /* max nonzeros in any row */ \
  PetscBool         keepnonzeropattern; /* keeps matrix nonzero structure same in calls to MatZeroRows()*/ \
  PetscBool         ignorezeroentries;  /* drop explicit zeros passed to MatSetValues() */ \
  PetscBool         free_ij;       /* free the column indices j and row offsets i when the matrix is destroyed */ \
  PetscBool         free_a;        /* free the numerical values when the matrix is destroyed */ \
  Mat_CompressedRow compressedrow; /* use compressed row format */ \
  PetscInt          nz;            /* nonzeros */ \
  PetscInt         *i;             /* pointer to beginning of each row (0-based row offsets) */ \
  PetscInt         *j;             /* column indices: j + i[k] is the start of row k (i[] and j[] are 0-based) */ \
  PetscInt         *diag;          /* pointers to diagonal elements */ \
  PetscInt          nonzerorowcnt; /* how many rows have nonzero entries */ \
  PetscBool         free_diag;     /* free diag[] when the matrix is destroyed */ \
  datatype         *a;              /* nonzero elements */ \
  PetscScalar      *solve_work;     /* work space used in MatSolve */ \
  IS                row, col, icol; /* index sets, used for reorderings */ \
  PetscBool         pivotinblocks;  /* pivot inside factorization of each diagonal block */ \
  Mat               parent;         /* set if this matrix was formed with MatDuplicate(...,MAT_SHARE_NONZERO_PATTERN,....); \
                                         means that this shares some data structures with the parent including diag, ilen, imax, i, j */ \
  Mat_SubSppt      *submatis1;      /* used by MatCreateSubMatrices_MPIXAIJ_Local */ \
  Mat_SeqAIJOps     ops[1]          /* operations for SeqAIJ and its subclasses */
78 
typedef struct { /* support for products of the form C = A*B^T via dense intermediates */
  MatTransposeColoring matcoloring; /* coloring used to move between sparse and dense representations */
  Mat                  Bt_den;      /* dense matrix of B^T */
  Mat                  ABt_den;     /* dense matrix of A*B^T */
  PetscBool            usecoloring; /* if true, the coloring-based algorithm is in use */
} Mat_MatMatTransMult;
85 
typedef struct { /* used by MatTransposeMatMult() */
  Mat At;        /* transpose of the first matrix */
  Mat mA;        /* maij matrix of A */
  Vec bt, ct;    /* vectors to hold locally transposed arrays of B and C */
  /* used by PtAP */
  void *data;                        /* opaque intermediate-product data */
  PetscErrorCode (*destroy)(void *); /* destructor for data */
} Mat_MatTransMatMult;
94 
typedef struct { /* scratch storage for forming the product A*P one row at a time */
  PetscInt    *api, *apj; /* symbolic structure of A*P */
  PetscScalar *apa;       /* temporary array for storing one row of A*P */
} Mat_AP;
99 
typedef struct { /* support for products of the form R*A*R^T */
  MatTransposeColoring matcoloring; /* coloring used to move between sparse and dense representations */
  Mat                  Rt;   /* sparse or dense matrix of R^T */
  Mat                  RARt; /* dense matrix of R*A*R^T */
  Mat                  ARt;  /* A*R^T used for the case -matrart_color_art */
  MatScalar           *work; /* work array to store columns of A*R^T used in MatMatMatMultNumeric_SeqAIJ_SeqAIJ_SeqDense() */
  /* free intermediate products needed for PtAP */
  void *data;                        /* opaque intermediate-product data */
  PetscErrorCode (*destroy)(void *); /* destructor for data */
} Mat_RARt;
110 
typedef struct { /* support for triple products: caches the intermediate B*C */
  Mat BC; /* temp matrix for storing B*C */
} Mat_MatMatMatMult;
114 
115 /*
116   MATSEQAIJ format - Compressed row storage (also called Yale sparse matrix
117   format) or compressed sparse row (CSR).  The i[] and j[] arrays start at 0. For example,
118   j[i[k]+p] is the pth column in row k.  Note that the diagonal
119   matrix elements are stored with the rest of the nonzeros (not separately).
120 */
121 
122 /* Info about i-nodes (identical nodes) helper class for SeqAIJ */
typedef struct {
  MatScalar *bdiag, *ibdiag, *ssor_work; /* diagonal blocks of matrix, their inverses, and workspace, used for MatSOR_SeqAIJ_Inode() */
  PetscInt   bdiagsize;                  /* length of bdiag and ibdiag */
  PetscBool  ibdiagvalid;                /* do ibdiag[] and bdiag[] contain the most recent values */

  PetscBool        use;              /* use the inode routines -- NOTE(review): confirm where this is toggled (likely MatSetOption_SeqAIJ_Inode) */
  PetscInt         node_count;       /* number of inodes */
  PetscInt        *size_csr;         /* inode sizes in csr with size_csr[0] = 0 and i-th node size = size_csr[i+1] - size_csr[i], to facilitate parallel computation */
  PetscInt         limit;            /* inode limit */
  PetscInt         max_limit;        /* maximum supported inode limit */
  PetscBool        checked;          /* if inodes have been checked for */
  PetscObjectState mat_nonzerostate; /* non-zero state when inodes were checked for */
} Mat_SeqAIJ_Inode;
136 
137 PETSC_INTERN PetscErrorCode MatView_SeqAIJ_Inode(Mat, PetscViewer);
138 PETSC_INTERN PetscErrorCode MatAssemblyEnd_SeqAIJ_Inode(Mat, MatAssemblyType);
139 PETSC_INTERN PetscErrorCode MatDestroy_SeqAIJ_Inode(Mat);
140 PETSC_INTERN PetscErrorCode MatCreate_SeqAIJ_Inode(Mat);
141 PETSC_INTERN PetscErrorCode MatSetOption_SeqAIJ_Inode(Mat, MatOption, PetscBool);
142 PETSC_INTERN PetscErrorCode MatDuplicate_SeqAIJ_Inode(Mat, MatDuplicateOption, Mat *);
143 PETSC_INTERN PetscErrorCode MatDuplicateNoCreate_SeqAIJ(Mat, Mat, MatDuplicateOption, PetscBool);
144 PETSC_INTERN PetscErrorCode MatLUFactorNumeric_SeqAIJ_Inode(Mat, Mat, const MatFactorInfo *);
145 PETSC_INTERN PetscErrorCode MatSeqAIJGetArray_SeqAIJ(Mat, PetscScalar **);
146 PETSC_INTERN PetscErrorCode MatSeqAIJRestoreArray_SeqAIJ(Mat, PetscScalar **);
147 
/* Matrix data for MATSEQAIJ: CSR storage (via SEQAIJHEADER) plus inode, SOR/Eisenstat and hash-assembly support */
typedef struct {
  SEQAIJHEADER(MatScalar);
  Mat_SeqAIJ_Inode inode;        /* i-node (identical node) information */
  MatScalar       *saved_values; /* location for stashing nonzero values of matrix */

  PetscScalar *idiag, *mdiag, *ssor_work; /* inverse of diagonal entries, diagonal values and workspace for Eisenstat trick */
  PetscBool    idiagvalid;                /* current idiag[] and mdiag[] are valid */
  PetscScalar *ibdiag;                    /* inverses of block diagonals */
  PetscBool    ibdiagvalid;               /* inverses of block diagonals are valid. */
  PetscBool    diagonaldense;             /* all entries along the diagonal have been set; i.e. no missing diagonal terms */
  PetscScalar  fshift, omega;             /* last used omega and fshift */

  /* MatSetValues() via hash related fields */
  PetscHMapIJV   ht;   /* hash table keyed on (i,j) used while values are inserted before preallocation */
  PetscInt      *dnz;  /* nonzero counts gathered during hash assembly -- NOTE(review): confirm exact semantics against the hash MatSetValues path */
  struct _MatOps cops; /* saved copy of the operations table -- NOTE(review): presumably restored when hash assembly completes; confirm */
} Mat_SeqAIJ;
165 
typedef struct { /* COO assembly support built by MatSetPreallocationCOO_SeqAIJ() */
  PetscInt    nz;   /* nz of the matrix after assembly */
  PetscCount  n;    /* Number of entries in MatSetPreallocationCOO() */
  PetscCount  Atot; /* Total number of valid (i.e., w/ non-negative indices) entries in the COO array */
  PetscCount *jmap; /* perm[jmap[i]..jmap[i+1]) give indices of entries in v[] associated with i-th nonzero of the matrix */
  PetscCount *perm; /* The permutation array in sorting (i,j) by row and then by col */
} MatCOOStruct_SeqAIJ;
173 
/*
  MatSeqXAIJGetOptions_Private - Opens a scope and snapshots the nonew, nounused and
  roworiented options of an AIJ matrix (when A is non-NULL) into local variables.
  Must always be paired with MatSeqXAIJRestoreOptions_Private(), which writes the
  locals back into the matrix and closes the scope opened here.
*/
#define MatSeqXAIJGetOptions_Private(A) \
  { \
    const PetscBool oldvalues = (PetscBool)(A != PETSC_NULLPTR); /* PETSC_TRUE when there is a matrix to read from / write back to */ \
    PetscInt        nonew = 0, nounused = 0; \
    PetscBool       roworiented = PETSC_FALSE; \
    if (oldvalues) { \
      nonew       = ((Mat_SeqAIJ *)A->data)->nonew; \
      nounused    = ((Mat_SeqAIJ *)A->data)->nounused; \
      roworiented = ((Mat_SeqAIJ *)A->data)->roworiented; \
    } \
    (void)0
185 
/*
  MatSeqXAIJRestoreOptions_Private - Writes the (possibly modified) option locals
  captured by MatSeqXAIJGetOptions_Private() back into the matrix (when A is
  non-NULL) and closes the scope that macro opened; the two must be used as a pair.
*/
#define MatSeqXAIJRestoreOptions_Private(A) \
  if (oldvalues) { \
    ((Mat_SeqAIJ *)A->data)->nonew       = nonew; \
    ((Mat_SeqAIJ *)A->data)->nounused    = nounused; \
    ((Mat_SeqAIJ *)A->data)->roworiented = roworiented; \
  } \
  } /* closes the scope opened by MatSeqXAIJGetOptions_Private() */ \
  (void)0
194 
195 static inline PetscErrorCode MatXAIJAllocatea(Mat A, PetscInt nz, PetscScalar **array)
196 {
197   Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
198 
199   PetscFunctionBegin;
200   PetscCall(PetscShmgetAllocateArray(nz, sizeof(PetscScalar), (void **)array));
201   a->free_a = PETSC_TRUE;
202   PetscFunctionReturn(PETSC_SUCCESS);
203 }
204 
205 static inline PetscErrorCode MatXAIJDeallocatea(Mat A, PetscScalar **array)
206 {
207   Mat_SeqAIJ *a = (Mat_SeqAIJ *)A->data;
208 
209   PetscFunctionBegin;
210   if (a->free_a) PetscCall(PetscShmgetDeallocateArray((void **)array));
211   a->free_a = PETSC_FALSE;
212   PetscFunctionReturn(PETSC_SUCCESS);
213 }
214 
215 /*
216   Frees the a, i, and j arrays from the XAIJ (AIJ, BAIJ, and SBAIJ) matrix types
217 */
218 static inline PetscErrorCode MatSeqXAIJFreeAIJ(Mat AA, MatScalar **a, PetscInt **j, PetscInt **i)
219 {
220   Mat_SeqAIJ *A = (Mat_SeqAIJ *)AA->data;
221 
222   PetscFunctionBegin;
223   if (A->free_a) PetscCall(PetscShmgetDeallocateArray((void **)a));
224   if (A->free_ij) PetscCall(PetscShmgetDeallocateArray((void **)j));
225   if (A->free_ij) PetscCall(PetscShmgetDeallocateArray((void **)i));
226   PetscFunctionReturn(PETSC_SUCCESS);
227 }
228 /*
229     Allocates larger a, i, and j arrays for the XAIJ (AIJ, BAIJ, and SBAIJ) matrix types
230     This is a macro because it takes the datatype as an argument which can be either a Mat or a MatScalar
231 */
#define MatSeqXAIJReallocateAIJ(Amat, AM, BS2, NROW, ROW, COL, RMAX, AA, AI, AJ, RP, AP, AIMAX, NONEW, datatype) \
  do { \
    if (NROW >= RMAX) { \
      /* row ROW is full: allocate CHUNKSIZE extra entries for it and shift all following rows */ \
      /* NOTE(review): relies on the calling scope providing a PetscInt loop variable `ii` and a Mat variable `A` referring to the same matrix as Amat */ \
      Mat_SeqAIJ *Ain       = (Mat_SeqAIJ *)Amat->data; \
      PetscInt    CHUNKSIZE = 15, new_nz = AI[AM] + CHUNKSIZE, len, *new_i = NULL, *new_j = NULL; \
      datatype   *new_a; \
\
      PetscCheck(NONEW != -2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "New nonzero at (%" PetscInt_FMT ",%" PetscInt_FMT ") caused a malloc. Use MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE) to turn off this check", ROW, COL); \
      /* malloc new storage space */ \
      PetscCall(PetscShmgetAllocateArray(BS2 * new_nz, sizeof(PetscScalar), (void **)&new_a)); \
      PetscCall(PetscShmgetAllocateArray(new_nz, sizeof(PetscInt), (void **)&new_j)); \
      PetscCall(PetscShmgetAllocateArray(AM + 1, sizeof(PetscInt), (void **)&new_i)); \
      Ain->free_a  = PETSC_TRUE; \
      Ain->free_ij = PETSC_TRUE; \
      /* copy over old data into new slots; row offsets after ROW are shifted by CHUNKSIZE */ \
      for (ii = 0; ii < ROW + 1; ii++) new_i[ii] = AI[ii]; \
      for (ii = ROW + 1; ii < AM + 1; ii++) new_i[ii] = AI[ii] + CHUNKSIZE; \
      PetscCall(PetscArraycpy(new_j, AJ, AI[ROW] + NROW)); \
      len = (new_nz - CHUNKSIZE - AI[ROW] - NROW); /* number of column indices after the enlarged row */ \
      PetscCall(PetscArraycpy(new_j + AI[ROW] + NROW + CHUNKSIZE, PetscSafePointerPlusOffset(AJ, AI[ROW] + NROW), len)); \
      PetscCall(PetscArraycpy(new_a, AA, BS2 * (AI[ROW] + NROW))); \
      PetscCall(PetscArrayzero(new_a + BS2 * (AI[ROW] + NROW), BS2 * CHUNKSIZE)); /* zero the newly created hole */ \
      PetscCall(PetscArraycpy(new_a + BS2 * (AI[ROW] + NROW + CHUNKSIZE), PetscSafePointerPlusOffset(AA, BS2 * (AI[ROW] + NROW)), BS2 * len)); \
      /* free up old matrix storage */ \
      PetscCall(MatSeqXAIJFreeAIJ(A, &Ain->a, &Ain->j, &Ain->i)); \
      AA     = new_a; \
      Ain->a = new_a; \
      AI = Ain->i = new_i; \
      AJ = Ain->j = new_j; \
\
      /* re-derive the caller's row pointer (RP) and value pointer (AP) for row ROW in the new arrays */ \
      RP   = AJ + AI[ROW]; \
      AP   = AA + BS2 * AI[ROW]; \
      RMAX = AIMAX[ROW] = AIMAX[ROW] + CHUNKSIZE; \
      Ain->maxnz += BS2 * CHUNKSIZE; \
      Ain->reallocs++; \
      Amat->nonzerostate++; \
    } \
  } while (0)
270 
/* Same as MatSeqXAIJReallocateAIJ() but only grows the i/j structure; the values array is discarded (set to NULL) */
#define MatSeqXAIJReallocateAIJ_structure_only(Amat, AM, BS2, NROW, ROW, COL, RMAX, AI, AJ, RP, AIMAX, NONEW, datatype) \
  do { \
    if (NROW >= RMAX) { \
      Mat_SeqAIJ *Ain = (Mat_SeqAIJ *)Amat->data; \
      /* there is no extra room in row, therefore enlarge */ \
      /* NOTE(review): relies on the calling scope providing a PetscInt loop variable `ii` and a Mat variable `A` referring to the same matrix as Amat */ \
      PetscInt CHUNKSIZE = 15, new_nz = AI[AM] + CHUNKSIZE, len, *new_i = NULL, *new_j = NULL; \
\
      PetscCheck(NONEW != -2, PETSC_COMM_SELF, PETSC_ERR_ARG_OUTOFRANGE, "New nonzero at (%" PetscInt_FMT ",%" PetscInt_FMT ") caused a malloc. Use MatSetOption(A, MAT_NEW_NONZERO_ALLOCATION_ERR, PETSC_FALSE) to turn off this check", ROW, COL); \
      /* malloc new storage space */ \
      PetscCall(PetscShmgetAllocateArray(new_nz, sizeof(PetscInt), (void **)&new_j)); \
      PetscCall(PetscShmgetAllocateArray(AM + 1, sizeof(PetscInt), (void **)&new_i)); \
      Ain->free_a  = PETSC_FALSE; /* no values array is kept */ \
      Ain->free_ij = PETSC_TRUE; \
\
      /* copy over old data into new slots; row offsets after ROW are shifted by CHUNKSIZE */ \
      for (ii = 0; ii < ROW + 1; ii++) new_i[ii] = AI[ii]; \
      for (ii = ROW + 1; ii < AM + 1; ii++) new_i[ii] = AI[ii] + CHUNKSIZE; \
      PetscCall(PetscArraycpy(new_j, AJ, AI[ROW] + NROW)); \
      len = (new_nz - CHUNKSIZE - AI[ROW] - NROW); /* number of column indices after the enlarged row */ \
      PetscCall(PetscArraycpy(new_j + AI[ROW] + NROW + CHUNKSIZE, AJ + AI[ROW] + NROW, len)); \
\
      /* free up old matrix storage */ \
      PetscCall(MatSeqXAIJFreeAIJ(A, &Ain->a, &Ain->j, &Ain->i)); \
      Ain->a = NULL; \
      AI = Ain->i = new_i; \
      AJ = Ain->j = new_j; \
\
      /* re-derive the caller's row pointer (RP) for row ROW in the new arrays */ \
      RP   = AJ + AI[ROW]; \
      RMAX = AIMAX[ROW] = AIMAX[ROW] + CHUNKSIZE; \
      Ain->maxnz += BS2 * CHUNKSIZE; \
      Ain->reallocs++; \
      Amat->nonzerostate++; \
    } \
  } while (0)
305 
306 PETSC_INTERN PetscErrorCode MatSeqAIJSetPreallocation_SeqAIJ(Mat, PetscInt, const PetscInt *);
307 PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_SeqAIJ(Mat, PetscCount, PetscInt[], PetscInt[]);
308 
309 PETSC_INTERN PetscErrorCode MatILUFactorSymbolic_SeqAIJ(Mat, Mat, IS, IS, const MatFactorInfo *);
310 PETSC_INTERN PetscErrorCode MatILUFactorSymbolic_SeqAIJ_ilu0(Mat, Mat, IS, IS, const MatFactorInfo *);
311 
312 PETSC_INTERN PetscErrorCode MatICCFactorSymbolic_SeqAIJ(Mat, Mat, IS, const MatFactorInfo *);
313 PETSC_INTERN PetscErrorCode MatCholeskyFactorSymbolic_SeqAIJ(Mat, Mat, IS, const MatFactorInfo *);
314 PETSC_INTERN PetscErrorCode MatCholeskyFactorNumeric_SeqAIJ_inplace(Mat, Mat, const MatFactorInfo *);
315 PETSC_INTERN PetscErrorCode MatCholeskyFactorNumeric_SeqAIJ(Mat, Mat, const MatFactorInfo *);
316 PETSC_INTERN PetscErrorCode MatDuplicate_SeqAIJ(Mat, MatDuplicateOption, Mat *);
317 PETSC_INTERN PetscErrorCode MatCopy_SeqAIJ(Mat, Mat, MatStructure);
318 PETSC_INTERN PetscErrorCode MatMissingDiagonal_SeqAIJ(Mat, PetscBool *, PetscInt *);
319 PETSC_INTERN PetscErrorCode MatMarkDiagonal_SeqAIJ(Mat);
320 PETSC_INTERN PetscErrorCode MatFindZeroDiagonals_SeqAIJ_Private(Mat, PetscInt *, PetscInt **);
321 
322 PETSC_INTERN PetscErrorCode MatMult_SeqAIJ(Mat, Vec, Vec);
323 PETSC_INTERN PetscErrorCode MatMult_SeqAIJ_Inode(Mat, Vec, Vec);
324 PETSC_INTERN PetscErrorCode MatMultAdd_SeqAIJ(Mat, Vec, Vec, Vec);
325 PETSC_INTERN PetscErrorCode MatMultAdd_SeqAIJ_Inode(Mat, Vec, Vec, Vec);
326 PETSC_INTERN PetscErrorCode MatMultTranspose_SeqAIJ(Mat, Vec, Vec);
327 PETSC_INTERN PetscErrorCode MatMultTransposeAdd_SeqAIJ(Mat, Vec, Vec, Vec);
328 PETSC_INTERN PetscErrorCode MatSOR_SeqAIJ(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);
329 PETSC_INTERN PetscErrorCode MatSOR_SeqAIJ_Inode(Mat, Vec, PetscReal, MatSORType, PetscReal, PetscInt, PetscInt, Vec);
330 
331 PETSC_INTERN PetscErrorCode MatSetOption_SeqAIJ(Mat, MatOption, PetscBool);
332 
333 PETSC_INTERN PetscErrorCode MatGetSymbolicTranspose_SeqAIJ(Mat, PetscInt *[], PetscInt *[]);
334 PETSC_INTERN PetscErrorCode MatRestoreSymbolicTranspose_SeqAIJ(Mat, PetscInt *[], PetscInt *[]);
335 PETSC_INTERN PetscErrorCode MatGetSymbolicTransposeReduced_SeqAIJ(Mat, PetscInt, PetscInt, PetscInt *[], PetscInt *[]);
336 PETSC_INTERN PetscErrorCode MatTransposeSymbolic_SeqAIJ(Mat, Mat *);
337 PETSC_INTERN PetscErrorCode MatTranspose_SeqAIJ(Mat, MatReuse, Mat *);
338 
339 PETSC_INTERN PetscErrorCode MatToSymmetricIJ_SeqAIJ(PetscInt, PetscInt *, PetscInt *, PetscBool, PetscInt, PetscInt, PetscInt **, PetscInt **);
340 PETSC_INTERN PetscErrorCode MatLUFactorSymbolic_SeqAIJ(Mat, Mat, IS, IS, const MatFactorInfo *);
341 PETSC_INTERN PetscErrorCode MatLUFactorNumeric_SeqAIJ_inplace(Mat, Mat, const MatFactorInfo *);
342 PETSC_INTERN PetscErrorCode MatLUFactorNumeric_SeqAIJ(Mat, Mat, const MatFactorInfo *);
343 PETSC_INTERN PetscErrorCode MatLUFactorNumeric_SeqAIJ_InplaceWithPerm(Mat, Mat, const MatFactorInfo *);
344 PETSC_INTERN PetscErrorCode MatLUFactor_SeqAIJ(Mat, IS, IS, const MatFactorInfo *);
345 PETSC_INTERN PetscErrorCode MatSolve_SeqAIJ_inplace(Mat, Vec, Vec);
346 PETSC_INTERN PetscErrorCode MatSolve_SeqAIJ(Mat, Vec, Vec);
347 PETSC_INTERN PetscErrorCode MatSolve_SeqAIJ_Inode(Mat, Vec, Vec);
348 PETSC_INTERN PetscErrorCode MatSolve_SeqAIJ_NaturalOrdering(Mat, Vec, Vec);
349 PETSC_INTERN PetscErrorCode MatSolveAdd_SeqAIJ(Mat, Vec, Vec, Vec);
350 PETSC_INTERN PetscErrorCode MatSolveTranspose_SeqAIJ_inplace(Mat, Vec, Vec);
351 PETSC_INTERN PetscErrorCode MatSolveTranspose_SeqAIJ(Mat, Vec, Vec);
352 PETSC_INTERN PetscErrorCode MatSolveTransposeAdd_SeqAIJ_inplace(Mat, Vec, Vec, Vec);
353 PETSC_INTERN PetscErrorCode MatSolveTransposeAdd_SeqAIJ(Mat, Vec, Vec, Vec);
354 PETSC_INTERN PetscErrorCode MatMatSolve_SeqAIJ(Mat, Mat, Mat);
355 PETSC_INTERN PetscErrorCode MatMatSolveTranspose_SeqAIJ(Mat, Mat, Mat);
356 PETSC_INTERN PetscErrorCode MatEqual_SeqAIJ(Mat, Mat, PetscBool *);
357 PETSC_INTERN PetscErrorCode MatFDColoringCreate_SeqXAIJ(Mat, ISColoring, MatFDColoring);
358 PETSC_INTERN PetscErrorCode MatFDColoringSetUp_SeqXAIJ(Mat, ISColoring, MatFDColoring);
359 PETSC_INTERN PetscErrorCode MatFDColoringSetUpBlocked_AIJ_Private(Mat, MatFDColoring, PetscInt);
360 PETSC_INTERN PetscErrorCode MatLoad_AIJ_HDF5(Mat, PetscViewer);
361 PETSC_INTERN PetscErrorCode MatLoad_SeqAIJ_Binary(Mat, PetscViewer);
362 PETSC_INTERN PetscErrorCode MatLoad_SeqAIJ(Mat, PetscViewer);
363 PETSC_INTERN PetscErrorCode RegisterApplyPtAPRoutines_Private(Mat);
364 
365 #if defined(PETSC_HAVE_HYPRE)
366 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_Transpose_AIJ_AIJ(Mat);
367 #endif
368 PETSC_INTERN PetscErrorCode MatProductSetFromOptions_SeqAIJ(Mat);
369 
370 PETSC_INTERN PetscErrorCode MatProductSymbolic_SeqAIJ_SeqAIJ(Mat);
371 PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_SeqAIJ_SeqAIJ(Mat);
372 PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_SeqAIJ_SeqAIJ(Mat);
373 
374 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ(Mat, Mat, PetscReal, Mat);
375 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_Sorted(Mat, Mat, PetscReal, Mat);
376 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqDense_SeqAIJ(Mat, Mat, PetscReal, Mat);
377 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_Scalable(Mat, Mat, PetscReal, Mat);
378 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_Scalable_fast(Mat, Mat, PetscReal, Mat);
379 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_Heap(Mat, Mat, PetscReal, Mat);
380 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_BTHeap(Mat, Mat, PetscReal, Mat);
381 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_RowMerge(Mat, Mat, PetscReal, Mat);
382 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_SeqAIJ_SeqAIJ_LLCondensed(Mat, Mat, PetscReal, Mat);
383 #if defined(PETSC_HAVE_HYPRE)
384 PETSC_INTERN PetscErrorCode MatMatMultSymbolic_AIJ_AIJ_wHYPRE(Mat, Mat, PetscReal, Mat);
385 #endif
386 
387 PETSC_INTERN PetscErrorCode MatMatMultNumeric_SeqAIJ_SeqAIJ(Mat, Mat, Mat);
388 PETSC_INTERN PetscErrorCode MatMatMultNumeric_SeqAIJ_SeqAIJ_Sorted(Mat, Mat, Mat);
389 
390 PETSC_INTERN PetscErrorCode MatMatMultNumeric_SeqDense_SeqAIJ(Mat, Mat, Mat);
391 PETSC_INTERN PetscErrorCode MatMatMultNumeric_SeqAIJ_SeqAIJ_Scalable(Mat, Mat, Mat);
392 
393 PETSC_INTERN PetscErrorCode MatPtAPSymbolic_SeqAIJ_SeqAIJ_SparseAxpy(Mat, Mat, PetscReal, Mat);
394 PETSC_INTERN PetscErrorCode MatPtAPNumeric_SeqAIJ_SeqAIJ(Mat, Mat, Mat);
395 PETSC_INTERN PetscErrorCode MatPtAPNumeric_SeqAIJ_SeqAIJ_SparseAxpy(Mat, Mat, Mat);
396 
397 PETSC_INTERN PetscErrorCode MatRARtSymbolic_SeqAIJ_SeqAIJ(Mat, Mat, PetscReal, Mat);
398 PETSC_INTERN PetscErrorCode MatRARtSymbolic_SeqAIJ_SeqAIJ_matmattransposemult(Mat, Mat, PetscReal, Mat);
399 PETSC_INTERN PetscErrorCode MatRARtSymbolic_SeqAIJ_SeqAIJ_colorrart(Mat, Mat, PetscReal, Mat);
400 PETSC_INTERN PetscErrorCode MatRARtNumeric_SeqAIJ_SeqAIJ(Mat, Mat, Mat);
401 PETSC_INTERN PetscErrorCode MatRARtNumeric_SeqAIJ_SeqAIJ_matmattransposemult(Mat, Mat, Mat);
402 PETSC_INTERN PetscErrorCode MatRARtNumeric_SeqAIJ_SeqAIJ_colorrart(Mat, Mat, Mat);
403 
404 PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_SeqAIJ_SeqAIJ(Mat, Mat, PetscReal, Mat);
405 PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_SeqAIJ_SeqAIJ(Mat, Mat, Mat);
406 PETSC_INTERN PetscErrorCode MatDestroy_SeqAIJ_MatTransMatMult(void *);
407 
408 PETSC_INTERN PetscErrorCode MatMatTransposeMultSymbolic_SeqAIJ_SeqAIJ(Mat, Mat, PetscReal, Mat);
409 PETSC_INTERN PetscErrorCode MatMatTransposeMultNumeric_SeqAIJ_SeqAIJ(Mat, Mat, Mat);
410 PETSC_INTERN PetscErrorCode MatTransposeColoringCreate_SeqAIJ(Mat, ISColoring, MatTransposeColoring);
411 PETSC_INTERN PetscErrorCode MatTransColoringApplySpToDen_SeqAIJ(MatTransposeColoring, Mat, Mat);
412 PETSC_INTERN PetscErrorCode MatTransColoringApplyDenToSp_SeqAIJ(MatTransposeColoring, Mat, Mat);
413 
414 PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_SeqAIJ_SeqAIJ_SeqAIJ(Mat, Mat, Mat, PetscReal, Mat);
415 PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_SeqAIJ_SeqAIJ_SeqAIJ(Mat, Mat, Mat, Mat);
416 
417 PETSC_INTERN PetscErrorCode MatSetRandomSkipColumnRange_SeqAIJ_Private(Mat, PetscInt, PetscInt, PetscRandom);
418 PETSC_INTERN PetscErrorCode MatSetValues_SeqAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
419 PETSC_INTERN PetscErrorCode MatGetRow_SeqAIJ(Mat, PetscInt, PetscInt *, PetscInt **, PetscScalar **);
420 PETSC_INTERN PetscErrorCode MatRestoreRow_SeqAIJ(Mat, PetscInt, PetscInt *, PetscInt **, PetscScalar **);
421 PETSC_INTERN PetscErrorCode MatScale_SeqAIJ(Mat, PetscScalar);
422 PETSC_INTERN PetscErrorCode MatDiagonalScale_SeqAIJ(Mat, Vec, Vec);
423 PETSC_INTERN PetscErrorCode MatDiagonalSet_SeqAIJ(Mat, Vec, InsertMode);
424 PETSC_INTERN PetscErrorCode MatAXPY_SeqAIJ(Mat, PetscScalar, Mat, MatStructure);
425 PETSC_INTERN PetscErrorCode MatGetRowIJ_SeqAIJ(Mat, PetscInt, PetscBool, PetscBool, PetscInt *, const PetscInt *[], const PetscInt *[], PetscBool *);
426 PETSC_INTERN PetscErrorCode MatRestoreRowIJ_SeqAIJ(Mat, PetscInt, PetscBool, PetscBool, PetscInt *, const PetscInt *[], const PetscInt *[], PetscBool *);
427 PETSC_INTERN PetscErrorCode MatGetColumnIJ_SeqAIJ(Mat, PetscInt, PetscBool, PetscBool, PetscInt *, const PetscInt *[], const PetscInt *[], PetscBool *);
428 PETSC_INTERN PetscErrorCode MatRestoreColumnIJ_SeqAIJ(Mat, PetscInt, PetscBool, PetscBool, PetscInt *, const PetscInt *[], const PetscInt *[], PetscBool *);
429 PETSC_INTERN PetscErrorCode MatGetColumnIJ_SeqAIJ_Color(Mat, PetscInt, PetscBool, PetscBool, PetscInt *, const PetscInt *[], const PetscInt *[], PetscInt *[], PetscBool *);
430 PETSC_INTERN PetscErrorCode MatRestoreColumnIJ_SeqAIJ_Color(Mat, PetscInt, PetscBool, PetscBool, PetscInt *, const PetscInt *[], const PetscInt *[], PetscInt *[], PetscBool *);
431 PETSC_INTERN PetscErrorCode MatDestroy_SeqAIJ(Mat);
432 PETSC_INTERN PetscErrorCode MatView_SeqAIJ(Mat, PetscViewer);
433 
434 PETSC_INTERN PetscErrorCode MatSeqAIJInvalidateDiagonal(Mat);
435 PETSC_INTERN PetscErrorCode MatSeqAIJInvalidateDiagonal_Inode(Mat);
436 PETSC_INTERN PetscErrorCode MatSeqAIJCheckInode(Mat);
437 PETSC_INTERN PetscErrorCode MatSeqAIJCheckInode_FactorLU(Mat);
438 
439 PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_SeqAIJ(Mat, Mat, PetscInt *);
440 
441 #if defined(PETSC_HAVE_MATLAB)
442 PETSC_EXTERN PetscErrorCode MatlabEnginePut_SeqAIJ(PetscObject, void *);
443 PETSC_EXTERN PetscErrorCode MatlabEngineGet_SeqAIJ(PetscObject, void *);
444 #endif
445 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqSBAIJ(Mat, MatType, MatReuse, Mat *);
446 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqBAIJ(Mat, MatType, MatReuse, Mat *);
447 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqDense(Mat, MatType, MatReuse, Mat *);
448 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJCRL(Mat, MatType, MatReuse, Mat *);
449 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_Elemental(Mat, MatType, MatReuse, Mat *);
450 #if defined(PETSC_HAVE_SCALAPACK)
451 PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
452 #endif
453 PETSC_INTERN PetscErrorCode MatConvert_AIJ_HYPRE(Mat, MatType, MatReuse, Mat *);
454 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJPERM(Mat, MatType, MatReuse, Mat *);
455 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJSELL(Mat, MatType, MatReuse, Mat *);
456 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJMKL(Mat, MatType, MatReuse, Mat *);
457 PETSC_INTERN PetscErrorCode MatConvert_SeqAIJ_SeqAIJViennaCL(Mat, MatType, MatReuse, Mat *);
458 PETSC_INTERN PetscErrorCode MatReorderForNonzeroDiagonal_SeqAIJ(Mat, PetscReal, IS, IS);
459 PETSC_INTERN PetscErrorCode MatRARt_SeqAIJ_SeqAIJ(Mat, Mat, MatReuse, PetscReal, Mat *);
460 PETSC_EXTERN PetscErrorCode MatCreate_SeqAIJ(Mat);
461 PETSC_INTERN PetscErrorCode MatAssemblyEnd_SeqAIJ(Mat, MatAssemblyType);
462 PETSC_INTERN PetscErrorCode MatZeroEntries_SeqAIJ(Mat);
463 
464 PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_SeqX_private(PetscInt, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, PetscInt *);
465 PETSC_INTERN PetscErrorCode MatCreateMPIMatConcatenateSeqMat_SeqAIJ(MPI_Comm, Mat, PetscInt, MatReuse, Mat *);
466 PETSC_INTERN PetscErrorCode MatCreateMPIMatConcatenateSeqMat_MPIAIJ(MPI_Comm, Mat, PetscInt, MatReuse, Mat *);
467 
468 PETSC_INTERN PetscErrorCode MatSetSeqMat_SeqAIJ(Mat, IS, IS, MatStructure, Mat);
469 PETSC_INTERN PetscErrorCode MatEliminateZeros_SeqAIJ(Mat, PetscBool);
470 PETSC_INTERN PetscErrorCode MatDestroySubMatrix_Private(Mat_SubSppt *);
471 PETSC_INTERN PetscErrorCode MatDestroySubMatrix_SeqAIJ(Mat);
472 PETSC_INTERN PetscErrorCode MatDestroySubMatrix_Dummy(Mat);
473 PETSC_INTERN PetscErrorCode MatDestroySubMatrices_Dummy(PetscInt, Mat *[]);
474 PETSC_INTERN PetscErrorCode MatCreateSubMatrix_SeqAIJ(Mat, IS, IS, PetscInt, MatReuse, Mat *);
475 
476 PETSC_INTERN PetscErrorCode MatSetSeqAIJWithArrays_private(MPI_Comm, PetscInt, PetscInt, PetscInt[], PetscInt[], PetscScalar[], MatType, Mat);
477 
478 PETSC_INTERN PetscErrorCode MatResetPreallocation_SeqAIJ_Private(Mat A, PetscBool *memoryreset);
479 
480 PETSC_SINGLE_LIBRARY_INTERN PetscErrorCode MatSeqAIJCompactOutExtraColumns_SeqAIJ(Mat, ISLocalToGlobalMapping *);
481 
482 /*
    PetscSparseDenseMinusDot - The inner kernel of triangular solves and Gauss-Seidel smoothing. \sum_i xv[i] * r[xi[i]] for CSR storage
484 
485   Input Parameters:
486 +  nnz - the number of entries
487 .  r - the array of vector values
488 .  xv - the matrix values for the row
489 -  xi - the column indices of the nonzeros in the row
490 
491   Output Parameter:
.  sum - decremented by the dot product of the row values and r
493 
494   PETSc compile flags:
495 +   PETSC_KERNEL_USE_UNROLL_4
496 -   PETSC_KERNEL_USE_UNROLL_2
497 
498   Developer Note:
499     The macro changes sum but not other parameters
500 
501 .seealso: `PetscSparseDensePlusDot()`
502 */
#if defined(PETSC_KERNEL_USE_UNROLL_4)
  #define PetscSparseDenseMinusDot(sum, r, xv, xi, nnz) \
    do { \
      if (nnz > 0) { \
        /* peel off nnz mod 4 entries, then process four entries per iteration */ \
        PetscInt nnz2 = nnz, rem = nnz & 0x3; \
        switch (rem) { \
        case 3: \
          sum -= *xv++ * r[*xi++]; /* fall through */ \
        case 2: \
          sum -= *xv++ * r[*xi++]; /* fall through */ \
        case 1: \
          sum -= *xv++ * r[*xi++]; \
          nnz2 -= rem; \
        } \
        while (nnz2 > 0) { \
          sum -= xv[0] * r[xi[0]] + xv[1] * r[xi[1]] + xv[2] * r[xi[2]] + xv[3] * r[xi[3]]; \
          xv += 4; \
          xi += 4; \
          nnz2 -= 4; \
        } \
        /* rewind xv and xi so the caller sees them unchanged */ \
        xv -= nnz; \
        xi -= nnz; \
      } \
    } while (0)

#elif defined(PETSC_KERNEL_USE_UNROLL_2)
  #define PetscSparseDenseMinusDot(sum, r, xv, xi, nnz) \
    do { \
      /* process pairs of entries; pick up the odd remainder at the end */ \
      PetscInt __i, __i1, __i2; \
      for (__i = 0; __i < nnz - 1; __i += 2) { \
        __i1 = xi[__i]; \
        __i2 = xi[__i + 1]; \
        sum -= (xv[__i] * r[__i1] + xv[__i + 1] * r[__i2]); \
      } \
      if (nnz & 0x1) sum -= xv[__i] * r[xi[__i]]; \
    } while (0)

#else
  /* default: straightforward scalar loop */
  #define PetscSparseDenseMinusDot(sum, r, xv, xi, nnz) \
    do { \
      PetscInt __i; \
      for (__i = 0; __i < nnz; __i++) sum -= xv[__i] * r[xi[__i]]; \
    } while (0)
#endif
547 
548 /*
549     PetscSparseDensePlusDot - The inner kernel of matrix-vector product \sum_i xv[i] * r[xi[i]] for CSR storage
550 
551   Input Parameters:
552 +  nnz - the number of entries
553 .  r - the array of vector values
554 .  xv - the matrix values for the row
555 -  xi - the column indices of the nonzeros in the row
556 
557   Output Parameter:
558 .  sum - the sum of results
559 
560   PETSc compile flags:
561 +   PETSC_KERNEL_USE_UNROLL_4
562 -   PETSC_KERNEL_USE_UNROLL_2
563 
564   Developer Note:
565     The macro changes sum but not other parameters
566 
567 .seealso: `PetscSparseDenseMinusDot()`
568 */
569 #if defined(PETSC_KERNEL_USE_UNROLL_4)
  /* 4-way unrolled kernel (Duff's device): the switch consumes the nnz%4 leftover
     entries via deliberate case fallthrough, then the main loop handles 4 entries
     per iteration; xv and xi are advanced while accumulating and restored at the end. */
  #define PetscSparseDensePlusDot(sum, r, xv, xi, nnz) \
    do { \
      if (nnz > 0) { \
        PetscInt nnz2 = nnz, rem = nnz & 0x3; \
        switch (rem) { \
        case 3: \
          sum += *xv++ * r[*xi++]; /* fall through */ \
        case 2: \
          sum += *xv++ * r[*xi++]; /* fall through */ \
        case 1: \
          sum += *xv++ * r[*xi++]; \
          nnz2 -= rem; \
        } \
        while (nnz2 > 0) { \
          sum += xv[0] * r[xi[0]] + xv[1] * r[xi[1]] + xv[2] * r[xi[2]] + xv[3] * r[xi[3]]; \
          xv += 4; \
          xi += 4; \
          nnz2 -= 4; \
        } \
        xv -= nnz; \
        xi -= nnz; \
      } \
    } while (0)
593 
594 #elif defined(PETSC_KERNEL_USE_UNROLL_2)
595   #define PetscSparseDensePlusDot(sum, r, xv, xi, nnz) \
596     do { \
597       PetscInt __i, __i1, __i2; \
598       for (__i = 0; __i < nnz - 1; __i += 2) { \
599         __i1 = xi[__i]; \
600         __i2 = xi[__i + 1]; \
601         sum += (xv[__i] * r[__i1] + xv[__i + 1] * r[__i2]); \
602       } \
603       if (nnz & 0x1) sum += xv[__i] * r[xi[__i]]; \
604     } while (0)
605 
606 #elif !(defined(__GNUC__) && defined(_OPENMP)) && defined(PETSC_USE_AVX512_KERNELS) && defined(PETSC_HAVE_IMMINTRIN_H) && defined(__AVX512F__) && defined(PETSC_USE_REAL_DOUBLE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_64BIT_INDICES) && !defined(PETSC_SKIP_IMMINTRIN_H_CUDAWORKAROUND)
  /* Hardware path: forward to the AVX-512 gather/FMA kernel defined later in this file
     (selected only for double-precision real scalars with 32-bit PetscInt) */
  #define PetscSparseDensePlusDot(sum, r, xv, xi, nnz) PetscSparseDensePlusDot_AVX512_Private(&(sum), (r), (xv), (xi), (nnz))
608 
609 #else
610   #define PetscSparseDensePlusDot(sum, r, xv, xi, nnz) \
611     do { \
612       PetscInt __i; \
613       for (__i = 0; __i < nnz; __i++) sum += xv[__i] * r[xi[__i]]; \
614     } while (0)
615 #endif
616 
617 #if defined(PETSC_USE_AVX512_KERNELS) && defined(PETSC_HAVE_IMMINTRIN_H) && defined(__AVX512F__) && defined(PETSC_USE_REAL_DOUBLE) && !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_64BIT_INDICES) && !defined(PETSC_SKIP_IMMINTRIN_H_CUDAWORKAROUND)
618   #include <immintrin.h>
619   #if !defined(_MM_SCALE_8)
620     #define _MM_SCALE_8 8
621   #endif
622 
/*
  Private AVX-512 kernel behind PetscSparseDensePlusDot(): accumulates
  *sum += \sum_{k=0}^{n-1} aa[k] * x[aj[k]] using 8-wide fused multiply-add with
  32-bit index gathers. Only valid under the guarding #if above (double precision,
  real scalars, 32-bit PetscInt).

  Input Parameters:
+  x  - the dense vector values
.  aa - the matrix values of the row
.  aj - the column indices of the nonzeros in the row
-  n  - the number of entries in the row

  Output Parameter:
.  sum - the dot product is added to the incoming value of *sum
*/
static inline void PetscSparseDensePlusDot_AVX512_Private(PetscScalar *sum, const PetscScalar *x, const MatScalar *aa, const PetscInt *aj, PetscInt n)
{
  __m512d  vec_x, vec_y, vec_vals;
  __m256i  vec_idx;
  PetscInt j;

  /* main loop: 8 entries per iteration, partial sums kept in vec_y */
  vec_y = _mm512_setzero_pd();
  for (j = 0; j < (n >> 3); j++) {
    vec_idx  = _mm256_loadu_si256((__m256i const *)aj);      /* 8 32-bit column indices */
    vec_vals = _mm512_loadu_pd(aa);                          /* 8 matrix values */
    vec_x    = _mm512_i32gather_pd(vec_idx, x, _MM_SCALE_8); /* x[aj[0..7]] */
    vec_y    = _mm512_fmadd_pd(vec_x, vec_vals, vec_y);
    aj += 8;
    aa += 8;
  }
  #if defined(__AVX512VL__)
  /* masked load requires avx512vl, which is not supported by KNL */
  if (n & 0x07) {
    /* tail: process the remaining n%8 entries with masked load/gather/fmadd;
       lanes outside the mask keep the src operand (possibly uninitialized when
       n < 8) but are never read, since the same mask guards every masked op */
    __mmask8 mask;
    mask     = (__mmask8)(0xff >> (8 - (n & 0x07)));
    vec_idx  = _mm256_mask_loadu_epi32(vec_idx, mask, aj);
    vec_vals = _mm512_mask_loadu_pd(vec_vals, mask, aa);
    vec_x    = _mm512_mask_i32gather_pd(vec_x, mask, vec_idx, x, _MM_SCALE_8);
    vec_y    = _mm512_mask3_fmadd_pd(vec_x, vec_vals, vec_y, mask);
  }
  *sum += _mm512_reduce_add_pd(vec_y);
  #else
  /* no AVX512VL: reduce the vector part, then finish the n%8 tail with scalar code */
  *sum += _mm512_reduce_add_pd(vec_y);
  for (j = 0; j < (n & 0x07); j++) *sum += aa[j] * x[aj[j]];
  #endif
}
654 #endif
655 
656 /*
657     PetscSparseDenseMaxDot - The inner kernel of a modified matrix-vector product \max_i xv[i] * r[xi[i]] for CSR storage
658 
659   Input Parameters:
660 +  nnz - the number of entries
661 .  r - the array of vector values
662 .  xv - the matrix values for the row
663 -  xi - the column indices of the nonzeros in the row
664 
  Output Parameter:
.  max - the maximum of the incoming value of max and the real parts of the products xv[i]*r[xi[i]]
667 
668 .seealso: `PetscSparseDensePlusDot()`, `PetscSparseDenseMinusDot()`
669 */
/* Fold each product's real part into max, keeping the larger value each step (nnz == 0 leaves max untouched) */
#define PetscSparseDenseMaxDot(max, r, xv, xi, nnz) \
  do { \
    for (PetscInt __k = 0; __k < (nnz); __k++) { \
      const PetscReal __prod = PetscRealPart((xv)[__k] * (r)[(xi)[__k]]); \
      max = (PetscRealPart(max) < __prod) ? __prod : PetscRealPart(max); \
    } \
  } while (0)
674 
675 /*
676  Add column indices into table for counting the max nonzeros of merged rows
677  */
/* For every row of mat (may be NULL), insert each shifted column index (col+1) into
   the hash table ta with value 1, so the table size bounds the merged-row nonzeros */
#define MatRowMergeMax_SeqAIJ(mat, nrows, ta) \
  do { \
    if (mat) { \
      for (PetscInt _r = 0; _r < (nrows); _r++) { \
        const PetscInt *_cols = (mat)->j + (mat)->i[_r]; \
        const PetscInt  _len  = (mat)->i[_r + 1] - (mat)->i[_r]; \
        for (PetscInt _k = 0; _k < _len; _k++) PetscCall(PetscHMapISet((ta), _cols[_k] + 1, 1)); \
      } \
    } \
  } while (0)
690 
691 /*
692  Add column indices into table for counting the nonzeros of merged rows
693  */
/* For each listed row of mat, insert each shifted column index (col+1) into the
   hash table ta (INSERT_VALUES mode), counting the nonzeros of the merged rows */
#define MatMergeRows_SeqAIJ(mat, nrows, rows, ta) \
  do { \
    for (PetscInt _k = 0; _k < (nrows); _k++) { \
      const PetscInt  _r    = (rows)[_k]; \
      const PetscInt *_cols = (mat)->j + (mat)->i[_r]; \
      const PetscInt  _len  = (mat)->i[_r + 1] - (mat)->i[_r]; \
      for (PetscInt _c = 0; _c < _len; _c++) PetscCall(PetscHMapISetWithMode((ta), _cols[_c] + 1, 1, INSERT_VALUES)); \
    } \
  } while (0)
705