#pragma once

#include <../src/mat/impls/aij/seq/aij.h>

typedef struct { /* used by MatCreateMPIAIJSumSeqAIJ for reusing the merged matrix */
  PetscLayout  rowmap;
  PetscInt   **buf_ri, **buf_rj;
  PetscMPIInt *len_s, *len_r, *id_r;  /* arrays of length comm->size; lengths and ranks for the send/recv of matrix values */
  PetscMPIInt  nsend, nrecv;
  PetscInt    *bi, *bj;               /* i and j arrays of the local portion of the MPI matrix product C - rename to ci, cj! */
  PetscInt    *owners_co, *coi, *coj; /* i and j arrays of (p->B)^T*A*P - used in the communication */
} MatMergeSeqsToMPI;

typedef struct { /* used by MatPtAPXXX_MPIAIJ_MPIAIJ() and MatMatMultXXX_MPIAIJ_MPIAIJ() */
  PetscInt    *startsj_s, *startsj_r; /* used by MatGetBrowsOfAoCols_MPIAIJ */
  PetscScalar *bufa;                  /* used by MatGetBrowsOfAoCols_MPIAIJ */
  Mat          P_loc, P_oth;          /* partial B_seq -- intended to replace B_seq */
  PetscInt    *api, *apj;             /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscScalar *apv;
  MatReuse     reuse; /* flag to skip MatGetBrowsOfAoCols_MPIAIJ() and MatMPIAIJGetLocalMat() in the first call of MatPtAPNumeric_MPIAIJ_MPIAIJ() */
  PetscScalar *apa;   /* temporary array for storing a row of A*P, used in MatMatMult() */
  Mat          A_loc; /* used by MatTransposeMatMult(), contains api and apj */
  ISLocalToGlobalMapping ltog; /* mapping from local column indices to global column indices for A_loc */
  Mat       Pt;                /* used by MatTransposeMatMult(), Pt = P^T */
  Mat       Rd, Ro, AP_loc, C_loc, C_oth;
  PetscInt  algType; /* implementation algorithm */
  PetscSF   sf;      /* used to communicate the remote part of C */
  PetscInt *c_othi, *c_rmti;

  MatMergeSeqsToMPI *merge;
} MatProductCtx_APMPI;

#if defined(PETSC_USE_CTABLE)
  #define PETSCTABLE PetscHMapI
#else
  #define PETSCTABLE PetscInt *
#endif
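
/* Illustrative sketch (not part of this header's interface): a PETSCTABLE field such as the
   colmap member declared below maps global column numbers of off-diagonal entries to local
   column numbers, stored shifted by one so that 0 can mean "not present". Assuming a
   Mat_MPIAIJ *aij with an initialized colmap and a global column gcol, a lookup would follow
   one of two patterns depending on the configuration:

     PetscInt lcol;
   #if defined(PETSC_USE_CTABLE)
     PetscCall(PetscHMapIGetWithDefault(aij->colmap, gcol + 1, 0, &lcol));
     lcol--;                       // -1 when gcol is not an off-diagonal column
   #else
     lcol = aij->colmap[gcol] - 1; // -1 when gcol is not an off-diagonal column
   #endif
*/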

// Shared by MPIAIJ, MPIBAIJ, MPISBAIJ so that we can access common fields in the same way.
#define MPIAIJHEADER \
  Mat         A, B;             /* local submatrices: A (diag part), B (off-diag part) */ \
  PetscMPIInt size;             /* size of communicator */ \
  PetscMPIInt rank;             /* rank of proc in communicator */ \
\
  /* The following variables are used for matrix assembly */ \
  PetscBool    donotstash;      /* if true, off-processor entries are dropped */ \
  MPI_Request *send_waits;      /* array of send requests */ \
  MPI_Request *recv_waits;      /* array of receive requests */ \
  PetscInt     nsends, nrecvs;  /* numbers of sends and receives */ \
  MatScalar   *svalues, *rvalues; /* sending and receiving data */ \
  PetscInt     rmax;            /* maximum message length */ \
  PETSCTABLE   colmap;          /* local column number of an off-diagonal column, indexed by global column */ \
  PetscInt    *garray;          /* work array: global column numbers of the off-diagonal columns */ \
\
  /* The following variables are used for matrix-vector products */ \
  Vec        lvec;              /* local vector */ \
  VecScatter Mvctx;             /* scatter context for vector */ \
  PetscBool  roworiented;       /* if true, row-oriented input; default true */ \
\
  /* The following variables are for MatGetRow() */ \
  PetscInt    *rowindices;      /* column indices for row */ \
  PetscScalar *rowvalues;       /* nonzero values in row */ \
  PetscBool    getrowactive

typedef struct {
  MPIAIJHEADER;
  Vec       diag;
  PetscInt *ld; /* number of entries per row left of diagonal block */

  /* Used by device classes */
  void *spptr;

  struct _MatOps cops;
} Mat_MPIAIJ;
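
/* Illustrative sketch (assumes an assembled matrix "mat" of type MATMPIAIJ): inside
   implementation code the diagonal and off-diagonal blocks declared by MPIAIJHEADER are
   reached through the data pointer, e.g.

     Mat_MPIAIJ     *aij    = (Mat_MPIAIJ *)mat->data;
     Mat             Ad     = aij->A;      // square diagonal block (SeqAIJ), local rows x local cols
     Mat             Ao     = aij->B;      // off-diagonal block (SeqAIJ) with compressed columns
     const PetscInt *garray = aij->garray; // garray[j] = global column of Ao column j

   User-level code would instead call the public accessor MatMPIAIJGetSeqAIJ(mat, &Ad, &Ao, &garray). */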

typedef struct {
  PetscCount n;                          /* Number of COO entries passed to MatSetPreallocationCOO() */
  PetscSF    sf;                         /* SF to send/recv remote values in MatSetValuesCOO() */
  PetscCount Annz, Bnnz;                 /* Number of entries in diagonal A and off-diagonal B */
  PetscCount Annz2, Bnnz2;               /* Number of unique remote entries belonging to A and B */
  PetscCount Atot1, Atot2, Btot1, Btot2; /* Total local (tot1) and remote (tot2) entries (which might contain repeats) belonging to A and B */
  PetscCount  *Ajmap1, *Aperm1;          /* Lengths: [Annz+1], [Atot1]. Local entries to diag */
  PetscCount  *Bjmap1, *Bperm1;          /* Lengths: [Bnnz+1], [Btot1]. Local entries to offdiag */
  PetscCount  *Aimap2, *Ajmap2, *Aperm2; /* Lengths: [Annz2], [Annz2+1], [Atot2]. Remote entries to diag */
  PetscCount  *Bimap2, *Bjmap2, *Bperm2; /* Lengths: [Bnnz2], [Bnnz2+1], [Btot2]. Remote entries to offdiag */
  PetscCount  *Cperm1;                   /* [sendlen] Permutation to fill the MPI send buffer. 'C' for communication */
  PetscScalar *sendbuf, *recvbuf;        /* Buffers for remote values in MatSetValuesCOO() */
  PetscInt     sendlen, recvlen;         /* Lengths (in units of PetscScalar) of send/recvbuf */
} MatCOOStruct_MPIAIJ;
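
/* Illustrative sketch (assumes a MATMPIAIJ matrix "A" and hypothetical arrays coo_i[], coo_j[],
   vals[] of length ncoo describing the nonzeros): the fields above back the public COO assembly
   path, roughly

     PetscCall(MatSetPreallocationCOO(A, ncoo, coo_i, coo_j)); // builds a MatCOOStruct_MPIAIJ
     PetscCall(MatSetValuesCOO(A, vals, INSERT_VALUES));       // uses sf/sendbuf/recvbuf and the
                                                               // jmap/perm arrays to scatter values

   Repeated MatSetValuesCOO() calls (e.g. with ADD_VALUES inside a nonlinear loop) reuse the maps
   built once by MatSetPreallocationCOO(). */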

PETSC_EXTERN PetscErrorCode MatCreate_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat, MatAssemblyType);

PETSC_INTERN PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatDisAssemble_MPIAIJ(Mat, PetscBool);
PETSC_INTERN PetscErrorCode MatDuplicate_MPIAIJ(Mat, MatDuplicateOption, Mat *);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatIncreaseOverlap_MPIAIJ_Scalable(Mat, PetscInt, IS[], PetscInt);
PETSC_INTERN PetscErrorCode MatFDColoringCreate_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatFDColoringSetUp_MPIXAIJ(Mat, ISColoring, MatFDColoring);
PETSC_INTERN PetscErrorCode MatCreateSubMatrices_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatricesMPI_MPIAIJ(Mat, PetscInt, const IS[], const IS[], MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_All(Mat, MatCreateSubMatrixOption, MatReuse, Mat *[]);
PETSC_INTERN PetscErrorCode MatView_MPIAIJ(Mat, PetscViewer);

PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_nonscalable(Mat, IS, IS, PetscInt, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowDist(Mat, IS, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatCreateSubMatrix_MPIAIJ_SameRowColDist(Mat, IS, IS, MatReuse, Mat *);
PETSC_INTERN PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat, MPI_Comm, MatReuse, Mat *);

PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatLoad_MPIAIJ_Binary(Mat, PetscViewer);
PETSC_INTERN PetscErrorCode MatCreateColmap_MPIAIJ_Private(Mat);

PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductSetFromOptions_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_MPIAIJBACKEND(Mat);
PETSC_INTERN PetscErrorCode MatProductSymbolic_AB_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_PtAP_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatProductSymbolic_RARt_MPIAIJ_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductNumeric_RARt_MPIAIJ_MPIAIJ(Mat);

PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ_seqMPI(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatMatMatMultSymbolic_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatMatMatMultNumeric_MPIAIJ_MPIAIJ_MPIAIJ(Mat, Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);

PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_scalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_scalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ_allatonce_merged(Mat, Mat, Mat);

#if defined(PETSC_HAVE_HYPRE)
PETSC_INTERN PetscErrorCode MatPtAPSymbolic_AIJ_AIJ_wHYPRE(Mat, Mat, PetscReal, Mat);
#endif
PETSC_INTERN PetscErrorCode MatConvert_MPIAIJ_MPIDense(Mat, MatType, MatReuse, Mat *);
#if defined(PETSC_HAVE_SCALAPACK) && (defined(PETSC_USE_REAL_SINGLE) || defined(PETSC_USE_REAL_DOUBLE))
PETSC_INTERN PetscErrorCode MatConvert_AIJ_ScaLAPACK(Mat, MatType, MatReuse, Mat *);
#endif

PETSC_INTERN PetscErrorCode MatDestroy_MPIAIJ(Mat);
PETSC_INTERN PetscErrorCode MatProductCtxDestroy_MPIAIJ_PtAP(PetscCtxRt);
PETSC_INTERN PetscErrorCode MatProductCtxDestroy_MPIAIJ_MatMatMult(PetscCtxRt);

PETSC_INTERN PetscErrorCode MatGetBrowsOfAoCols_MPIAIJ(Mat, Mat, MatReuse, PetscInt **, PetscInt **, MatScalar **, Mat *);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[], const PetscScalar[], InsertMode);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat(Mat, const PetscInt[], const PetscInt[], const PetscScalar[]);
PETSC_INTERN PetscErrorCode MatSetValues_MPIAIJ_CopyFromCSRFormat_Symbolic(Mat, const PetscInt[], const PetscInt[]);
PETSC_INTERN PetscErrorCode MatSetOption_MPIAIJ(Mat, MatOption, PetscBool);

PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIAIJ(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_nonscalable(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultNumeric_MPIAIJ_MPIAIJ_matmatmult(Mat, Mat, Mat);
PETSC_INTERN PetscErrorCode MatTransposeMatMultSymbolic_MPIAIJ_MPIDense(Mat, Mat, PetscReal, Mat);
PETSC_INTERN PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat, Mat *);

PETSC_INTERN PetscErrorCode MatSetFromOptions_MPIAIJ(Mat, PetscOptionItems);
PETSC_INTERN PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat, PetscInt, const PetscInt[], PetscInt, const PetscInt[]);

PETSC_INTERN PetscErrorCode MatAXPYGetPreallocation_MPIX_private(PetscInt, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, const PetscInt *, PetscInt *);

extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat, Mat *);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat, Vec);

PETSC_INTERN PetscErrorCode MatGetSeqMats_MPIAIJ(Mat, Mat *, Mat *);
PETSC_INTERN PetscErrorCode MatSetSeqMats_MPIAIJ(Mat, IS, IS, IS, MatStructure, Mat, Mat);

PETSC_INTERN PetscErrorCode MatSetPreallocationCOO_MPIAIJ(Mat, PetscCount, PetscInt[], PetscInt[]);

PETSC_INTERN PetscErrorCode MatGetCurrentMemType_MPIAIJ(Mat, PetscMemType *);
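
/* Illustrative sketch (user-level, hypothetical matrices A and P): the PtAP symbolic/numeric
   routines declared above are normally reached through the public MatPtAP() interface, e.g.

     Mat C;
     PetscCall(MatPtAP(A, P, MAT_INITIAL_MATRIX, PETSC_DETERMINE, &C)); // symbolic + numeric
     // ... change the numerical values of A, keeping its nonzero pattern ...
     PetscCall(MatPtAP(A, P, MAT_REUSE_MATRIX, PETSC_DETERMINE, &C));   // numeric phase only

   Which variant is used (scalable, allatonce, allatonce_merged, ...) is decided in
   MatProductSetFromOptions_MPIAIJ(), typically via a runtime option such as -mat_product_algorithm. */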

/* compute apa = A[i,:]*P = Ad[i,:]*P_loc + Ao[i,:]*P_oth using sparse axpy */
#define AProw_scalable(i, ad, ao, p_loc, p_oth, api, apj, apa) \
  do { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj, _nextp, *_apJ; \
    PetscScalar *_aa, _valtmp, *_pa; \
    _apJ = apj + api[i]; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = PetscSafePointerPlusOffset(ad->j, _ai[i]); \
    _aa  = PetscSafePointerPlusOffset(ad->a, _ai[i]); \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = p_loc->j + _pi[_row]; \
      _pa  = p_loc->a + _pi[_row]; \
      /* perform sparse axpy */ \
      _valtmp = _aa[_j]; \
      _nextp  = 0; \
      for (_k = 0; _nextp < _pnz; _k++) { \
        if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
          apa[_k] += _valtmp * _pa[_nextp++]; \
        } \
      } \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = PetscSafePointerPlusOffset(ao->j, _ai[i]); \
      _aa  = PetscSafePointerPlusOffset(ao->a, _ai[i]); \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = p_oth->j + _pi[_row]; \
        _pa  = p_oth->a + _pi[_row]; \
        /* perform sparse axpy */ \
        _valtmp = _aa[_j]; \
        _nextp  = 0; \
        for (_k = 0; _nextp < _pnz; _k++) { \
          if (_apJ[_k] == _pj[_nextp]) { /* column of AP == column of P */ \
            apa[_k] += _valtmp * _pa[_nextp++]; \
          } \
        } \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  } while (0)

#define AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa) \
  do { \
    PetscInt     _anz, _pnz, _j, _k, *_ai, *_aj, _row, *_pi, *_pj; \
    PetscScalar *_aa, _valtmp, *_pa; \
    /* diagonal portion of A */ \
    _ai  = ad->i; \
    _anz = _ai[i + 1] - _ai[i]; \
    _aj  = PetscSafePointerPlusOffset(ad->j, _ai[i]); \
    _aa  = PetscSafePointerPlusOffset(ad->a, _ai[i]); \
    for (_j = 0; _j < _anz; _j++) { \
      _row = _aj[_j]; \
      _pi  = p_loc->i; \
      _pnz = _pi[_row + 1] - _pi[_row]; \
      _pj  = PetscSafePointerPlusOffset(p_loc->j, _pi[_row]); \
      _pa  = PetscSafePointerPlusOffset(p_loc->a, _pi[_row]); \
      /* perform dense axpy */ \
      _valtmp = _aa[_j]; \
      for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
      (void)PetscLogFlops(2.0 * _pnz); \
    } \
    /* off-diagonal portion of A */ \
    if (p_oth) { \
      _ai  = ao->i; \
      _anz = _ai[i + 1] - _ai[i]; \
      _aj  = PetscSafePointerPlusOffset(ao->j, _ai[i]); \
      _aa  = PetscSafePointerPlusOffset(ao->a, _ai[i]); \
      for (_j = 0; _j < _anz; _j++) { \
        _row = _aj[_j]; \
        _pi  = p_oth->i; \
        _pnz = _pi[_row + 1] - _pi[_row]; \
        _pj  = PetscSafePointerPlusOffset(p_oth->j, _pi[_row]); \
        _pa  = PetscSafePointerPlusOffset(p_oth->a, _pi[_row]); \
        /* perform dense axpy */ \
        _valtmp = _aa[_j]; \
        for (_k = 0; _k < _pnz; _k++) apa[_pj[_k]] += _valtmp * _pa[_k]; \
        (void)PetscLogFlops(2.0 * _pnz); \
      } \
    } \
  } while (0)
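
/* Illustrative sketch (assumptions: "a" is a Mat_MPIAIJ *, "ptap" is a MatProductCtx_APMPI *
   whose P_loc/P_oth have already been built, and "apa" is a zeroed dense work array whose
   length equals the number of global columns of P).  A numeric A*P kernel would use the
   non-scalable macro roughly as follows for each local row i:

     Mat_SeqAIJ *ad    = (Mat_SeqAIJ *)a->A->data;
     Mat_SeqAIJ *ao    = (Mat_SeqAIJ *)a->B->data;
     Mat_SeqAIJ *p_loc = (Mat_SeqAIJ *)ptap->P_loc->data;
     Mat_SeqAIJ *p_oth = ptap->P_oth ? (Mat_SeqAIJ *)ptap->P_oth->data : NULL;

     AProw_nonscalable(i, ad, ao, p_loc, p_oth, apa); // apa now holds dense row i of A*P

   The caller then copies the entries of apa at the symbolic column pattern of row i into the
   product and zeroes them again before processing the next row; AProw_scalable does the same
   job but accumulates directly into the row's compressed pattern (api, apj), so its work array
   only needs to be as long as that row of A*P. */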