/*
   Private data structures and prototypes for the parallel (MPI) AIJ matrix
   implementation.  A distributed MATMPIAIJ matrix is stored as two sequential
   AIJ matrices per process: A holds the "diagonal" block (columns owned by
   this process) and B holds the "off-diagonal" block (all other columns).
   NOTE(review): the include-guard name __MPIAIJ_H uses a double-underscore
   prefix, which is reserved for the implementation by the C standard; it is
   kept here because it matches the project's existing convention.
*/
#if !defined(__MPIAIJ_H)
#define __MPIAIJ_H

#include <../src/mat/impls/aij/seq/aij.h>
#include <../src/sys/ctable.h>

/* Per-process data for a parallel AIJ matrix */
typedef struct {
  Mat           A,B;                   /* local submatrices: A (diag part), B (off-diag part) */
  PetscMPIInt   size;                  /* size of communicator */
  PetscMPIInt   rank;                  /* rank of proc in communicator */

  /* The following variables are used for matrix assembly */

  PetscBool     donotstash;            /* PETSC_TRUE if off processor entries dropped */
  MPI_Request   *send_waits;           /* array of send requests */
  MPI_Request   *recv_waits;           /* array of receive requests */
  PetscInt      nsends,nrecvs;         /* numbers of sends and receives */
  PetscScalar   *svalues,*rvalues;     /* sending and receiving data */
  PetscInt      rmax;                  /* maximum message length */
#if defined (PETSC_USE_CTABLE)
  PetscTable    colmap;                /* hash table mapping global to local off-diag column numbers */
#else
  PetscInt      *colmap;               /* local col number of off-diag col */
#endif
  PetscInt      *garray;               /* global index of all off-processor columns */

  /* The following variables are used for matrix-vector products */

  Vec           lvec;                  /* local vector */
  Vec           diag;                  /* NOTE(review): presumably a cached diagonal vector -- confirm against users of this field */
  VecScatter    Mvctx;                 /* scatter context for vector */
  PetscBool     roworiented;           /* if true, row-oriented input, default true */

  /* The following variables are for MatGetRow() */

  PetscInt      *rowindices;           /* column indices for row */
  PetscScalar   *rowvalues;            /* nonzero values in row */
  PetscBool     getrowactive;          /* indicates MatGetRow(), not restored */

  /* Used by MatDistribute_MPIAIJ() to allow reuse of previous matrix allocation and nonzero pattern */
  PetscInt      *ld;                   /* number of entries per row left of diagonal block */
} Mat_MPIAIJ;

/* used by MatMatMult_MPIAIJ_MPIAIJ and MatPtAP_MPIAIJ_MPIAIJ for reusing symbolic mat product */
typedef struct {
  PetscInt       *startsj,*startsj_r;  /* NOTE(review): send/receive offset arrays for j-indices -- confirm against the symbolic-product routines */
  PetscScalar    *bufa;                /* buffer for matrix values */
  IS             isrowa,isrowb,iscolb; /* index sets selecting the rows/cols gathered from A and B */
  Mat            *aseq,*bseq,C_seq;    /* A_seq=aseq[0], B_seq=bseq[0] */
  Mat            A_loc,B_seq;          /* note: B_seq (capital) is a distinct member from bseq above */
  Mat            B_loc,B_oth;          /* partial B_seq -- intend to replace B_seq */
  PetscInt       brstart;              /* starting owned rows of B in matrix bseq[0]; brend = brstart+B->m */
  PetscInt       *abi,*abj;            /* symbolic i and j arrays of the local product A_loc*B_seq */
  PetscInt       abnz_max;             /* max(abi[i+1] - abi[i]), max num of nnz in a row of A_loc*B_seq */
  MatReuse       reuse;                /* MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX */
  PetscErrorCode (*destroy)(Mat);      /* saved destroy routine of the product matrix */
  PetscErrorCode (*duplicate)(Mat,MatDuplicateOption,Mat*); /* saved duplicate routine of the product matrix */
} Mat_MatMatMultMPI;

/* used by MatMerge_SeqsToMPI for reusing the merged matrix */
typedef struct {
  PetscLayout    rowmap;               /* row ownership layout of the merged matrix */
  PetscInt       **buf_ri,**buf_rj;    /* receive buffers for row i- and j-index arrays */
  PetscMPIInt    *len_s,*len_r,*id_r;  /* array of length of comm->size, store send/recv matrix values */
  PetscMPIInt    nsend,nrecv;          /* numbers of sends and receives */
  PetscInt       *bi,*bj;              /* i and j array of the local portion of mpi C=P^T*A*P */
  PetscInt       *owners_co,*coi,*coj; /* i and j array of (p->B)^T*A*P - used in the communication */
  PetscErrorCode (*destroy)(Mat);      /* saved destroy routine of the merged matrix */
  PetscErrorCode (*duplicate)(Mat,MatDuplicateOption,Mat*); /* saved duplicate routine of the merged matrix */
} Mat_Merge_SeqsToMPI;

/* MPIAIJ-specific implementations of Mat operations; registered in the
   matrix operations table or called directly by sibling AIJ code. */
extern PetscErrorCode MatSetColoring_MPIAIJ(Mat,ISColoring);
extern PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat,void*);
extern PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat,PetscInt,void*);
extern PetscErrorCode MatSetUpMultiply_MPIAIJ(Mat);
extern PetscErrorCode DisAssemble_MPIAIJ(Mat);
extern PetscErrorCode MatDuplicate_MPIAIJ(Mat,MatDuplicateOption,Mat *);
extern PetscErrorCode MatIncreaseOverlap_MPIAIJ(Mat,PetscInt,IS [],PetscInt);
extern PetscErrorCode MatFDColoringCreate_MPIAIJ(Mat,ISColoring,MatFDColoring);
extern PetscErrorCode MatGetSubMatrices_MPIAIJ (Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);
extern PetscErrorCode MatGetSubMatrix_MPIAIJ_All(Mat,MatGetSubMatrixOption,MatReuse,Mat *[]);
extern PetscErrorCode MatGetSubMatricesParallel_MPIAIJ (Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]);

extern PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat,IS,IS,MatReuse,Mat *);
extern PetscErrorCode MatGetSubMatrix_MPIAIJ_Private (Mat,IS,IS,PetscInt,MatReuse,Mat *);
extern PetscErrorCode MatGetMultiProcBlock_MPIAIJ(Mat,MPI_Comm,Mat*);

extern PetscErrorCode MatLoad_MPIAIJ(Mat,PetscViewer);
extern PetscErrorCode MatMatMult_MPIAIJ_MPIAIJ(Mat,Mat,MatReuse,PetscReal,Mat*);
extern PetscErrorCode MatMatMultSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatMatMultNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);
extern PetscErrorCode MatPtAPSymbolic_MPIAIJ(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatPtAPNumeric_MPIAIJ(Mat,Mat,Mat);
extern PetscErrorCode MatPtAPSymbolic_MPIAIJ_MPIAIJ(Mat,Mat,PetscReal,Mat*);
extern PetscErrorCode MatPtAPNumeric_MPIAIJ_MPIAIJ(Mat,Mat,Mat);
extern PetscErrorCode MatSetValues_MPIAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar [],InsertMode);
extern PetscErrorCode MatDestroy_MPIAIJ_MatMatMult(Mat);
extern PetscErrorCode PetscContainerDestroy_Mat_MatMatMultMPI(void*);
extern PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*);
extern PetscErrorCode MatGetSeqNonzeroStructure_MPIAIJ(Mat,Mat*);

EXTERN_C_BEGIN
extern PetscErrorCode MatMPIAIJSetPreallocation_MPIAIJ(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
EXTERN_C_END

/* TFS-based direct solver: only available for real double-precision builds */
#if !defined(PETSC_USE_COMPLEX) && !defined(PETSC_USE_REAL_SINGLE) && !defined(PETSC_USE_REAL___FLOAT128)
extern PetscErrorCode MatLUFactorSymbolic_MPIAIJ_TFS(Mat,IS,IS,const MatFactorInfo*,Mat*);
#endif
extern PetscErrorCode MatSolve_MPIAIJ(Mat,Vec,Vec);
extern PetscErrorCode MatILUFactor_MPIAIJ(Mat,IS,IS,const MatFactorInfo *);

EXTERN_C_BEGIN
extern PetscErrorCode MatGetDiagonalBlock_MPIAIJ(Mat,Mat *);
extern PetscErrorCode MatDiagonalScaleLocal_MPIAIJ(Mat,Vec);
EXTERN_C_END

#endif