1 /* 2 Include file for the matrix component of PETSc 3 */ 4 #ifndef __PETSCMAT_H 5 #define __PETSCMAT_H 6 #include "petscvec.h" 7 PETSC_EXTERN_CXX_BEGIN 8 9 /*S 10 Mat - Abstract PETSc matrix object 11 12 Level: beginner 13 14 Concepts: matrix; linear operator 15 16 .seealso: MatCreate(), MatType, MatSetType() 17 S*/ 18 typedef struct _p_Mat* Mat; 19 20 /*J 21 MatType - String with the name of a PETSc matrix or the creation function 22 with an optional dynamic library name, for example 23 http://www.mcs.anl.gov/petsc/lib.a:mymatcreate() 24 25 Level: beginner 26 27 .seealso: MatSetType(), Mat, MatSolverPackage 28 J*/ 29 #define MatType char* 30 #define MATSAME "same" 31 #define MATMAIJ "maij" 32 #define MATSEQMAIJ "seqmaij" 33 #define MATMPIMAIJ "mpimaij" 34 #define MATIS "is" 35 #define MATAIJ "aij" 36 #define MATSEQAIJ "seqaij" 37 #define MATSEQAIJPTHREAD "seqaijpthread" 38 #define MATAIJPTHREAD "aijpthread" 39 #define MATMPIAIJ "mpiaij" 40 #define MATAIJCRL "aijcrl" 41 #define MATSEQAIJCRL "seqaijcrl" 42 #define MATMPIAIJCRL "mpiaijcrl" 43 #define MATAIJCUSP "aijcusp" 44 #define MATSEQAIJCUSP "seqaijcusp" 45 #define MATMPIAIJCUSP "mpiaijcusp" 46 #define MATAIJPERM "aijperm" 47 #define MATSEQAIJPERM "seqaijperm" 48 #define MATMPIAIJPERM "mpiaijperm" 49 #define MATSHELL "shell" 50 #define MATDENSE "dense" 51 #define MATSEQDENSE "seqdense" 52 #define MATMPIDENSE "mpidense" 53 #define MATBAIJ "baij" 54 #define MATSEQBAIJ "seqbaij" 55 #define MATMPIBAIJ "mpibaij" 56 #define MATMPIADJ "mpiadj" 57 #define MATSBAIJ "sbaij" 58 #define MATSEQSBAIJ "seqsbaij" 59 #define MATMPISBAIJ "mpisbaij" 60 #define MATSEQBSTRM "seqbstrm" 61 #define MATMPIBSTRM "mpibstrm" 62 #define MATBSTRM "bstrm" 63 #define MATSEQSBSTRM "seqsbstrm" 64 #define MATMPISBSTRM "mpisbstrm" 65 #define MATSBSTRM "sbstrm" 66 #define MATDAAD "daad" 67 #define MATMFFD "mffd" 68 #define MATNORMAL "normal" 69 #define MATLRC "lrc" 70 #define MATSCATTER "scatter" 71 #define MATBLOCKMAT "blockmat" 72 #define 
MATCOMPOSITE "composite" 73 #define MATFFT "fft" 74 #define MATFFTW "fftw" 75 #define MATSEQCUFFT "seqcufft" 76 #define MATTRANSPOSEMAT "transpose" 77 #define MATSCHURCOMPLEMENT "schurcomplement" 78 #define MATPYTHON "python" 79 #define MATHYPRESTRUCT "hyprestruct" 80 #define MATHYPRESSTRUCT "hypresstruct" 81 #define MATSUBMATRIX "submatrix" 82 #define MATLOCALREF "localref" 83 #define MATNEST "nest" 84 #define MATIJ "ij" 85 86 /*J 87 MatSolverPackage - String with the name of a PETSc matrix solver type. 88 89 For example: "petsc" indicates what PETSc provides, "superlu" indicates either 90 SuperLU or SuperLU_Dist etc. 91 92 93 Level: beginner 94 95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType 96 J*/ 97 #define MatSolverPackage char* 98 #define MATSOLVERSPOOLES "spooles" 99 #define MATSOLVERSUPERLU "superlu" 100 #define MATSOLVERSUPERLU_DIST "superlu_dist" 101 #define MATSOLVERUMFPACK "umfpack" 102 #define MATSOLVERCHOLMOD "cholmod" 103 #define MATSOLVERESSL "essl" 104 #define MATSOLVERLUSOL "lusol" 105 #define MATSOLVERMUMPS "mumps" 106 #define MATSOLVERPASTIX "pastix" 107 #define MATSOLVERMATLAB "matlab" 108 #define MATSOLVERPETSC "petsc" 109 #define MATSOLVERPLAPACK "plapack" 110 #define MATSOLVERBAS "bas" 111 112 #define MATSOLVERBSTRM "bstrm" 113 #define MATSOLVERSBSTRM "sbstrm" 114 115 /*E 116 MatFactorType - indicates what type of factorization is requested 117 118 Level: beginner 119 120 Any additions/changes here MUST also be made in include/finclude/petscmat.h 121 122 .seealso: MatSolverPackage, MatGetFactor() 123 E*/ 124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; 125 extern const char *const MatFactorTypes[]; 126 127 extern PetscErrorCode MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*); 128 extern PetscErrorCode MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *); 129 extern PetscErrorCode MatFactorGetSolverPackage(Mat,const 
MatSolverPackage*); 130 extern PetscErrorCode MatGetFactorType(Mat,MatFactorType*); 131 132 /* Logging support */ 133 #define MAT_FILE_CLASSID 1211216 /* used to indicate matrices in binary files */ 134 extern PetscClassId MAT_CLASSID; 135 extern PetscClassId MAT_FDCOLORING_CLASSID; 136 extern PetscClassId MAT_PARTITIONING_CLASSID; 137 extern PetscClassId MAT_NULLSPACE_CLASSID; 138 extern PetscClassId MATMFFD_CLASSID; 139 140 /*E 141 MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices() 142 or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() is used to indicate 143 that the input matrix is to be replaced with the converted matrix. 144 145 Level: beginner 146 147 Any additions/changes here MUST also be made in include/finclude/petscmat.h 148 149 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert() 150 E*/ 151 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; 152 153 /*E 154 MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices() 155 include the matrix values. Currently it is only used by MatGetSeqNonzerostructure(). 
156 157 Level: beginner 158 159 .seealso: MatGetSeqNonzerostructure() 160 E*/ 161 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; 162 163 extern PetscErrorCode MatInitializePackage(const char[]); 164 165 extern PetscErrorCode MatCreate(MPI_Comm,Mat*); 166 PetscPolymorphicFunction(MatCreate,(MPI_Comm comm),(comm,&A),Mat,A) 167 PetscPolymorphicFunction(MatCreate,(),(PETSC_COMM_WORLD,&A),Mat,A) 168 extern PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt); 169 extern PetscErrorCode MatSetType(Mat,const MatType); 170 extern PetscErrorCode MatSetFromOptions(Mat); 171 extern PetscErrorCode MatSetUpPreallocation(Mat); 172 extern PetscErrorCode MatRegisterAll(const char[]); 173 extern PetscErrorCode MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat)); 174 extern PetscErrorCode MatRegisterBaseName(const char[],const char[],const char[]); 175 extern PetscErrorCode MatSetOptionsPrefix(Mat,const char[]); 176 extern PetscErrorCode MatAppendOptionsPrefix(Mat,const char[]); 177 extern PetscErrorCode MatGetOptionsPrefix(Mat,const char*[]); 178 179 /*MC 180 MatRegisterDynamic - Adds a new matrix type 181 182 Synopsis: 183 PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat)) 184 185 Not Collective 186 187 Input Parameters: 188 + name - name of a new user-defined matrix type 189 . path - path (either absolute or relative) the library containing this solver 190 . name_create - name of routine to create method context 191 - routine_create - routine to create method context 192 193 Notes: 194 MatRegisterDynamic() may be called multiple times to add several user-defined solvers. 195 196 If dynamic libraries are used, then the fourth input argument (routine_create) 197 is ignored. 
198 199 Sample usage: 200 .vb 201 MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a, 202 "MyMatCreate",MyMatCreate); 203 .ve 204 205 Then, your solver can be chosen with the procedural interface via 206 $ MatSetType(Mat,"my_mat") 207 or at runtime via the option 208 $ -mat_type my_mat 209 210 Level: advanced 211 212 Notes: ${PETSC_ARCH} occuring in pathname will be replaced with appropriate values. 213 If your function is not being put into a shared library then use VecRegister() instead 214 215 .keywords: Mat, register 216 217 .seealso: MatRegisterAll(), MatRegisterDestroy() 218 219 M*/ 220 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 221 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0) 222 #else 223 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d) 224 #endif 225 226 extern PetscBool MatRegisterAllCalled; 227 extern PetscFList MatList; 228 extern PetscFList MatColoringList; 229 extern PetscFList MatPartitioningList; 230 231 /*E 232 MatStructure - Indicates if the matrix has the same nonzero structure 233 234 Level: beginner 235 236 Any additions/changes here MUST also be made in include/finclude/petscmat.h 237 238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators() 239 E*/ 240 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure; 241 242 extern PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*); 243 extern PetscErrorCode MatCreateMPIDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*); 244 extern PetscErrorCode MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 245 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,nz,nnz,&A),Mat,A) 246 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,&A),Mat,A) 247 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt 
n,const PetscInt nnz[]),(PETSC_COMM_SELF,m,n,0,nnz,&A),Mat,A) 248 PetscPolymorphicFunction(MatCreateSeqAIJ,(PetscInt m,PetscInt n),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,&A),Mat,A) 249 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,m,n,nz,PETSC_NULL,A)) 250 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,m,n,0,nnz,A)) 251 PetscPolymorphicSubroutine(MatCreateSeqAIJ,(PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,m,n,0,PETSC_NULL,A)) 252 extern PetscErrorCode MatCreateMPIAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 253 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 254 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 255 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 256 PetscPolymorphicFunction(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 257 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 258 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,m,n,M,N,0,nnz,0,onz,A)) 259 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 260 
PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 261 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 262 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 263 PetscPolymorphicFunction(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 264 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 265 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,nnz,0,onz,A)) 266 PetscPolymorphicSubroutine(MatCreateMPIAIJ,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 267 extern PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 268 extern PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*); 269 270 extern PetscErrorCode MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 271 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A) 272 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt 
nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A) 273 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A) 274 PetscPolymorphicFunction(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A) 275 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A)) 276 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A)) 277 PetscPolymorphicSubroutine(MatCreateSeqBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A)) 278 extern PetscErrorCode MatCreateMPIBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 279 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 280 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 281 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 282 PetscPolymorphicFunction(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 283 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 284 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt 
m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A)) 285 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 286 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 287 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 288 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 289 PetscPolymorphicFunction(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 290 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 291 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A)) 292 PetscPolymorphicSubroutine(MatCreateMPIBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 293 extern PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*); 294 295 extern PetscErrorCode MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*); 296 extern PetscErrorCode 
MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 297 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,nz,nnz,&A),Mat,A) 298 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,&A),Mat,A) 299 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[]),(PETSC_COMM_SELF,bs,m,n,0,nnz,&A),Mat,A) 300 PetscPolymorphicFunction(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,&A),Mat,A) 301 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt nz,Mat *A),(PETSC_COMM_SELF,bs,m,n,nz,PETSC_NULL,A)) 302 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,const PetscInt nnz[],Mat *A),(PETSC_COMM_SELF,bs,m,n,0,nnz,A)) 303 PetscPolymorphicSubroutine(MatCreateSeqSBAIJ,(PetscInt bs,PetscInt m,PetscInt n,Mat *A),(PETSC_COMM_SELF,bs,m,n,0,PETSC_NULL,A)) 304 305 extern PetscErrorCode MatCreateMPISBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 306 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(comm,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 307 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 308 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(comm,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 309 PetscPolymorphicFunction(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt 
M,PetscInt N),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 310 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(comm,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 311 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(comm,bs,m,n,M,N,0,nnz,0,onz,A)) 312 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(MPI_Comm comm,PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat *A),(comm,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 313 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,const PetscInt nnz[],PetscInt onz,const PetscInt onnz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,nnz,onz,onnz,&A),Mat,A) 314 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,&A),Mat,A) 315 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[]),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,&A),Mat,A) 316 PetscPolymorphicFunction(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,&A),Mat,A) 317 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt nz,PetscInt nnz,Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,nz,PETSC_NULL,nnz,PETSC_NULL,A)) 318 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt nnz[],const PetscInt onz[],Mat *A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,nnz,0,onz,A)) 319 PetscPolymorphicSubroutine(MatCreateMPISBAIJ,(PetscInt bs,PetscInt m,PetscInt n,PetscInt M,PetscInt N,Mat 
*A),(PETSC_COMM_WORLD,bs,m,n,M,N,0,PETSC_NULL,0,PETSC_NULL,A)) 320 extern PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 321 extern PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 322 323 extern PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*); 324 PetscPolymorphicFunction(MatCreateShell,(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(comm,m,n,M,N,ctx,&A),Mat,A) 325 PetscPolymorphicFunction(MatCreateShell,(PetscInt m,PetscInt n,PetscInt M,PetscInt N,void *ctx),(PETSC_COMM_WORLD,m,n,M,N,ctx,&A),Mat,A) 326 extern PetscErrorCode MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*); 327 extern PetscErrorCode MatCreateNormal(Mat,Mat*); 328 PetscPolymorphicFunction(MatCreateNormal,(Mat mat),(mat,&A),Mat,A) 329 extern PetscErrorCode MatCreateLRC(Mat,Mat,Mat,Mat*); 330 extern PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*); 331 extern PetscErrorCode MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 332 extern PetscErrorCode MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 333 334 extern PetscErrorCode MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 335 extern PetscErrorCode MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 336 extern PetscErrorCode MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 337 extern PetscErrorCode MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 338 339 extern PetscErrorCode 
MatCreateScatter(MPI_Comm,VecScatter,Mat*); 340 extern PetscErrorCode MatScatterSetVecScatter(Mat,VecScatter); 341 extern PetscErrorCode MatScatterGetVecScatter(Mat,VecScatter*); 342 extern PetscErrorCode MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*); 343 extern PetscErrorCode MatCompositeAddMat(Mat,Mat); 344 extern PetscErrorCode MatCompositeMerge(Mat); 345 extern PetscErrorCode MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*); 346 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType; 347 extern PetscErrorCode MatCompositeSetType(Mat,MatCompositeType); 348 349 extern PetscErrorCode MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*); 350 extern PetscErrorCode MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*); 351 352 extern PetscErrorCode MatCreateTranspose(Mat,Mat*); 353 extern PetscErrorCode MatCreateSubMatrix(Mat,IS,IS,Mat*); 354 extern PetscErrorCode MatSubMatrixUpdate(Mat,Mat,IS,IS); 355 extern PetscErrorCode MatCreateLocalRef(Mat,IS,IS,Mat*); 356 357 extern PetscErrorCode MatPythonSetType(Mat,const char[]); 358 359 extern PetscErrorCode MatSetUp(Mat); 360 extern PetscErrorCode MatDestroy(Mat*); 361 362 extern PetscErrorCode MatConjugate(Mat); 363 extern PetscErrorCode MatRealPart(Mat); 364 extern PetscErrorCode MatImaginaryPart(Mat); 365 extern PetscErrorCode MatGetDiagonalBlock(Mat,Mat*); 366 extern PetscErrorCode MatGetTrace(Mat,PetscScalar*); 367 extern PetscErrorCode MatInvertBlockDiagonal(Mat,PetscScalar **); 368 369 /* ------------------------------------------------------------*/ 370 extern PetscErrorCode MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 371 extern PetscErrorCode MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 372 extern PetscErrorCode MatSetValuesRow(Mat,PetscInt,const PetscScalar[]); 373 extern PetscErrorCode 
MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]); 374 extern PetscErrorCode MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]); 375 376 /*S 377 MatStencil - Data structure (C struct) for storing information about a single row or 378 column of a matrix as index on an associated grid. 379 380 Level: beginner 381 382 Concepts: matrix; linear operator 383 384 .seealso: MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockStencil() 385 S*/ 386 typedef struct { 387 PetscInt k,j,i,c; 388 } MatStencil; 389 390 extern PetscErrorCode MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 391 extern PetscErrorCode MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 392 extern PetscErrorCode MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt); 393 394 extern PetscErrorCode MatSetColoring(Mat,ISColoring); 395 extern PetscErrorCode MatSetValuesAdic(Mat,void*); 396 extern PetscErrorCode MatSetValuesAdifor(Mat,PetscInt,void*); 397 398 /*E 399 MatAssemblyType - Indicates if the matrix is now to be used, or if you plan 400 to continue to add values to it 401 402 Level: beginner 403 404 .seealso: MatAssemblyBegin(), MatAssemblyEnd() 405 E*/ 406 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; 407 extern PetscErrorCode MatAssemblyBegin(Mat,MatAssemblyType); 408 extern PetscErrorCode MatAssemblyEnd(Mat,MatAssemblyType); 409 extern PetscErrorCode MatAssembled(Mat,PetscBool *); 410 411 412 413 /*E 414 MatOption - Options that may be set for a matrix and its behavior or storage 415 416 Level: beginner 417 418 Any additions/changes here MUST also be made in include/finclude/petscmat.h 419 420 .seealso: MatSetOption() 421 E*/ 422 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS, 423 MAT_SYMMETRIC, 424 MAT_STRUCTURALLY_SYMMETRIC, 425 MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES, 426 
MAT_NEW_NONZERO_LOCATION_ERR, 427 MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE, 428 MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES, 429 MAT_USE_INODES, 430 MAT_HERMITIAN, 431 MAT_SYMMETRY_ETERNAL, 432 MAT_CHECK_COMPRESSED_ROW, 433 MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR, 434 MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR, 435 MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS, 436 NUM_MAT_OPTIONS} MatOption; 437 extern const char *MatOptions[]; 438 extern PetscErrorCode MatSetOption(Mat,MatOption,PetscBool ); 439 extern PetscErrorCode MatGetType(Mat,const MatType*); 440 PetscPolymorphicFunction(MatGetType,(Mat mat),(mat,&t),const MatType,t) 441 442 extern PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); 443 extern PetscErrorCode MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 444 extern PetscErrorCode MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 445 extern PetscErrorCode MatGetRowUpperTriangular(Mat); 446 extern PetscErrorCode MatRestoreRowUpperTriangular(Mat); 447 extern PetscErrorCode MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 448 extern PetscErrorCode MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 449 extern PetscErrorCode MatGetColumnVector(Mat,Vec,PetscInt); 450 extern PetscErrorCode MatGetArray(Mat,PetscScalar *[]); 451 PetscPolymorphicFunction(MatGetArray,(Mat mat),(mat,&a),PetscScalar*,a) 452 extern PetscErrorCode MatRestoreArray(Mat,PetscScalar *[]); 453 extern PetscErrorCode MatGetBlockSize(Mat,PetscInt *); 454 PetscPolymorphicFunction(MatGetBlockSize,(Mat mat),(mat,&a),PetscInt,a) 455 extern PetscErrorCode MatSetBlockSize(Mat,PetscInt); 456 457 458 extern PetscErrorCode MatMult(Mat,Vec,Vec); 459 extern PetscErrorCode MatMultDiagonalBlock(Mat,Vec,Vec); 460 extern PetscErrorCode MatMultAdd(Mat,Vec,Vec,Vec); 461 
PetscPolymorphicSubroutine(MatMultAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 462 extern PetscErrorCode MatMultTranspose(Mat,Vec,Vec); 463 extern PetscErrorCode MatMultHermitianTranspose(Mat,Vec,Vec); 464 extern PetscErrorCode MatIsTranspose(Mat,Mat,PetscReal,PetscBool *); 465 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B,PetscReal tol),(A,B,tol,&t),PetscBool ,t) 466 PetscPolymorphicFunction(MatIsTranspose,(Mat A,Mat B),(A,B,0,&t),PetscBool ,t) 467 extern PetscErrorCode MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *); 468 extern PetscErrorCode MatMultTransposeAdd(Mat,Vec,Vec,Vec); 469 extern PetscErrorCode MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec); 470 PetscPolymorphicSubroutine(MatMultTransposeAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 471 extern PetscErrorCode MatMultConstrained(Mat,Vec,Vec); 472 extern PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec); 473 extern PetscErrorCode MatMatSolve(Mat,Mat,Mat); 474 475 /*E 476 MatDuplicateOption - Indicates if a duplicated sparse matrix should have 477 its numerical values copied over or just its nonzero structure. 478 479 Level: beginner 480 481 Any additions/changes here MUST also be made in include/finclude/petscmat.h 482 483 $ MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix 484 $ this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you 485 $ have several matrices with the same nonzero pattern. 
486 487 .seealso: MatDuplicate() 488 E*/ 489 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; 490 491 extern PetscErrorCode MatConvert(Mat,const MatType,MatReuse,Mat*); 492 PetscPolymorphicFunction(MatConvert,(Mat A,const MatType t),(A,t,MAT_INITIAL_MATRIX,&a),Mat,a) 493 extern PetscErrorCode MatDuplicate(Mat,MatDuplicateOption,Mat*); 494 PetscPolymorphicFunction(MatDuplicate,(Mat A,MatDuplicateOption o),(A,o,&a),Mat,a) 495 PetscPolymorphicFunction(MatDuplicate,(Mat A),(A,MAT_COPY_VALUES,&a),Mat,a) 496 497 498 extern PetscErrorCode MatCopy(Mat,Mat,MatStructure); 499 extern PetscErrorCode MatView(Mat,PetscViewer); 500 extern PetscErrorCode MatIsSymmetric(Mat,PetscReal,PetscBool *); 501 PetscPolymorphicFunction(MatIsSymmetric,(Mat A,PetscReal tol),(A,tol,&t),PetscBool ,t) 502 PetscPolymorphicFunction(MatIsSymmetric,(Mat A),(A,0,&t),PetscBool ,t) 503 extern PetscErrorCode MatIsStructurallySymmetric(Mat,PetscBool *); 504 PetscPolymorphicFunction(MatIsStructurallySymmetric,(Mat A),(A,&t),PetscBool ,t) 505 extern PetscErrorCode MatIsHermitian(Mat,PetscReal,PetscBool *); 506 PetscPolymorphicFunction(MatIsHermitian,(Mat A),(A,0,&t),PetscBool ,t) 507 extern PetscErrorCode MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *); 508 extern PetscErrorCode MatIsHermitianKnown(Mat,PetscBool *,PetscBool *); 509 extern PetscErrorCode MatMissingDiagonal(Mat,PetscBool *,PetscInt *); 510 extern PetscErrorCode MatLoad(Mat, PetscViewer); 511 512 extern PetscErrorCode MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 513 extern PetscErrorCode MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 514 extern PetscErrorCode MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 515 extern PetscErrorCode MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 
516 517 /*S 518 MatInfo - Context of matrix information, used with MatGetInfo() 519 520 In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE 521 522 Level: intermediate 523 524 Concepts: matrix^nonzero information 525 526 .seealso: MatGetInfo(), MatInfoType 527 S*/ 528 typedef struct { 529 PetscLogDouble block_size; /* block size */ 530 PetscLogDouble nz_allocated,nz_used,nz_unneeded; /* number of nonzeros */ 531 PetscLogDouble memory; /* memory allocated */ 532 PetscLogDouble assemblies; /* number of matrix assemblies called */ 533 PetscLogDouble mallocs; /* number of mallocs during MatSetValues() */ 534 PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */ 535 PetscLogDouble factor_mallocs; /* number of mallocs during factorization */ 536 } MatInfo; 537 538 /*E 539 MatInfoType - Indicates if you want information about the local part of the matrix, 540 the entire parallel matrix or the maximum over all the local parts. 541 542 Level: beginner 543 544 Any additions/changes here MUST also be made in include/finclude/petscmat.h 545 546 .seealso: MatGetInfo(), MatInfo 547 E*/ 548 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; 549 extern PetscErrorCode MatGetInfo(Mat,MatInfoType,MatInfo*); 550 extern PetscErrorCode MatGetDiagonal(Mat,Vec); 551 extern PetscErrorCode MatGetRowMax(Mat,Vec,PetscInt[]); 552 extern PetscErrorCode MatGetRowMin(Mat,Vec,PetscInt[]); 553 extern PetscErrorCode MatGetRowMaxAbs(Mat,Vec,PetscInt[]); 554 extern PetscErrorCode MatGetRowMinAbs(Mat,Vec,PetscInt[]); 555 extern PetscErrorCode MatGetRowSum(Mat,Vec); 556 extern PetscErrorCode MatTranspose(Mat,MatReuse,Mat*); 557 PetscPolymorphicFunction(MatTranspose,(Mat A),(A,MAT_INITIAL_MATRIX,&t),Mat,t) 558 extern PetscErrorCode MatHermitianTranspose(Mat,MatReuse,Mat*); 559 extern PetscErrorCode MatPermute(Mat,IS,IS,Mat *); 560 PetscPolymorphicFunction(MatPermute,(Mat A,IS is1,IS is2),(A,is1,is2,&t),Mat,t) 561 extern 
PetscErrorCode MatDiagonalScale(Mat,Vec,Vec); 562 extern PetscErrorCode MatDiagonalSet(Mat,Vec,InsertMode); 563 extern PetscErrorCode MatEqual(Mat,Mat,PetscBool *); 564 PetscPolymorphicFunction(MatEqual,(Mat A,Mat B),(A,B,&t),PetscBool ,t) 565 extern PetscErrorCode MatMultEqual(Mat,Mat,PetscInt,PetscBool *); 566 extern PetscErrorCode MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *); 567 extern PetscErrorCode MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *); 568 extern PetscErrorCode MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *); 569 570 extern PetscErrorCode MatNorm(Mat,NormType,PetscReal *); 571 PetscPolymorphicFunction(MatNorm,(Mat A,NormType t),(A,t,&n),PetscReal,n) 572 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *); 573 extern PetscErrorCode MatZeroEntries(Mat); 574 extern PetscErrorCode MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 575 extern PetscErrorCode MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec); 576 extern PetscErrorCode MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec); 577 extern PetscErrorCode MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 578 extern PetscErrorCode MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec); 579 580 extern PetscErrorCode MatUseScaledForm(Mat,PetscBool ); 581 extern PetscErrorCode MatScaleSystem(Mat,Vec,Vec); 582 extern PetscErrorCode MatUnScaleSystem(Mat,Vec,Vec); 583 584 extern PetscErrorCode MatGetSize(Mat,PetscInt*,PetscInt*); 585 extern PetscErrorCode MatGetLocalSize(Mat,PetscInt*,PetscInt*); 586 extern PetscErrorCode MatGetOwnershipRange(Mat,PetscInt*,PetscInt*); 587 extern PetscErrorCode MatGetOwnershipRanges(Mat,const PetscInt**); 588 extern PetscErrorCode MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*); 589 extern PetscErrorCode MatGetOwnershipRangesColumn(Mat,const PetscInt**); 590 591 extern PetscErrorCode MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 592 extern PetscErrorCode 
MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 593 extern PetscErrorCode MatDestroyMatrices(PetscInt,Mat *[]); 594 extern PetscErrorCode MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *); 595 extern PetscErrorCode MatGetLocalSubMatrix(Mat,IS,IS,Mat*); 596 extern PetscErrorCode MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*); 597 extern PetscErrorCode MatGetSeqNonzeroStructure(Mat,Mat*); 598 extern PetscErrorCode MatDestroySeqNonzeroStructure(Mat*); 599 600 extern PetscErrorCode MatMerge(MPI_Comm,Mat,PetscInt,MatReuse,Mat*); 601 extern PetscErrorCode MatMerge_SeqsToMPI(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*); 602 extern PetscErrorCode MatMerge_SeqsToMPISymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*); 603 extern PetscErrorCode MatMerge_SeqsToMPINumeric(Mat,Mat); 604 extern PetscErrorCode MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*); 605 extern PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*); 606 extern PetscErrorCode MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,PetscInt*,Mat*); 607 extern PetscErrorCode MatGetBrowsOfAoCols(Mat,Mat,MatReuse,PetscInt**,PetscInt**,MatScalar**,Mat*); 608 #if defined (PETSC_USE_CTABLE) 609 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *); 610 #else 611 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *); 612 #endif 613 extern PetscErrorCode MatGetGhosts(Mat, PetscInt *,const PetscInt *[]); 614 615 extern PetscErrorCode MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt); 616 617 extern PetscErrorCode MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*); 618 extern PetscErrorCode MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*); 619 extern PetscErrorCode MatMatMultNumeric(Mat,Mat,Mat); 620 621 extern PetscErrorCode MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*); 622 extern PetscErrorCode MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*); 623 extern PetscErrorCode MatPtAPNumeric(Mat,Mat,Mat); 624 625 extern PetscErrorCode 
MatMatMultTranspose(Mat,Mat,MatReuse,PetscReal,Mat*); 626 extern PetscErrorCode MatMatMultTransposeSymbolic(Mat,Mat,PetscReal,Mat*); 627 extern PetscErrorCode MatMatMultTransposeNumeric(Mat,Mat,Mat); 628 629 extern PetscErrorCode MatAXPY(Mat,PetscScalar,Mat,MatStructure); 630 extern PetscErrorCode MatAYPX(Mat,PetscScalar,Mat,MatStructure); 631 632 extern PetscErrorCode MatScale(Mat,PetscScalar); 633 extern PetscErrorCode MatShift(Mat,PetscScalar); 634 635 extern PetscErrorCode MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 636 extern PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 637 extern PetscErrorCode MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 638 extern PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 639 extern PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 640 extern PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 641 extern PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 642 extern PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 643 extern PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 644 extern PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 645 646 extern PetscErrorCode MatStashSetInitialSize(Mat,PetscInt,PetscInt); 647 extern PetscErrorCode MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*); 648 649 extern PetscErrorCode MatInterpolate(Mat,Vec,Vec); 650 extern PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec); 651 PetscPolymorphicSubroutine(MatInterpolateAdd,(Mat A,Vec x,Vec y),(A,x,y,y)) 652 extern PetscErrorCode MatRestrict(Mat,Vec,Vec); 653 extern PetscErrorCode MatGetVecs(Mat,Vec*,Vec*); 654 extern 
PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*); 655 extern PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,Mat*); 656 extern PetscErrorCode MatFindZeroDiagonals(Mat,IS*); 657 658 /*MC 659 MatSetValue - Set a single entry into a matrix. 660 661 Not collective 662 663 Input Parameters: 664 + m - the matrix 665 . row - the row location of the entry 666 . col - the column location of the entry 667 . value - the value to insert 668 - mode - either INSERT_VALUES or ADD_VALUES 669 670 Notes: 671 For efficiency one should use MatSetValues() and set several or many 672 values simultaneously if possible. 673 674 Level: beginner 675 676 .seealso: MatSetValues(), MatSetValueLocal() 677 M*/ 678 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);} 679 680 /* MatGetValue - Gets a single entry (row i, column j) from a matrix; thin wrapper around MatGetValues(). For efficiency fetch several values at once with MatGetValues(). */ PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);} 681 682 /* MatSetValueLocal - Sets a single entry using the local numbering; thin wrapper around MatSetValuesLocal(). See MatSetValue() for the meaning of the arguments. */ PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);} 683 684 /*MC 685 MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per 686 row in a matrix providing the data that one can use to correctly preallocate the matrix. 687 688 Synopsis: 689 PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 690 691 Collective on MPI_Comm 692 693 Input Parameters: 694 + comm - the communicator that will share the eventually allocated matrix 695 .
nrows - the number of LOCAL rows in the matrix 696 - ncols - the number of LOCAL columns in the matrix 697 698 Output Parameters: 699 + dnz - the array that will be passed to the matrix preallocation routines 700 - onz - the other array passed to the matrix preallocation routines 701 702 703 Level: intermediate 704 705 Notes: 706 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 707 708 Do not malloc or free dnz and onz, that is handled internally by these routines 709 710 Use MatPreallocateSymmetricInitialize() for symmetric matrices (MPISBAIJ matrices) 711 712 This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize(). 713 714 Concepts: preallocation^Matrix 715 716 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 717 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 718 M*/ 719 #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \ 720 { \ 721 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \ 722 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 723 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 724 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 725 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\ 726 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 727 728 /*MC 729 MatPreallocateSymmetricInitialize - Begins the block of code that will count the number of nonzeros per 730 row in a matrix providing the data that one can use to correctly preallocate the matrix.
731 732 Synopsis: 733 PetscErrorCode MatPreallocateSymmetricInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 734 735 Collective on MPI_Comm 736 737 Input Parameters: 738 + comm - the communicator that will share the eventually allocated matrix 739 . nrows - the number of LOCAL rows in the matrix 740 - ncols - the number of LOCAL columns in the matrix 741 742 Output Parameters: 743 + dnz - the array that will be passed to the matrix preallocation routines 744 - onz - the other array passed to the matrix preallocation routines 745 746 747 Level: intermediate 748 749 Notes: 750 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 751 752 Do not malloc or free dnz and onz, that is handled internally by these routines 753 754 This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize(). 755 756 Concepts: preallocation^Matrix 757 758 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 759 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 760 M*/ 761 #define MatPreallocateSymmetricInitialize(comm,nrows,ncols,dnz,onz) 0; \ 762 { \ 763 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__end; \ 764 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 765 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 766 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 767 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr);\ 768 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 769 770 /*MC 771 MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 772 inserted using a local number of the rows and columns
773 774 Synopsis: 775 PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows, PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 776 777 Not Collective 778 779 Input Parameters: 780 + rmap - the row mapping from local numbering to global numbering 781 . nrows - the number of rows indicated 782 . rows - the indices of the rows 783 . cmap - the column mapping from local to global numbering 784 . ncols - the number of columns in the matrix 785 . cols - the columns indicated 786 . dnz - the array that will be passed to the matrix preallocation routines 787 - onz - the other array passed to the matrix preallocation routines 788 789 790 Level: intermediate 791 792 Notes: 793 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 794 795 Do not malloc or free dnz and onz, that is handled internally by these routines 796 797 Concepts: preallocation^Matrix 798 799 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 800 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal() 801 M*/ 802 #define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \ 803 {\ 804 PetscInt __l;\ 805 _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\ 806 _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\ 807 for (__l=0;__l<nrows;__l++) {\ 808 _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 809 }\ 810 } 811 812 /*MC 813 MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 814 inserted using a local number of the rows and columns 815 816 Synopsis: 817 PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 818 819 Not
Collective 820 821 Input Parameters: 822 + map - the mapping between local numbering and global numbering 823 . nrows - the number of rows indicated 824 . rows - the indices of the rows 825 . ncols - the number of columns in the matrix 826 . cols - the columns indicated 827 . dnz - the array that will be passed to the matrix preallocation routines 828 - onz - the other array passed to the matrix preallocation routines 829 830 831 Level: intermediate 832 833 Notes: 834 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 835 836 Do not malloc or free dnz and onz that is handled internally by these routines 837 838 Concepts: preallocation^Matrix 839 840 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 841 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 842 M*/ 843 #define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\ 844 {\ 845 PetscInt __l;\ 846 _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\ 847 _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\ 848 for (__l=0;__l<nrows;__l++) {\ 849 _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\ 850 }\ 851 } 852 853 /*MC 854 MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 855 inserted using a local number of the rows and columns 856 857 Synopsis: 858 PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 859 860 Not Collective 861 862 Input Parameters: 863 + row - the row 864 .
ncols - the number of columns in the matrix 865 - cols - the columns indicated 866 867 Output Parameters: 868 + dnz - the array that will be passed to the matrix preallocation routines 869 - onz - the other array passed to the matrix preallocation routines 870 871 872 Level: intermediate 873 874 Notes: 875 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 876 877 Do not malloc or free dnz and onz that is handled internally by these routines 878 879 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize(). 880 881 Concepts: preallocation^Matrix 882 883 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 884 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 885 M*/ 886 #define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\ 887 { PetscInt __i; \ 888 if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\ 889 if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\ 890 for (__i=0; __i<nc; __i++) {\ 891 if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \ 892 else dnz[row - __rstart]++;\ 893 }\ 894 } 895 896 /*MC 897 MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be 898 inserted using a local number of the rows and columns 899 900 Synopsis: 901 PetscErrorCode MatPreallocateSymmetricSet(PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 902 903 Not Collective 904 905 Input Parameters: 906 + nrows - the number of rows indicated 907 . rows - the indices of the rows 908 .
ncols - the number of columns in the matrix 909 . cols - the columns indicated 910 . dnz - the array that will be passed to the matrix preallocation routines 911 - onz - the other array passed to the matrix preallocation routines 912 913 914 Level: intermediate 915 916 Notes: 917 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 918 919 Do not malloc or free dnz and onz that is handled internally by these routines 920 921 This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize(). 922 923 Concepts: preallocation^Matrix 924 925 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(), 926 MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal() 927 M*/ 928 #define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\ 929 { PetscInt __i; \ 930 for (__i=0; __i<nc; __i++) {\ 931 if (cols[__i] >= __end) onz[row - __rstart]++; \ 932 else if (cols[__i] >= row) dnz[row - __rstart]++;\ 933 }\ 934 } 935 936 /*MC 937 MatPreallocateLocation - An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists 938 939 Synopsis: 940 PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz) 941 942 Not Collective 943 944 Input Parameters: 945 . A - matrix 946 . row - row where values exist (must be local to this process) 947 . ncols - number of columns 948 . cols - columns with nonzeros 949 . dnz - the array that will be passed to the matrix preallocation routines 950 - onz - the other array passed to the matrix preallocation routines 951 952 953 Level: intermediate 954 955 Notes: 956 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.
957 958 Do not malloc or free dnz and onz that is handled internally by these routines 959 960 This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocate.... routines. 961 962 Concepts: preallocation^Matrix 963 964 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 965 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 966 M*/ 967 #define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr = MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);} 968 969 970 /*MC 971 MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per 972 row in a matrix providing the data that one can use to correctly preallocate the matrix. 973 974 Synopsis: 975 PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz) 976 977 Collective on MPI_Comm 978 979 Input Parameters: 980 + dnz - the array that was passed to the matrix preallocation routines 981 - onz - the other array passed to the matrix preallocation routines 982 983 984 Level: intermediate 985 986 Notes: 987 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 988 989 Do not malloc or free dnz and onz that is handled internally by these routines 990 991 This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().
992 993 Concepts: preallocation^Matrix 994 995 .seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 996 MatPreallocateSymmetricInitialize(), MatPreallocateSymmetricSetLocal() 997 M*/ 998 #define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);} 999 1000 1001 1002 /* Routines unique to particular data structures */ 1003 extern PetscErrorCode MatShellGetContext(Mat,void *); 1004 PetscPolymorphicFunction(MatShellGetContext,(Mat A),(A,&t),void*,t) 1005 1006 extern PetscErrorCode MatInodeAdjustForInodes(Mat,IS*,IS*); 1007 extern PetscErrorCode MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *); 1008 1009 extern PetscErrorCode MatSeqAIJSetColumnIndices(Mat,PetscInt[]); 1010 extern PetscErrorCode MatSeqBAIJSetColumnIndices(Mat,PetscInt[]); 1011 extern PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1012 extern PetscErrorCode MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1013 extern PetscErrorCode MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*); 1014 extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool); 1015 1016 #define MAT_SKIP_ALLOCATION -4 1017 1018 extern PetscErrorCode MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 1019 PetscPolymorphicSubroutine(MatSeqBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz)) 1020 extern PetscErrorCode MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]); 1021 PetscPolymorphicSubroutine(MatSeqSBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[]),(A,bs,0,nnz)) 1022 extern PetscErrorCode MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]); 1023 PetscPolymorphicSubroutine(MatSeqAIJSetPreallocation,(Mat 
A,const PetscInt nnz[]),(A,0,nnz)) 1024 1025 extern PetscErrorCode MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1026 PetscPolymorphicSubroutine(MatMPIBAIJSetPreallocation,(Mat A,PetscInt bs,const PetscInt nnz[],const PetscInt onz[]),(A,bs,0,nnz,0,onz)) 1027 extern PetscErrorCode MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1028 extern PetscErrorCode MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 1029 extern PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat,const PetscInt [],const PetscInt [],const PetscScalar []); 1030 extern PetscErrorCode MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 1031 extern PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 1032 extern PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 1033 extern PetscErrorCode MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]); 1034 extern PetscErrorCode MatMPIDenseSetPreallocation(Mat,PetscScalar[]); 1035 extern PetscErrorCode MatSeqDenseSetPreallocation(Mat,PetscScalar[]); 1036 extern PetscErrorCode MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]); 1037 extern PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]); 1038 extern PetscErrorCode MatAdicSetLocalFunction(Mat,void (*)(void)); 1039 1040 extern PetscErrorCode MatSeqDenseSetLDA(Mat,PetscInt); 1041 extern PetscErrorCode MatDenseGetLocalMatrix(Mat,Mat*); 1042 1043 extern PetscErrorCode MatStoreValues(Mat); 1044 extern PetscErrorCode MatRetrieveValues(Mat); 1045 1046 extern PetscErrorCode MatDAADSetCtx(Mat,void*); 1047 1048 extern PetscErrorCode MatFindNonzeroRows(Mat,IS*); 1049 /* 1050 These routines are not usually accessed directly, rather solving is 1051 done through the KSP and PC interfaces. 
1052 */ 1053 1054 /*J 1055 MatOrderingType - String with the name of a PETSc matrix ordering or the creation function 1056 with an optional dynamic library name, for example 1057 http://www.mcs.anl.gov/petsc/lib.a:orderingcreate() 1058 1059 Level: beginner 1060 1061 Cannot use const because the PC objects manipulate the string 1062 1063 .seealso: MatGetOrdering() 1064 J*/ 1065 #define MatOrderingType char* 1066 #define MATORDERINGNATURAL "natural" 1067 #define MATORDERINGND "nd" 1068 #define MATORDERING1WD "1wd" 1069 #define MATORDERINGRCM "rcm" 1070 #define MATORDERINGQMD "qmd" 1071 #define MATORDERINGROWLENGTH "rowlength" 1072 #define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ 1073 1074 extern PetscErrorCode MatGetOrdering(Mat,const MatOrderingType,IS*,IS*); 1075 extern PetscErrorCode MatGetOrderingList(PetscFList *list); 1076 extern PetscErrorCode MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*)); 1077 1078 /*MC 1079 MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 1080 1081 Synopsis: 1082 PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering)) 1083 1084 Not Collective 1085 1086 Input Parameters: 1087 + sname - name of ordering (for example MATORDERINGND) 1088 . path - location of library where creation routine is 1089 . name - name of function that creates the ordering type,a string 1090 - function - function pointer that creates the ordering 1091 1092 Level: developer 1093 1094 If dynamic libraries are used, then the fourth input argument (function) 1095 is ignored. 
1096 1097 Sample usage: 1098 .vb 1099 MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a, 1100 "MyOrder",MyOrder); 1101 .ve 1102 1103 Then, your ordering can be chosen with the procedural interface via 1104 $ MatOrderingSetType(part,"my_order") 1105 or at runtime via the option 1106 $ -pc_factor_mat_ordering_type my_order 1107 1108 ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values. 1109 1110 .keywords: matrix, ordering, register 1111 1112 .seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll() 1113 M*/ 1114 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1115 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0) 1116 #else 1117 #define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d) 1118 #endif 1119 1120 extern PetscErrorCode MatOrderingRegisterDestroy(void); 1121 extern PetscErrorCode MatOrderingRegisterAll(const char[]); 1122 extern PetscBool MatOrderingRegisterAllCalled; 1123 extern PetscFList MatOrderingList; 1124 1125 extern PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS); 1126 1127 /*S 1128 MatFactorShiftType - Numeric Shift. 1129 1130 Level: beginner 1131 1132 S*/ 1133 typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType; 1134 extern const char *MatFactorShiftTypes[]; 1135 1136 /*S 1137 MatFactorInfo - Data passed into the matrix factorization routines 1138 1139 In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use 1140 $ MatFactorInfo info(MAT_FACTORINFO_SIZE) 1141 1142 Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC. 1143 1144 You can use MatFactorInfoInitialize() to set default values.
1145 1146 Level: developer 1147 1148 .seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(), 1149 MatFactorInfoInitialize() 1150 1151 S*/ 1152 typedef struct { 1153 PetscReal diagonal_fill; /* force diagonal to fill in if initially not filled */ 1154 PetscReal usedt; 1155 PetscReal dt; /* drop tolerance */ 1156 PetscReal dtcol; /* tolerance for pivoting */ 1157 PetscReal dtcount; /* maximum nonzeros to be allowed per row */ 1158 PetscReal fill; /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */ 1159 PetscReal levels; /* ICC/ILU(levels) */ 1160 PetscReal pivotinblocks; /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0 1161 factorization may be faster if do not pivot */ 1162 PetscReal zeropivot; /* pivot is called zero if less than this */ 1163 PetscReal shifttype; /* type of shift added to matrix factor to prevent zero pivots */ 1164 PetscReal shiftamount; /* how large the shift is */ 1165 } MatFactorInfo; 1166 1167 extern PetscErrorCode MatFactorInfoInitialize(MatFactorInfo*); 1168 extern PetscErrorCode MatCholeskyFactor(Mat,IS,const MatFactorInfo*); 1169 extern PetscErrorCode MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1170 extern PetscErrorCode MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*); 1171 extern PetscErrorCode MatLUFactor(Mat,IS,IS,const MatFactorInfo*); 1172 extern PetscErrorCode MatILUFactor(Mat,IS,IS,const MatFactorInfo*); 1173 extern PetscErrorCode MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1174 extern PetscErrorCode MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*); 1175 extern PetscErrorCode MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*); 1176 extern PetscErrorCode MatICCFactor(Mat,IS,const MatFactorInfo*); 1177 extern PetscErrorCode MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*); 1178 extern PetscErrorCode MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*); 1179 extern 
PetscErrorCode MatSolve(Mat,Vec,Vec); 1180 extern PetscErrorCode MatForwardSolve(Mat,Vec,Vec); 1181 extern PetscErrorCode MatBackwardSolve(Mat,Vec,Vec); 1182 extern PetscErrorCode MatSolveAdd(Mat,Vec,Vec,Vec); 1183 extern PetscErrorCode MatSolveTranspose(Mat,Vec,Vec); 1184 extern PetscErrorCode MatSolveTransposeAdd(Mat,Vec,Vec,Vec); 1185 extern PetscErrorCode MatSolves(Mat,Vecs,Vecs); 1186 1187 extern PetscErrorCode MatSetUnfactored(Mat); 1188 1189 /*E 1190 MatSORType - What type of (S)SOR to perform 1191 1192 Level: beginner 1193 1194 May be bitwise ORd together 1195 1196 Any additions/changes here MUST also be made in include/finclude/petscmat.h 1197 1198 MatSORType may be bitwise ORd together, so do not change the numbers 1199 1200 .seealso: MatSOR() 1201 E*/ 1202 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3, 1203 SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8, 1204 SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16, 1205 SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; 1206 extern PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec); 1207 1208 /* 1209 These routines are for efficiently computing Jacobians via finite differences. 
1210 */ 1211 1212 /*J 1213 MatColoringType - String with the name of a PETSc matrix coloring or the creation function 1214 with an optional dynamic library name, for example 1215 http://www.mcs.anl.gov/petsc/lib.a:coloringcreate() 1216 1217 Level: beginner 1218 1219 .seealso: MatGetColoring() 1220 J*/ 1221 #define MatColoringType char* 1222 #define MATCOLORINGNATURAL "natural" 1223 #define MATCOLORINGSL "sl" 1224 #define MATCOLORINGLF "lf" 1225 #define MATCOLORINGID "id" 1226 1227 extern PetscErrorCode MatGetColoring(Mat,const MatColoringType,ISColoring*); 1228 extern PetscErrorCode MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *)); 1229 1230 /*MC 1231 MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the 1232 matrix package. 1233 1234 Synopsis: 1235 PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring)) 1236 1237 Not Collective 1238 1239 Input Parameters: 1240 + sname - name of Coloring (for example MATCOLORINGSL) 1241 . path - location of library where creation routine is 1242 . name - name of function that creates the Coloring type, a string 1243 - function - function pointer that creates the coloring 1244 1245 Level: developer 1246 1247 If dynamic libraries are used, then the fourth input argument (function) 1248 is ignored. 1249 1250 Sample usage: 1251 .vb 1252 MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a, 1253 "MyColor",MyColor); 1254 .ve 1255 1256 Then, your partitioner can be chosen with the procedural interface via 1257 $ MatColoringSetType(part,"my_color") 1258 or at runtime via the option 1259 $ -mat_coloring_type my_color 1260 1261 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1262 1263 .keywords: matrix, Coloring, register 1264 1265 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll() 1266 M*/ 1267 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1268 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) 1269 #else 1270 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) 1271 #endif 1272 1273 extern PetscBool MatColoringRegisterAllCalled; 1274 1275 extern PetscErrorCode MatColoringRegisterAll(const char[]); 1276 extern PetscErrorCode MatColoringRegisterDestroy(void); 1277 extern PetscErrorCode MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*); 1278 1279 /*S 1280 MatFDColoring - Object for computing a sparse Jacobian via finite differences 1281 and coloring 1282 1283 Level: beginner 1284 1285 Concepts: coloring, sparse Jacobian, finite differences 1286 1287 .seealso: MatFDColoringCreate() 1288 S*/ 1289 typedef struct _p_MatFDColoring* MatFDColoring; 1290 1291 extern PetscErrorCode MatFDColoringCreate(Mat,ISColoring,MatFDColoring *); 1292 extern PetscErrorCode MatFDColoringDestroy(MatFDColoring*); 1293 extern PetscErrorCode MatFDColoringView(MatFDColoring,PetscViewer); 1294 extern PetscErrorCode MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*); 1295 extern PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**); 1296 extern PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal); 1297 extern PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring); 1298 extern PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *); 1299 extern PetscErrorCode MatFDColoringApplyTS(Mat,MatFDColoring,PetscReal,Vec,MatStructure*,void *); 1300 extern PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec); 1301 extern PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]); 1302 /* 1303 These routines are for partitioning matrices: currently used only 1304 for adjacency 
matrix, MatCreateMPIAdj(). 1305 */ 1306 1307 /*S 1308 MatPartitioning - Object for managing the partitioning of a matrix or graph 1309 1310 Level: beginner 1311 1312 Concepts: partitioning 1313 1314 .seealso: MatPartitioningCreate(), MatPartitioningType 1315 S*/ 1316 typedef struct _p_MatPartitioning* MatPartitioning; 1317 1318 /*J 1319 MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function 1320 with an optional dynamic library name, for example 1321 http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate() 1322 1323 Level: beginner 1324 1325 .seealso: MatPartitioningCreate(), MatPartitioning 1326 J*/ 1327 #define MatPartitioningType char* 1328 #define MATPARTITIONINGCURRENT "current" 1329 #define MATPARTITIONINGSQUARE "square" 1330 #define MATPARTITIONINGPARMETIS "parmetis" 1331 #define MATPARTITIONINGCHACO "chaco" 1332 #define MATPARTITIONINGPARTY "party" 1333 #define MATPARTITIONINGPTSCOTCH "ptscotch" 1334 1335 1336 extern PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*); 1337 extern PetscErrorCode MatPartitioningSetType(MatPartitioning,const MatPartitioningType); 1338 extern PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt); 1339 extern PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat); 1340 extern PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]); 1341 extern PetscErrorCode MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []); 1342 extern PetscErrorCode MatPartitioningApply(MatPartitioning,IS*); 1343 extern PetscErrorCode MatPartitioningDestroy(MatPartitioning*); 1344 1345 extern PetscErrorCode MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning)); 1346 1347 /*MC 1348 MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the 1349 matrix package. 
1350 1351 Synopsis: 1352 PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning)) 1353 1354 Not Collective 1355 1356 Input Parameters: 1357 + sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis 1358 . path - location of library where creation routine is 1359 . name - name of function that creates the partitioning type, a string 1360 - function - function pointer that creates the partitioning type 1361 1362 Level: developer 1363 1364 If dynamic libraries are used, then the fourth input argument (function) 1365 is ignored. 1366 1367 Sample usage: 1368 .vb 1369 MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a, 1370 "MyPartCreate",MyPartCreate); 1371 .ve 1372 1373 Then, your partitioner can be chosen with the procedural interface via 1374 $ MatPartitioningSetType(part,"my_part") 1375 or at runtime via the option 1376 $ -mat_partitioning_type my_part 1377 1378 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1379 1380 .keywords: matrix, partitioning, register 1381 1382 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll() 1383 M*/ 1384 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1385 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) 1386 #else 1387 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) 1388 #endif 1389 1390 extern PetscBool MatPartitioningRegisterAllCalled; 1391 1392 extern PetscErrorCode MatPartitioningRegisterAll(const char[]); 1393 extern PetscErrorCode MatPartitioningRegisterDestroy(void); 1394 1395 extern PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer); 1396 extern PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning); 1397 extern PetscErrorCode MatPartitioningGetType(MatPartitioning,const MatPartitioningType*); 1398 1399 extern PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning); 1400 extern PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *); 1401 1402 typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType; 1403 extern const char *MPChacoGlobalTypes[]; 1404 typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType; 1405 extern const char *MPChacoLocalTypes[]; 1406 typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType; 1407 extern const char *MPChacoEigenTypes[]; 1408 1409 extern PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType); 1410 extern PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*); 1411 extern PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType); 1412 extern PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*); 1413 extern PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal); 1414 extern PetscErrorCode 
MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType); 1415 extern PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*); 1416 extern PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal); 1417 extern PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*); 1418 extern PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt); 1419 extern PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*); 1420 1421 #define MP_PARTY_OPT "opt" 1422 #define MP_PARTY_LIN "lin" 1423 #define MP_PARTY_SCA "sca" 1424 #define MP_PARTY_RAN "ran" 1425 #define MP_PARTY_GBF "gbf" 1426 #define MP_PARTY_GCF "gcf" 1427 #define MP_PARTY_BUB "bub" 1428 #define MP_PARTY_DEF "def" 1429 extern PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning,const char*); 1430 #define MP_PARTY_HELPFUL_SETS "hs" 1431 #define MP_PARTY_KERNIGHAN_LIN "kl" 1432 #define MP_PARTY_NONE "no" 1433 extern PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning,const char*); 1434 extern PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal); 1435 extern PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning,PetscBool); 1436 extern PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool); 1437 1438 typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType; 1439 extern const char *MPPTScotchStrategyTypes[]; 1440 1441 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal); 1442 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*); 1443 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType); 1444 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*); 1445 1446 extern PetscErrorCode 
MatMeshToVertexGraph(Mat,PetscInt,Mat*); 1447 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*); 1448 1449 /* 1450 If you add entries here you must also add them to finclude/petscmat.h 1451 */ 1452 typedef enum { MATOP_SET_VALUES=0, 1453 MATOP_GET_ROW=1, 1454 MATOP_RESTORE_ROW=2, 1455 MATOP_MULT=3, 1456 MATOP_MULT_ADD=4, 1457 MATOP_MULT_TRANSPOSE=5, 1458 MATOP_MULT_TRANSPOSE_ADD=6, 1459 MATOP_SOLVE=7, 1460 MATOP_SOLVE_ADD=8, 1461 MATOP_SOLVE_TRANSPOSE=9, 1462 MATOP_SOLVE_TRANSPOSE_ADD=10, 1463 MATOP_LUFACTOR=11, 1464 MATOP_CHOLESKYFACTOR=12, 1465 MATOP_SOR=13, 1466 MATOP_TRANSPOSE=14, 1467 MATOP_GETINFO=15, 1468 MATOP_EQUAL=16, 1469 MATOP_GET_DIAGONAL=17, 1470 MATOP_DIAGONAL_SCALE=18, 1471 MATOP_NORM=19, 1472 MATOP_ASSEMBLY_BEGIN=20, 1473 MATOP_ASSEMBLY_END=21, 1474 MATOP_SET_OPTION=22, 1475 MATOP_ZERO_ENTRIES=23, 1476 MATOP_ZERO_ROWS=24, 1477 MATOP_LUFACTOR_SYMBOLIC=25, 1478 MATOP_LUFACTOR_NUMERIC=26, 1479 MATOP_CHOLESKY_FACTOR_SYMBOLIC=27, 1480 MATOP_CHOLESKY_FACTOR_NUMERIC=28, 1481 MATOP_SETUP_PREALLOCATION=29, 1482 MATOP_ILUFACTOR_SYMBOLIC=30, 1483 MATOP_ICCFACTOR_SYMBOLIC=31, 1484 MATOP_GET_ARRAY=32, 1485 MATOP_RESTORE_ARRAY=33, 1486 MATOP_DUPLICATE=34, 1487 MATOP_FORWARD_SOLVE=35, 1488 MATOP_BACKWARD_SOLVE=36, 1489 MATOP_ILUFACTOR=37, 1490 MATOP_ICCFACTOR=38, 1491 MATOP_AXPY=39, 1492 MATOP_GET_SUBMATRICES=40, 1493 MATOP_INCREASE_OVERLAP=41, 1494 MATOP_GET_VALUES=42, 1495 MATOP_COPY=43, 1496 MATOP_GET_ROW_MAX=44, 1497 MATOP_SCALE=45, 1498 MATOP_SHIFT=46, 1499 MATOP_DIAGONAL_SET=47, 1500 MATOP_ILUDT_FACTOR=48, 1501 MATOP_SET_BLOCK_SIZE=49, 1502 MATOP_GET_ROW_IJ=50, 1503 MATOP_RESTORE_ROW_IJ=51, 1504 MATOP_GET_COLUMN_IJ=52, 1505 MATOP_RESTORE_COLUMN_IJ=53, 1506 MATOP_FDCOLORING_CREATE=54, 1507 MATOP_COLORING_PATCH=55, 1508 MATOP_SET_UNFACTORED=56, 1509 MATOP_PERMUTE=57, 1510 MATOP_SET_VALUES_BLOCKED=58, 1511 MATOP_GET_SUBMATRIX=59, 1512 MATOP_DESTROY=60, 1513 MATOP_VIEW=61, 1514 MATOP_CONVERT_FROM=62, 1515 MATOP_USE_SCALED_FORM=63, 1516 
MATOP_SCALE_SYSTEM=64, 1517 MATOP_UNSCALE_SYSTEM=65, 1518 MATOP_SET_LOCAL_TO_GLOBAL_MAP=66, 1519 MATOP_SET_VALUES_LOCAL=67, 1520 MATOP_ZERO_ROWS_LOCAL=68, 1521 MATOP_GET_ROW_MAX_ABS=69, 1522 MATOP_GET_ROW_MIN_ABS=70, 1523 MATOP_CONVERT=71, 1524 MATOP_SET_COLORING=72, 1525 MATOP_SET_VALUES_ADIC=73, 1526 MATOP_SET_VALUES_ADIFOR=74, 1527 MATOP_FD_COLORING_APPLY=75, 1528 MATOP_SET_FROM_OPTIONS=76, 1529 MATOP_MULT_CON=77, 1530 MATOP_MULT_TRANSPOSE_CON=78, 1531 MATOP_PERMUTE_SPARSIFY=79, 1532 MATOP_MULT_MULTIPLE=80, 1533 MATOP_SOLVE_MULTIPLE=81, 1534 MATOP_GET_INERTIA=82, 1535 MATOP_LOAD=83, 1536 MATOP_IS_SYMMETRIC=84, 1537 MATOP_IS_HERMITIAN=85, 1538 MATOP_IS_STRUCTURALLY_SYMMETRIC=86, 1539 MATOP_DUMMY=87, 1540 MATOP_GET_VECS=88, 1541 MATOP_MAT_MULT=89, 1542 MATOP_MAT_MULT_SYMBOLIC=90, 1543 MATOP_MAT_MULT_NUMERIC=91, 1544 MATOP_PTAP=92, 1545 MATOP_PTAP_SYMBOLIC=93, 1546 MATOP_PTAP_NUMERIC=94, 1547 MATOP_MAT_MULTTRANSPOSE=95, 1548 MATOP_MAT_MULTTRANSPOSE_SYM=96, 1549 MATOP_MAT_MULTTRANSPOSE_NUM=97, 1550 MATOP_PTAP_SYMBOLIC_SEQAIJ=98, 1551 MATOP_PTAP_NUMERIC_SEQAIJ=99, 1552 MATOP_PTAP_SYMBOLIC_MPIAIJ=100, 1553 MATOP_PTAP_NUMERIC_MPIAIJ=101, 1554 MATOP_CONJUGATE=102, 1555 MATOP_SET_SIZES=103, 1556 MATOP_SET_VALUES_ROW=104, 1557 MATOP_REAL_PART=105, 1558 MATOP_IMAG_PART=106, 1559 MATOP_GET_ROW_UTRIANGULAR=107, 1560 MATOP_RESTORE_ROW_UTRIANGULAR=108, 1561 MATOP_MATSOLVE=109, 1562 MATOP_GET_REDUNDANTMATRIX=110, 1563 MATOP_GET_ROW_MIN=111, 1564 MATOP_GET_COLUMN_VEC=112, 1565 MATOP_MISSING_DIAGONAL=113, 1566 MATOP_MATGETSEQNONZEROSTRUCTURE=114, 1567 MATOP_CREATE=115, 1568 MATOP_GET_GHOSTS=116, 1569 MATOP_GET_LOCALSUBMATRIX=117, 1570 MATOP_RESTORE_LOCALSUBMATRIX=118, 1571 MATOP_MULT_DIAGONAL_BLOCK=119, 1572 MATOP_HERMITIANTRANSPOSE=120, 1573 MATOP_MULTHERMITIANTRANSPOSE=121, 1574 MATOP_MULTHERMITIANTRANSPOSEADD=122, 1575 MATOP_GETMULTIPROCBLOCK=123, 1576 MATOP_GETCOLUMNNORMS=125, 1577 MATOP_GET_SUBMATRICES_PARALLEL=128, 1578 MATOP_SET_VALUES_BATCH=129 1579 } MatOperation; 1580 
extern PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *); 1581 extern PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void)); 1582 extern PetscErrorCode MatShellGetOperation(Mat,MatOperation,void(**)(void)); 1583 extern PetscErrorCode MatShellSetContext(Mat,void*); 1584 1585 /* 1586 Codes for matrices stored on disk. By default they are 1587 stored in a universal format. By changing the format with 1588 PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will 1589 be stored in a way natural for the matrix, for example dense matrices 1590 would be stored as dense. Matrices stored this way may only be 1591 read into matrices of the same type. 1592 */ 1593 #define MATRIX_BINARY_FORMAT_DENSE -1 1594 1595 extern PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat,PetscReal); 1596 extern PetscErrorCode MatISGetLocalMat(Mat,Mat*); 1597 1598 /*S 1599 MatNullSpace - Object that removes a null space from a vector, i.e. 1600 orthogonalizes the vector to a subsapce 1601 1602 Level: advanced 1603 1604 Concepts: matrix; linear operator, null space 1605 1606 Users manual sections: 1607 . 
sec_singular 1608 1609 .seealso: MatNullSpaceCreate() 1610 S*/ 1611 typedef struct _p_MatNullSpace* MatNullSpace; 1612 1613 extern PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*); 1614 extern PetscErrorCode MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*); 1615 extern PetscErrorCode MatNullSpaceDestroy(MatNullSpace*); 1616 extern PetscErrorCode MatNullSpaceRemove(MatNullSpace,Vec,Vec*); 1617 extern PetscErrorCode MatSetNullSpace(Mat,MatNullSpace); 1618 extern PetscErrorCode MatNullSpaceTest(MatNullSpace,Mat,PetscBool *); 1619 extern PetscErrorCode MatNullSpaceView(MatNullSpace,PetscViewer); 1620 1621 extern PetscErrorCode MatReorderingSeqSBAIJ(Mat,IS); 1622 extern PetscErrorCode MatMPISBAIJSetHashTableFactor(Mat,PetscReal); 1623 extern PetscErrorCode MatSeqSBAIJSetColumnIndices(Mat,PetscInt *); 1624 extern PetscErrorCode MatSeqBAIJInvertBlockDiagonal(Mat); 1625 1626 extern PetscErrorCode MatCreateMAIJ(Mat,PetscInt,Mat*); 1627 extern PetscErrorCode MatMAIJRedimension(Mat,PetscInt,Mat*); 1628 extern PetscErrorCode MatMAIJGetAIJ(Mat,Mat*); 1629 1630 extern PetscErrorCode MatComputeExplicitOperator(Mat,Mat*); 1631 1632 extern PetscErrorCode MatDiagonalScaleLocal(Mat,Vec); 1633 1634 extern PetscErrorCode MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*); 1635 extern PetscErrorCode MatMFFDSetBase(Mat,Vec,Vec); 1636 extern PetscErrorCode MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*); 1637 extern PetscErrorCode MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*)); 1638 extern PetscErrorCode MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec)); 1639 extern PetscErrorCode MatMFFDAddNullSpace(Mat,MatNullSpace); 1640 extern PetscErrorCode MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt); 1641 extern PetscErrorCode MatMFFDResetHHistory(Mat); 1642 extern PetscErrorCode MatMFFDSetFunctionError(Mat,PetscReal); 1643 extern PetscErrorCode 
MatMFFDSetPeriod(Mat,PetscInt); 1644 extern PetscErrorCode MatMFFDGetH(Mat,PetscScalar *); 1645 extern PetscErrorCode MatMFFDSetOptionsPrefix(Mat,const char[]); 1646 extern PetscErrorCode MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*); 1647 extern PetscErrorCode MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*); 1648 1649 /*S 1650 MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free 1651 Jacobian vector products 1652 1653 Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure 1654 1655 MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure 1656 1657 Level: developer 1658 1659 .seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFuction(), MatMFFDSetType(), MatMFFDRegister() 1660 S*/ 1661 typedef struct _p_MatMFFD* MatMFFD; 1662 1663 /*J 1664 MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function 1665 1666 Level: beginner 1667 1668 .seealso: MatMFFDSetType(), MatMFFDRegister() 1669 J*/ 1670 #define MatMFFDType char* 1671 #define MATMFFD_DS "ds" 1672 #define MATMFFD_WP "wp" 1673 1674 extern PetscErrorCode MatMFFDSetType(Mat,const MatMFFDType); 1675 extern PetscErrorCode MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD)); 1676 1677 /*MC 1678 MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry. 1679 1680 Synopsis: 1681 PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD)) 1682 1683 Not Collective 1684 1685 Input Parameters: 1686 + name_solver - name of a new user-defined compute-h module 1687 . path - path (either absolute or relative) the library containing this solver 1688 . 
name_create - name of routine to create method context 1689 - routine_create - routine to create method context 1690 1691 Level: developer 1692 1693 Notes: 1694 MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers. 1695 1696 If dynamic libraries are used, then the fourth input argument (routine_create) 1697 is ignored. 1698 1699 Sample usage: 1700 .vb 1701 MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a, 1702 "MyHCreate",MyHCreate); 1703 .ve 1704 1705 Then, your solver can be chosen with the procedural interface via 1706 $ MatMFFDSetType(mfctx,"my_h") 1707 or at runtime via the option 1708 $ -snes_mf_type my_h 1709 1710 .keywords: MatMFFD, register 1711 1712 .seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy() 1713 M*/ 1714 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1715 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0) 1716 #else 1717 #define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d) 1718 #endif 1719 1720 extern PetscErrorCode MatMFFDRegisterAll(const char[]); 1721 extern PetscErrorCode MatMFFDRegisterDestroy(void); 1722 extern PetscErrorCode MatMFFDDSSetUmin(Mat,PetscReal); 1723 extern PetscErrorCode MatMFFDWPSetComputeNormU(Mat,PetscBool ); 1724 1725 1726 extern PetscErrorCode PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *); 1727 extern PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *); 1728 1729 /* 1730 PETSc interface to MUMPS 1731 */ 1732 #ifdef PETSC_HAVE_MUMPS 1733 extern PetscErrorCode MatMumpsSetIcntl(Mat,PetscInt,PetscInt); 1734 #endif 1735 1736 /* 1737 PETSc interface to SUPERLU 1738 */ 1739 #ifdef PETSC_HAVE_SUPERLU 1740 extern PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal); 1741 #endif 1742 1743 #if defined(PETSC_HAVE_CUSP) 1744 extern PetscErrorCode MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 1745 extern 
PetscErrorCode MatCreateMPIAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 1746 #endif 1747 1748 /* 1749 PETSc interface to FFTW 1750 */ 1751 #if defined(PETSC_HAVE_FFTW) 1752 extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec); 1753 extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec); 1754 extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*); 1755 #endif 1756 1757 extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*); 1758 extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*); 1759 extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***); 1760 extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*); 1761 extern PetscErrorCode MatNestSetVecType(Mat,const VecType); 1762 extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]); 1763 extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat); 1764 1765 /* 1766 MatIJ: 1767 An unweighted directed pseudograph 1768 An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it: 1769 A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs 1770 by mapping the indices to the indices connected to them by the (pseudo)graph ed 1771 */ 1772 typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType; 1773 extern PetscErrorCode MatIJSetMultivalued(Mat, PetscBool); 1774 extern PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*); 1775 extern PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*); 1776 extern PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **); 1777 extern PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS); 1778 extern PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*); 1779 extern PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **); 1780 extern 
PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *); 1781 extern PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *); 1782 extern PetscErrorCode MatIJGetSupport(Mat, PetscInt *, PetscInt **); 1783 extern PetscErrorCode MatIJGetSupportIS(Mat, IS *); 1784 extern PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**); 1785 extern PetscErrorCode MatIJGetImageIS(Mat, IS *); 1786 extern PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *); 1787 extern PetscErrorCode MatIJGetImageSize(Mat, PetscInt *); 1788 1789 extern PetscErrorCode MatIJBinRenumber(Mat, Mat*); 1790 1791 extern PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[], MatIJIndexType,PetscInt*,PetscInt*[],PetscInt*[],PetscScalar*[],PetscInt*[]); 1792 extern PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],PetscInt*,PetscInt*[],PetscInt*[],PetscScalar*[],PetscInt*[]); 1793 extern PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],MatIJIndexType,PetscInt*,PetscInt*[],PetscInt*[],PetscScalar*[],PetscInt*[]); 1794 1795 PETSC_EXTERN_CXX_END 1796 #endif 1797