1 /* 2 Include file for the matrix component of PETSc 3 */ 4 #ifndef __PETSCMAT_H 5 #define __PETSCMAT_H 6 #include "petscvec.h" 7 PETSC_EXTERN_CXX_BEGIN 8 9 /*S 10 Mat - Abstract PETSc matrix object 11 12 Level: beginner 13 14 Concepts: matrix; linear operator 15 16 .seealso: MatCreate(), MatType, MatSetType() 17 S*/ 18 typedef struct _p_Mat* Mat; 19 20 /*J 21 MatType - String with the name of a PETSc matrix or the creation function 22 with an optional dynamic library name, for example 23 http://www.mcs.anl.gov/petsc/lib.a:mymatcreate() 24 25 Level: beginner 26 27 .seealso: MatSetType(), Mat, MatSolverPackage 28 J*/ 29 #define MatType char* 30 #define MATSAME "same" 31 #define MATMAIJ "maij" 32 #define MATSEQMAIJ "seqmaij" 33 #define MATMPIMAIJ "mpimaij" 34 #define MATIS "is" 35 #define MATAIJ "aij" 36 #define MATSEQAIJ "seqaij" 37 #define MATSEQAIJPTHREAD "seqaijpthread" 38 #define MATAIJPTHREAD "aijpthread" 39 #define MATMPIAIJ "mpiaij" 40 #define MATAIJCRL "aijcrl" 41 #define MATSEQAIJCRL "seqaijcrl" 42 #define MATMPIAIJCRL "mpiaijcrl" 43 #define MATAIJCUSP "aijcusp" 44 #define MATSEQAIJCUSP "seqaijcusp" 45 #define MATMPIAIJCUSP "mpiaijcusp" 46 #define MATAIJPERM "aijperm" 47 #define MATSEQAIJPERM "seqaijperm" 48 #define MATMPIAIJPERM "mpiaijperm" 49 #define MATSHELL "shell" 50 #define MATDENSE "dense" 51 #define MATSEQDENSE "seqdense" 52 #define MATMPIDENSE "mpidense" 53 #define MATBAIJ "baij" 54 #define MATSEQBAIJ "seqbaij" 55 #define MATMPIBAIJ "mpibaij" 56 #define MATMPIADJ "mpiadj" 57 #define MATSBAIJ "sbaij" 58 #define MATSEQSBAIJ "seqsbaij" 59 #define MATMPISBAIJ "mpisbaij" 60 #define MATSEQBSTRM "seqbstrm" 61 #define MATMPIBSTRM "mpibstrm" 62 #define MATBSTRM "bstrm" 63 #define MATSEQSBSTRM "seqsbstrm" 64 #define MATMPISBSTRM "mpisbstrm" 65 #define MATSBSTRM "sbstrm" 66 #define MATDAAD "daad" 67 #define MATMFFD "mffd" 68 #define MATNORMAL "normal" 69 #define MATLRC "lrc" 70 #define MATSCATTER "scatter" 71 #define MATBLOCKMAT "blockmat" 72 #define 
MATCOMPOSITE "composite" 73 #define MATFFT "fft" 74 #define MATFFTW "fftw" 75 #define MATSEQCUFFT "seqcufft" 76 #define MATTRANSPOSEMAT "transpose" 77 #define MATSCHURCOMPLEMENT "schurcomplement" 78 #define MATPYTHON "python" 79 #define MATHYPRESTRUCT "hyprestruct" 80 #define MATHYPRESSTRUCT "hypresstruct" 81 #define MATSUBMATRIX "submatrix" 82 #define MATLOCALREF "localref" 83 #define MATNEST "nest" 84 #define MATIJ "ij" 85 86 /*J 87 MatSolverPackage - String with the name of a PETSc matrix solver type. 88 89 For example: "petsc" indicates what PETSc provides, "superlu" indicates either 90 SuperLU or SuperLU_Dist etc. 91 92 93 Level: beginner 94 95 .seealso: MatGetFactor(), Mat, MatSetType(), MatType 96 J*/ 97 #define MatSolverPackage char* 98 #define MATSOLVERSPOOLES "spooles" 99 #define MATSOLVERSUPERLU "superlu" 100 #define MATSOLVERSUPERLU_DIST "superlu_dist" 101 #define MATSOLVERUMFPACK "umfpack" 102 #define MATSOLVERCHOLMOD "cholmod" 103 #define MATSOLVERESSL "essl" 104 #define MATSOLVERLUSOL "lusol" 105 #define MATSOLVERMUMPS "mumps" 106 #define MATSOLVERPASTIX "pastix" 107 #define MATSOLVERMATLAB "matlab" 108 #define MATSOLVERPETSC "petsc" 109 #define MATSOLVERPLAPACK "plapack" 110 #define MATSOLVERBAS "bas" 111 112 #define MATSOLVERBSTRM "bstrm" 113 #define MATSOLVERSBSTRM "sbstrm" 114 115 /*E 116 MatFactorType - indicates what type of factorization is requested 117 118 Level: beginner 119 120 Any additions/changes here MUST also be made in include/finclude/petscmat.h 121 122 .seealso: MatSolverPackage, MatGetFactor() 123 E*/ 124 typedef enum {MAT_FACTOR_NONE, MAT_FACTOR_LU, MAT_FACTOR_CHOLESKY, MAT_FACTOR_ILU, MAT_FACTOR_ICC,MAT_FACTOR_ILUDT} MatFactorType; 125 extern const char *const MatFactorTypes[]; 126 127 extern PetscErrorCode MatGetFactor(Mat,const MatSolverPackage,MatFactorType,Mat*); 128 extern PetscErrorCode MatGetFactorAvailable(Mat,const MatSolverPackage,MatFactorType,PetscBool *); 129 extern PetscErrorCode MatFactorGetSolverPackage(Mat,const 
MatSolverPackage*); 130 extern PetscErrorCode MatGetFactorType(Mat,MatFactorType*); 131 132 /* Logging support */ 133 #define MAT_FILE_CLASSID 1211216 /* used to indicate matrices in binary files */ 134 extern PetscClassId MAT_CLASSID; 135 extern PetscClassId MAT_FDCOLORING_CLASSID; 136 extern PetscClassId MAT_TRANSPOSECOLORING_CLASSID; 137 extern PetscClassId MAT_PARTITIONING_CLASSID; 138 extern PetscClassId MAT_COARSEN_CLASSID; 139 extern PetscClassId MAT_NULLSPACE_CLASSID; 140 extern PetscClassId MATMFFD_CLASSID; 141 142 /*E 143 MatReuse - Indicates if matrices obtained from a previous call to MatGetSubMatrices() 144 or MatGetSubMatrix() are to be reused to store the new matrix values. For MatConvert() it is used to indicate 145 that the input matrix is to be replaced with the converted matrix. 146 147 Level: beginner 148 149 Any additions/changes here MUST also be made in include/finclude/petscmat.h 150 151 .seealso: MatGetSubMatrices(), MatGetSubMatrix(), MatDestroyMatrices(), MatConvert() 152 E*/ 153 typedef enum {MAT_INITIAL_MATRIX,MAT_REUSE_MATRIX,MAT_IGNORE_MATRIX} MatReuse; 154 155 /*E 156 MatGetSubMatrixOption - Indicates if matrices obtained from a call to MatGetSubMatrices() 157 include the matrix values. Currently it is only used by MatGetSeqNonzeroStructure(). 
158 159 Level: beginner 160 161 .seealso: MatGetSeqNonzeroStructure() 162 E*/ 163 typedef enum {MAT_DO_NOT_GET_VALUES,MAT_GET_VALUES} MatGetSubMatrixOption; 164 165 extern PetscErrorCode MatInitializePackage(const char[]); 166 167 extern PetscErrorCode MatCreate(MPI_Comm,Mat*); 168 extern PetscErrorCode MatSetSizes(Mat,PetscInt,PetscInt,PetscInt,PetscInt); 169 extern PetscErrorCode MatSetType(Mat,const MatType); 170 extern PetscErrorCode MatSetFromOptions(Mat); 171 extern PetscErrorCode MatRegisterAll(const char[]); 172 extern PetscErrorCode MatRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat)); 173 extern PetscErrorCode MatRegisterBaseName(const char[],const char[],const char[]); 174 extern PetscErrorCode MatSetOptionsPrefix(Mat,const char[]); 175 extern PetscErrorCode MatAppendOptionsPrefix(Mat,const char[]); 176 extern PetscErrorCode MatGetOptionsPrefix(Mat,const char*[]); 177 178 /*MC 179 MatRegisterDynamic - Adds a new matrix type 180 181 Synopsis: 182 PetscErrorCode MatRegisterDynamic(const char *name,const char *path,const char *name_create,PetscErrorCode (*routine_create)(Mat)) 183 184 Not Collective 185 186 Input Parameters: 187 + name - name of a new user-defined matrix type 188 . path - path (either absolute or relative) to the library containing this solver 189 . name_create - name of routine to create method context 190 - routine_create - routine to create method context 191 192 Notes: 193 MatRegisterDynamic() may be called multiple times to add several user-defined solvers. 194 195 If dynamic libraries are used, then the fourth input argument (routine_create) 196 is ignored. 
197 198 Sample usage: 199 .vb 200 MatRegisterDynamic("my_mat",/home/username/my_lib/lib/libO/solaris/mylib.a, 201 "MyMatCreate",MyMatCreate); 202 .ve 203 204 Then, your solver can be chosen with the procedural interface via 205 $ MatSetType(Mat,"my_mat") 206 or at runtime via the option 207 $ -mat_type my_mat 208 209 Level: advanced 210 211 Notes: ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values. 212 If your function is not being put into a shared library then use MatRegister() instead 213 214 .keywords: Mat, register 215 216 .seealso: MatRegisterAll(), MatRegisterDestroy() 217 218 M*/ 219 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 220 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,0) 221 #else 222 #define MatRegisterDynamic(a,b,c,d) MatRegister(a,b,c,d) 223 #endif 224 225 extern PetscBool MatRegisterAllCalled; 226 extern PetscFList MatList; 227 extern PetscFList MatColoringList; 228 extern PetscFList MatPartitioningList; 229 extern PetscFList MatCoarsenList; 230 231 /*E 232 MatStructure - Indicates if the matrix has the same nonzero structure 233 234 Level: beginner 235 236 Any additions/changes here MUST also be made in include/finclude/petscmat.h 237 238 .seealso: MatCopy(), KSPSetOperators(), PCSetOperators() 239 E*/ 240 typedef enum {DIFFERENT_NONZERO_PATTERN,SUBSET_NONZERO_PATTERN,SAME_NONZERO_PATTERN,SAME_PRECONDITIONER} MatStructure; 241 242 extern PetscErrorCode MatCreateSeqDense(MPI_Comm,PetscInt,PetscInt,PetscScalar[],Mat*); 243 extern PetscErrorCode MatCreateDense(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscScalar[],Mat*); 244 extern PetscErrorCode MatCreateSeqAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 245 extern PetscErrorCode MatCreateAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 246 extern PetscErrorCode MatCreateMPIAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat 
*); 247 extern PetscErrorCode MatCreateMPIAIJWithSplitArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],PetscInt[],PetscInt[],PetscScalar[],Mat*); 248 249 extern PetscErrorCode MatCreateSeqBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 250 extern PetscErrorCode MatCreateBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 251 extern PetscErrorCode MatCreateMPIBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat*); 252 253 extern PetscErrorCode MatCreateMPIAdj(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscInt[],Mat*); 254 extern PetscErrorCode MatCreateSeqSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 255 256 extern PetscErrorCode MatCreateSBAIJ(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 257 extern PetscErrorCode MatCreateMPISBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[],Mat *); 258 extern PetscErrorCode MatMPISBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 259 extern PetscErrorCode MatXAIJSetPreallocation(Mat,PetscInt,const PetscInt*,const PetscInt*,const PetscInt*,const PetscInt*); 260 261 extern PetscErrorCode MatCreateShell(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,void *,Mat*); 262 extern PetscErrorCode MatCreateAdic(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,void (*)(void),Mat*); 263 extern PetscErrorCode MatCreateNormal(Mat,Mat*); 264 extern PetscErrorCode MatCreateLRC(Mat,Mat,Mat,Mat*); 265 extern PetscErrorCode MatCreateIS(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,ISLocalToGlobalMapping,Mat*); 266 extern PetscErrorCode MatCreateSeqAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 267 extern 
PetscErrorCode MatCreateMPIAIJCRL(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 268 269 extern PetscErrorCode MatCreateSeqBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 270 extern PetscErrorCode MatCreateMPIBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 271 extern PetscErrorCode MatCreateSeqSBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*); 272 extern PetscErrorCode MatCreateMPISBSTRM(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*); 273 274 extern PetscErrorCode MatCreateScatter(MPI_Comm,VecScatter,Mat*); 275 extern PetscErrorCode MatScatterSetVecScatter(Mat,VecScatter); 276 extern PetscErrorCode MatScatterGetVecScatter(Mat,VecScatter*); 277 extern PetscErrorCode MatCreateBlockMat(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt*,Mat*); 278 extern PetscErrorCode MatCompositeAddMat(Mat,Mat); 279 extern PetscErrorCode MatCompositeMerge(Mat); 280 extern PetscErrorCode MatCreateComposite(MPI_Comm,PetscInt,const Mat*,Mat*); 281 typedef enum {MAT_COMPOSITE_ADDITIVE,MAT_COMPOSITE_MULTIPLICATIVE} MatCompositeType; 282 extern PetscErrorCode MatCompositeSetType(Mat,MatCompositeType); 283 284 extern PetscErrorCode MatCreateFFT(MPI_Comm,PetscInt,const PetscInt[],const MatType,Mat*); 285 extern PetscErrorCode MatCreateSeqCUFFT(MPI_Comm,PetscInt,const PetscInt[],Mat*); 286 287 extern PetscErrorCode MatCreateTranspose(Mat,Mat*); 288 extern PetscErrorCode MatCreateSubMatrix(Mat,IS,IS,Mat*); 289 extern PetscErrorCode MatSubMatrixUpdate(Mat,Mat,IS,IS); 290 extern PetscErrorCode MatCreateLocalRef(Mat,IS,IS,Mat*); 291 292 extern PetscErrorCode MatPythonSetType(Mat,const char[]); 293 294 extern PetscErrorCode MatSetUp(Mat); 295 extern PetscErrorCode MatDestroy(Mat*); 296 297 extern PetscErrorCode MatConjugate(Mat); 298 extern PetscErrorCode 
MatRealPart(Mat); 299 extern PetscErrorCode MatImaginaryPart(Mat); 300 extern PetscErrorCode MatGetDiagonalBlock(Mat,Mat*); 301 extern PetscErrorCode MatGetTrace(Mat,PetscScalar*); 302 extern PetscErrorCode MatInvertBlockDiagonal(Mat,PetscScalar **); 303 304 /* ------------------------------------------------------------*/ 305 extern PetscErrorCode MatSetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 306 extern PetscErrorCode MatSetValuesBlocked(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 307 extern PetscErrorCode MatSetValuesRow(Mat,PetscInt,const PetscScalar[]); 308 extern PetscErrorCode MatSetValuesRowLocal(Mat,PetscInt,const PetscScalar[]); 309 extern PetscErrorCode MatSetValuesBatch(Mat,PetscInt,PetscInt,PetscInt[],const PetscScalar[]); 310 311 /*S 312 MatStencil - Data structure (C struct) for storing information about a single row or 313 column of a matrix as indexed on an associated grid. 314 315 Fortran usage is different, see MatSetValuesStencil() for details. 
316 317 Level: beginner 318 319 Concepts: matrix; linear operator 320 321 .seealso: MatSetValuesStencil(), MatSetStencil(), MatSetValuesBlockedStencil() 322 S*/ 323 typedef struct { 324 PetscInt k,j,i,c; 325 } MatStencil; 326 327 extern PetscErrorCode MatSetValuesStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 328 extern PetscErrorCode MatSetValuesBlockedStencil(Mat,PetscInt,const MatStencil[],PetscInt,const MatStencil[],const PetscScalar[],InsertMode); 329 extern PetscErrorCode MatSetStencil(Mat,PetscInt,const PetscInt[],const PetscInt[],PetscInt); 330 331 extern PetscErrorCode MatSetColoring(Mat,ISColoring); 332 extern PetscErrorCode MatSetValuesAdic(Mat,void*); 333 extern PetscErrorCode MatSetValuesAdifor(Mat,PetscInt,void*); 334 335 /*E 336 MatAssemblyType - Indicates if the matrix is now to be used, or if you plan 337 to continue to add values to it 338 339 Level: beginner 340 341 .seealso: MatAssemblyBegin(), MatAssemblyEnd() 342 E*/ 343 typedef enum {MAT_FLUSH_ASSEMBLY=1,MAT_FINAL_ASSEMBLY=0} MatAssemblyType; 344 extern PetscErrorCode MatAssemblyBegin(Mat,MatAssemblyType); 345 extern PetscErrorCode MatAssemblyEnd(Mat,MatAssemblyType); 346 extern PetscErrorCode MatAssembled(Mat,PetscBool *); 347 348 349 350 /*E 351 MatOption - Options that may be set for a matrix and its behavior or storage 352 353 Level: beginner 354 355 Any additions/changes here MUST also be made in include/finclude/petscmat.h 356 357 .seealso: MatSetOption() 358 E*/ 359 typedef enum {MAT_ROW_ORIENTED,MAT_NEW_NONZERO_LOCATIONS, 360 MAT_SYMMETRIC, 361 MAT_STRUCTURALLY_SYMMETRIC, 362 MAT_NEW_DIAGONALS,MAT_IGNORE_OFF_PROC_ENTRIES, 363 MAT_NEW_NONZERO_LOCATION_ERR, 364 MAT_NEW_NONZERO_ALLOCATION_ERR,MAT_USE_HASH_TABLE, 365 MAT_KEEP_NONZERO_PATTERN,MAT_IGNORE_ZERO_ENTRIES, 366 MAT_USE_INODES, 367 MAT_HERMITIAN, 368 MAT_SYMMETRY_ETERNAL, 369 MAT_CHECK_COMPRESSED_ROW, 370 MAT_IGNORE_LOWER_TRIANGULAR,MAT_ERROR_LOWER_TRIANGULAR, 371 
MAT_GETROW_UPPERTRIANGULAR,MAT_UNUSED_NONZERO_LOCATION_ERR, 372 MAT_SPD,MAT_NO_OFF_PROC_ENTRIES,MAT_NO_OFF_PROC_ZERO_ROWS, 373 NUM_MAT_OPTIONS} MatOption; 374 extern const char *MatOptions[]; 375 extern PetscErrorCode MatSetOption(Mat,MatOption,PetscBool ); 376 extern PetscErrorCode MatGetType(Mat,const MatType*); 377 378 extern PetscErrorCode MatGetValues(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],PetscScalar[]); 379 extern PetscErrorCode MatGetRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 380 extern PetscErrorCode MatRestoreRow(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 381 extern PetscErrorCode MatGetRowUpperTriangular(Mat); 382 extern PetscErrorCode MatRestoreRowUpperTriangular(Mat); 383 extern PetscErrorCode MatGetColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 384 extern PetscErrorCode MatRestoreColumn(Mat,PetscInt,PetscInt *,const PetscInt *[],const PetscScalar*[]); 385 extern PetscErrorCode MatGetColumnVector(Mat,Vec,PetscInt); 386 extern PetscErrorCode MatGetArray(Mat,PetscScalar *[]); 387 extern PetscErrorCode MatRestoreArray(Mat,PetscScalar *[]); 388 extern PetscErrorCode MatGetBlockSize(Mat,PetscInt *); 389 extern PetscErrorCode MatSetBlockSize(Mat,PetscInt); 390 extern PetscErrorCode MatGetBlockSizes(Mat,PetscInt *,PetscInt *); 391 extern PetscErrorCode MatSetBlockSizes(Mat,PetscInt,PetscInt); 392 extern PetscErrorCode MatSetNThreads(Mat,PetscInt); 393 extern PetscErrorCode MatGetNThreads(Mat,PetscInt*); 394 395 extern PetscErrorCode MatMult(Mat,Vec,Vec); 396 extern PetscErrorCode MatMultDiagonalBlock(Mat,Vec,Vec); 397 extern PetscErrorCode MatMultAdd(Mat,Vec,Vec,Vec); 398 extern PetscErrorCode MatMultTranspose(Mat,Vec,Vec); 399 extern PetscErrorCode MatMultHermitianTranspose(Mat,Vec,Vec); 400 extern PetscErrorCode MatIsTranspose(Mat,Mat,PetscReal,PetscBool *); 401 extern PetscErrorCode MatIsHermitianTranspose(Mat,Mat,PetscReal,PetscBool *); 402 extern PetscErrorCode 
MatMultTransposeAdd(Mat,Vec,Vec,Vec); 403 extern PetscErrorCode MatMultHermitianTransposeAdd(Mat,Vec,Vec,Vec); 404 extern PetscErrorCode MatMultConstrained(Mat,Vec,Vec); 405 extern PetscErrorCode MatMultTransposeConstrained(Mat,Vec,Vec); 406 extern PetscErrorCode MatMatSolve(Mat,Mat,Mat); 407 408 /*E 409 MatDuplicateOption - Indicates if a duplicated sparse matrix should have 410 its numerical values copied over or just its nonzero structure. 411 412 Level: beginner 413 414 Any additions/changes here MUST also be made in include/finclude/petscmat.h 415 416 $ MAT_SHARE_NONZERO_PATTERN - the i and j arrays in the new matrix will be shared with the original matrix 417 $ this also triggers the MAT_DO_NOT_COPY_VALUES option. This is used when you 418 $ have several matrices with the same nonzero pattern. 419 420 .seealso: MatDuplicate() 421 E*/ 422 typedef enum {MAT_DO_NOT_COPY_VALUES,MAT_COPY_VALUES,MAT_SHARE_NONZERO_PATTERN} MatDuplicateOption; 423 424 extern PetscErrorCode MatConvert(Mat,const MatType,MatReuse,Mat*); 425 extern PetscErrorCode MatDuplicate(Mat,MatDuplicateOption,Mat*); 426 427 428 extern PetscErrorCode MatCopy(Mat,Mat,MatStructure); 429 extern PetscErrorCode MatView(Mat,PetscViewer); 430 extern PetscErrorCode MatIsSymmetric(Mat,PetscReal,PetscBool *); 431 extern PetscErrorCode MatIsStructurallySymmetric(Mat,PetscBool *); 432 extern PetscErrorCode MatIsHermitian(Mat,PetscReal,PetscBool *); 433 extern PetscErrorCode MatIsSymmetricKnown(Mat,PetscBool *,PetscBool *); 434 extern PetscErrorCode MatIsHermitianKnown(Mat,PetscBool *,PetscBool *); 435 extern PetscErrorCode MatMissingDiagonal(Mat,PetscBool *,PetscInt *); 436 extern PetscErrorCode MatLoad(Mat, PetscViewer); 437 438 extern PetscErrorCode MatGetRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 439 extern PetscErrorCode MatRestoreRowIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 440 extern PetscErrorCode 
MatGetColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt*,PetscInt *[],PetscInt *[],PetscBool *); 441 extern PetscErrorCode MatRestoreColumnIJ(Mat,PetscInt,PetscBool ,PetscBool ,PetscInt *,PetscInt *[],PetscInt *[],PetscBool *); 442 443 /*S 444 MatInfo - Context of matrix information, used with MatGetInfo() 445 446 In Fortran this is simply a double precision array of dimension MAT_INFO_SIZE 447 448 Level: intermediate 449 450 Concepts: matrix^nonzero information 451 452 .seealso: MatGetInfo(), MatInfoType 453 S*/ 454 typedef struct { 455 PetscLogDouble block_size; /* block size */ 456 PetscLogDouble nz_allocated,nz_used,nz_unneeded; /* number of nonzeros */ 457 PetscLogDouble memory; /* memory allocated */ 458 PetscLogDouble assemblies; /* number of matrix assemblies called */ 459 PetscLogDouble mallocs; /* number of mallocs during MatSetValues() */ 460 PetscLogDouble fill_ratio_given,fill_ratio_needed; /* fill ratio for LU/ILU */ 461 PetscLogDouble factor_mallocs; /* number of mallocs during factorization */ 462 } MatInfo; 463 464 /*E 465 MatInfoType - Indicates if you want information about the local part of the matrix, 466 the entire parallel matrix or the maximum over all the local parts. 
467 468 Level: beginner 469 470 Any additions/changes here MUST also be made in include/finclude/petscmat.h 471 472 .seealso: MatGetInfo(), MatInfo 473 E*/ 474 typedef enum {MAT_LOCAL=1,MAT_GLOBAL_MAX=2,MAT_GLOBAL_SUM=3} MatInfoType; 475 extern PetscErrorCode MatGetInfo(Mat,MatInfoType,MatInfo*); 476 extern PetscErrorCode MatGetDiagonal(Mat,Vec); 477 extern PetscErrorCode MatGetRowMax(Mat,Vec,PetscInt[]); 478 extern PetscErrorCode MatGetRowMin(Mat,Vec,PetscInt[]); 479 extern PetscErrorCode MatGetRowMaxAbs(Mat,Vec,PetscInt[]); 480 extern PetscErrorCode MatGetRowMinAbs(Mat,Vec,PetscInt[]); 481 extern PetscErrorCode MatGetRowSum(Mat,Vec); 482 extern PetscErrorCode MatTranspose(Mat,MatReuse,Mat*); 483 extern PetscErrorCode MatHermitianTranspose(Mat,MatReuse,Mat*); 484 extern PetscErrorCode MatPermute(Mat,IS,IS,Mat *); 485 extern PetscErrorCode MatDiagonalScale(Mat,Vec,Vec); 486 extern PetscErrorCode MatDiagonalSet(Mat,Vec,InsertMode); 487 extern PetscErrorCode MatEqual(Mat,Mat,PetscBool *); 488 extern PetscErrorCode MatMultEqual(Mat,Mat,PetscInt,PetscBool *); 489 extern PetscErrorCode MatMultAddEqual(Mat,Mat,PetscInt,PetscBool *); 490 extern PetscErrorCode MatMultTransposeEqual(Mat,Mat,PetscInt,PetscBool *); 491 extern PetscErrorCode MatMultTransposeAddEqual(Mat,Mat,PetscInt,PetscBool *); 492 493 extern PetscErrorCode MatNorm(Mat,NormType,PetscReal *); 494 extern PetscErrorCode MatGetColumnNorms(Mat,NormType,PetscReal *); 495 extern PetscErrorCode MatZeroEntries(Mat); 496 extern PetscErrorCode MatZeroRows(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 497 extern PetscErrorCode MatZeroRowsIS(Mat,IS,PetscScalar,Vec,Vec); 498 extern PetscErrorCode MatZeroRowsStencil(Mat,PetscInt,const MatStencil [],PetscScalar,Vec,Vec); 499 extern PetscErrorCode MatZeroRowsColumnsStencil(Mat,PetscInt,const MatStencil[],PetscScalar,Vec,Vec); 500 extern PetscErrorCode MatZeroRowsColumns(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 501 extern PetscErrorCode 
MatZeroRowsColumnsIS(Mat,IS,PetscScalar,Vec,Vec); 502 503 extern PetscErrorCode MatUseScaledForm(Mat,PetscBool ); 504 extern PetscErrorCode MatScaleSystem(Mat,Vec,Vec); 505 extern PetscErrorCode MatUnScaleSystem(Mat,Vec,Vec); 506 507 extern PetscErrorCode MatGetSize(Mat,PetscInt*,PetscInt*); 508 extern PetscErrorCode MatGetLocalSize(Mat,PetscInt*,PetscInt*); 509 extern PetscErrorCode MatGetOwnershipRange(Mat,PetscInt*,PetscInt*); 510 extern PetscErrorCode MatGetOwnershipRanges(Mat,const PetscInt**); 511 extern PetscErrorCode MatGetOwnershipRangeColumn(Mat,PetscInt*,PetscInt*); 512 extern PetscErrorCode MatGetOwnershipRangesColumn(Mat,const PetscInt**); 513 514 extern PetscErrorCode MatGetSubMatrices(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 515 extern PetscErrorCode MatGetSubMatricesParallel(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat *[]); 516 extern PetscErrorCode MatDestroyMatrices(PetscInt,Mat *[]); 517 extern PetscErrorCode MatGetSubMatrix(Mat,IS,IS,MatReuse,Mat *); 518 extern PetscErrorCode MatGetLocalSubMatrix(Mat,IS,IS,Mat*); 519 extern PetscErrorCode MatRestoreLocalSubMatrix(Mat,IS,IS,Mat*); 520 extern PetscErrorCode MatGetSeqNonzeroStructure(Mat,Mat*); 521 extern PetscErrorCode MatDestroySeqNonzeroStructure(Mat*); 522 523 extern PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJ(MPI_Comm,Mat,PetscInt,MatReuse,Mat*); 524 extern PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJSymbolic(MPI_Comm,Mat,PetscInt,Mat*); 525 extern PetscErrorCode MatCreateMPIAIJConcatenateSeqAIJNumeric(MPI_Comm,Mat,PetscInt,Mat); 526 extern PetscErrorCode MatCreateMPIAIJSumSeqAIJ(MPI_Comm,Mat,PetscInt,PetscInt,MatReuse,Mat*); 527 extern PetscErrorCode MatCreateMPIAIJSumSeqAIJSymbolic(MPI_Comm,Mat,PetscInt,PetscInt,Mat*); 528 extern PetscErrorCode MatCreateMPIAIJSumSeqAIJNumeric(Mat,Mat); 529 extern PetscErrorCode MatMPIAIJGetLocalMat(Mat,MatReuse,Mat*); 530 extern PetscErrorCode MatMPIAIJGetLocalMatCondensed(Mat,MatReuse,IS*,IS*,Mat*); 531 extern PetscErrorCode 
MatGetBrowsOfAcols(Mat,Mat,MatReuse,IS*,IS*,Mat*); 532 #if defined (PETSC_USE_CTABLE) 533 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscTable *, VecScatter *); 534 #else 535 extern PetscErrorCode MatGetCommunicationStructs(Mat, Vec *, PetscInt *[], VecScatter *); 536 #endif 537 extern PetscErrorCode MatGetGhosts(Mat, PetscInt *,const PetscInt *[]); 538 539 extern PetscErrorCode MatIncreaseOverlap(Mat,PetscInt,IS[],PetscInt); 540 541 extern PetscErrorCode MatMatMult(Mat,Mat,MatReuse,PetscReal,Mat*); 542 extern PetscErrorCode MatMatMultSymbolic(Mat,Mat,PetscReal,Mat*); 543 extern PetscErrorCode MatMatMultNumeric(Mat,Mat,Mat); 544 545 extern PetscErrorCode MatPtAP(Mat,Mat,MatReuse,PetscReal,Mat*); 546 extern PetscErrorCode MatPtAPSymbolic(Mat,Mat,PetscReal,Mat*); 547 extern PetscErrorCode MatPtAPNumeric(Mat,Mat,Mat); 548 extern PetscErrorCode MatRARt(Mat,Mat,MatReuse,PetscReal,Mat*); 549 extern PetscErrorCode MatRARtSymbolic(Mat,Mat,PetscReal,Mat*); 550 extern PetscErrorCode MatRARtNumeric(Mat,Mat,Mat); 551 552 extern PetscErrorCode MatTransposeMatMult(Mat,Mat,MatReuse,PetscReal,Mat*); 553 extern PetscErrorCode MatTransposetMatMultSymbolic(Mat,Mat,PetscReal,Mat*); 554 extern PetscErrorCode MatTransposetMatMultNumeric(Mat,Mat,Mat); 555 extern PetscErrorCode MatMatTransposeMult(Mat,Mat,MatReuse,PetscReal,Mat*); 556 extern PetscErrorCode MatMatTransposeMultSymbolic(Mat,Mat,PetscReal,Mat*); 557 extern PetscErrorCode MatMatTransposeMultNumeric(Mat,Mat,Mat); 558 559 extern PetscErrorCode MatAXPY(Mat,PetscScalar,Mat,MatStructure); 560 extern PetscErrorCode MatAYPX(Mat,PetscScalar,Mat,MatStructure); 561 562 extern PetscErrorCode MatScale(Mat,PetscScalar); 563 extern PetscErrorCode MatShift(Mat,PetscScalar); 564 565 extern PetscErrorCode MatSetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 566 extern PetscErrorCode MatSetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping,ISLocalToGlobalMapping); 567 extern PetscErrorCode 
MatGetLocalToGlobalMapping(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 568 extern PetscErrorCode MatGetLocalToGlobalMappingBlock(Mat,ISLocalToGlobalMapping*,ISLocalToGlobalMapping*); 569 extern PetscErrorCode MatZeroRowsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 570 extern PetscErrorCode MatZeroRowsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 571 extern PetscErrorCode MatZeroRowsColumnsLocal(Mat,PetscInt,const PetscInt [],PetscScalar,Vec,Vec); 572 extern PetscErrorCode MatZeroRowsColumnsLocalIS(Mat,IS,PetscScalar,Vec,Vec); 573 extern PetscErrorCode MatSetValuesLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 574 extern PetscErrorCode MatSetValuesBlockedLocal(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[],const PetscScalar[],InsertMode); 575 576 extern PetscErrorCode MatStashSetInitialSize(Mat,PetscInt,PetscInt); 577 extern PetscErrorCode MatStashGetInfo(Mat,PetscInt*,PetscInt*,PetscInt*,PetscInt*); 578 579 extern PetscErrorCode MatInterpolate(Mat,Vec,Vec); 580 extern PetscErrorCode MatInterpolateAdd(Mat,Vec,Vec,Vec); 581 extern PetscErrorCode MatRestrict(Mat,Vec,Vec); 582 extern PetscErrorCode MatGetVecs(Mat,Vec*,Vec*); 583 extern PetscErrorCode MatGetRedundantMatrix(Mat,PetscInt,MPI_Comm,PetscInt,MatReuse,Mat*); 584 extern PetscErrorCode MatGetMultiProcBlock(Mat,MPI_Comm,MatReuse,Mat*); 585 extern PetscErrorCode MatFindZeroDiagonals(Mat,IS*); 586 587 /*MC 588 MatSetValue - Set a single entry into a matrix. 589 590 Not collective 591 592 Input Parameters: 593 + m - the matrix 594 . row - the row location of the entry 595 . col - the column location of the entry 596 . value - the value to insert 597 - mode - either INSERT_VALUES or ADD_VALUES 598 599 Notes: 600 For efficiency one should use MatSetValues() and set several or many 601 values simultaneously if possible. 
602 603 Level: beginner 604 605 .seealso: MatSetValues(), MatSetValueLocal() 606 M*/ 607 PETSC_STATIC_INLINE PetscErrorCode MatSetValue(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValues(v,1,&i,1,&j,&va,mode);} 608 609 PETSC_STATIC_INLINE PetscErrorCode MatGetValue(Mat v,PetscInt i,PetscInt j,PetscScalar *va) {return MatGetValues(v,1,&i,1,&j,va);} 610 611 PETSC_STATIC_INLINE PetscErrorCode MatSetValueLocal(Mat v,PetscInt i,PetscInt j,PetscScalar va,InsertMode mode) {return MatSetValuesLocal(v,1,&i,1,&j,&va,mode);} 612 613 /*MC 614 MatPreallocateInitialize - Begins the block of code that will count the number of nonzeros per 615 row in a matrix providing the data that one can use to correctly preallocate the matrix. 616 617 Synopsis: 618 PetscErrorCode MatPreallocateInitialize(MPI_Comm comm, PetscInt nrows, PetscInt ncols, PetscInt *dnz, PetscInt *onz) 619 620 Collective on MPI_Comm 621 622 Input Parameters: 623 + comm - the communicator that will share the eventually allocated matrix 624 . nrows - the number of LOCAL rows in the matrix 625 - ncols - the number of LOCAL columns in the matrix 626 627 Output Parameters: 628 + dnz - the array that will be passed to the matrix preallocation routines 629 - onz - the other array passed to the matrix preallocation routines 630 631 632 Level: intermediate 633 634 Notes: 635 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 636 637 Do not malloc or free dnz and onz, that is handled internally by these routines 638 639 Use MatPreallocateInitializeSymmetric() for symmetric matrices (MPISBAIJ matrices) 640 641 This is a MACRO not a function because it has a leading { that is closed by MatPreallocateFinalize(). 
642 643 Concepts: preallocation^Matrix 644 645 .seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(), 646 MatPreallocateInitializeSymmetric(), MatPreallocateSymmetricSetLocal() 647 M*/ 648 #define MatPreallocateInitialize(comm,nrows,ncols,dnz,onz) 0; \ 649 { \ 650 PetscErrorCode _4_ierr; PetscInt __nrows = (nrows),__ctmp = (ncols),__rstart,__start,__end; \ 651 _4_ierr = PetscMalloc2(__nrows,PetscInt,&dnz,__nrows,PetscInt,&onz);CHKERRQ(_4_ierr); \ 652 _4_ierr = PetscMemzero(dnz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr);\ 653 _4_ierr = PetscMemzero(onz,__nrows*sizeof(PetscInt));CHKERRQ(_4_ierr); __start = 0; __end = __start; \ 654 _4_ierr = MPI_Scan(&__ctmp,&__end,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __start = __end - __ctmp;\ 655 _4_ierr = MPI_Scan(&__nrows,&__rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(_4_ierr); __rstart = __rstart - __nrows; 656 657 /*MC 658 MatPreallocateSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be 659 inserted using a local number of the rows and columns 660 661 Synopsis: 662 PetscErrorCode MatPreallocateSetLocal(ISLocalToGlobalMapping rmap,PetscInt nrows, PetscInt *rows,ISLocalToGlobalMapping cmap,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz) 663 664 Not Collective 665 666 Input Parameters: 667 + rmap - the row mapping from local numbering to global numbering 668 . nrows - the number of rows indicated 669 . rows - the indices of the rows 670 . cmap - the column mapping from local to global numbering 671 . ncols - the number of columns in the matrix 672 . cols - the columns indicated 673 . dnz - the array that will be passed to the matrix preallocation routines 674 - onz - the other array passed to the matrix preallocation routines 675 676 677 Level: intermediate 678 679 Notes: 680 See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details. 

   Do not malloc or free dnz and onz, that is handled internally by these routines

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal()
M*/
/* Maps the local row and column indices to global numbering in place, then counts each row via MatPreallocateSet().
   NOTE(review): rows and cols are overwritten with their global translations — callers must not reuse the local values. */
#define MatPreallocateSetLocal(rmap,nrows,rows,cmap,ncols,cols,dnz,onz) 0; \
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(rmap,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(cmap,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}

/*MC
   MatPreallocateSymmetricSetLocal - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using a local number of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSetLocal(ISLocalToGlobalMapping map,PetscInt nrows, PetscInt *rows,PetscInt ncols, PetscInt *cols,PetscInt *dnz, PetscInt *onz)

   Not Collective

   Input Parameters:
+  map - the mapping between local numbering and global numbering
.  nrows - the number of rows indicated
.  rows - the indices of the rows
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
/* Same as MatPreallocateSetLocal() but uses a single map for rows and columns and counts only the
   upper triangle via MatPreallocateSymmetricSet(); rows and cols are translated to global numbering in place */
#define MatPreallocateSymmetricSetLocal(map,nrows,rows,ncols,cols,dnz,onz) 0;\
{\
  PetscInt __l;\
  _4_ierr = ISLocalToGlobalMappingApply(map,nrows,rows,rows);CHKERRQ(_4_ierr);\
  _4_ierr = ISLocalToGlobalMappingApply(map,ncols,cols,cols);CHKERRQ(_4_ierr);\
  for (__l=0;__l<nrows;__l++) {\
    _4_ierr = MatPreallocateSymmetricSet((rows)[__l],ncols,cols,dnz,onz);CHKERRQ(_4_ierr);\
  }\
}

/*MC
   MatPreallocateSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns (the row must be owned by this process)

   Synopsis:
   PetscErrorCode MatPreallocateSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
-  cols - the columns indicated

   Output Parameters:
+  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
/* Validates that row is in this process's ownership range [__rstart,__rstart+__nrows), then for each column
   increments the off-diagonal count onz[] (column outside [__start,__end)) or the diagonal count dnz[] */
#define MatPreallocateSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  if (row < __rstart) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D less than first local row %D",row,__rstart);\
  if (row >= __rstart+__nrows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Trying to set preallocation for row %D greater than last local row %D",row,__rstart+__nrows-1);\
  for (__i=0; __i<nc; __i++) {\
    if ((cols)[__i] < __start || (cols)[__i] >= __end) onz[row - __rstart]++; \
    else dnz[row - __rstart]++;\
  }\
}

/*MC
   MatPreallocateSymmetricSet - Indicates the locations (rows and columns) in the matrix where nonzeros will be
       inserted using the global numbering of the rows and columns

   Synopsis:
   PetscErrorCode MatPreallocateSymmetricSet(PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
+  row - the row
.  ncols - the number of columns in the matrix
.  cols - the columns indicated
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses variables declared in MatPreallocateInitialize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateFinalize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateInitialize(),
          MatPreallocateInitialize(), MatPreallocateSymmetricSetLocal(), MatPreallocateSetLocal()
M*/
/* Like MatPreallocateSet() but counts only the upper triangle (cols[__i] >= row) as diagonal entries;
   columns at or beyond __end are off-diagonal.  No ownership-range check is performed here. */
#define MatPreallocateSymmetricSet(row,nc,cols,dnz,onz) 0;\
{ PetscInt __i; \
  for (__i=0; __i<nc; __i++) {\
    if (cols[__i] >= __end) onz[row - __rstart]++; \
    else if (cols[__i] >= row) dnz[row - __rstart]++;\
  }\
}

/*MC
   MatPreallocateLocation - An alternative to MatPreallocateSet() that puts the nonzero locations into the matrix if it exists

   Synopsis:
   PetscErrorCode MatPreallocateLocation(Mat A,PetscInt row,PetscInt ncols,PetscInt *cols,PetscInt *dnz,PetscInt *onz)

   Not Collective

   Input Parameters:
.  A - matrix
.  row - row where values exist (must be local to this process)
.  ncols - number of columns
.  cols - columns with nonzeros
.  dnz - the array that will be passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it uses a bunch of variables private to the MatPreallocation.... routines.

   Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricSetLocal()
M*/
/* If A exists, insert the (zero-valued) locations directly; otherwise accumulate the counts with MatPreallocateSet().
   NOTE(review): this macro uses ierr, not the _4_ierr declared by MatPreallocateInitialize() — the caller must have
   declared ierr in scope; confirm this asymmetry with the sibling macros is intentional. */
#define MatPreallocateLocation(A,row,ncols,cols,dnz,onz) 0;if (A) {ierr = MatSetValues(A,1,&row,ncols,cols,PETSC_NULL,INSERT_VALUES);CHKERRQ(ierr);} else {ierr = MatPreallocateSet(row,ncols,cols,dnz,onz);CHKERRQ(ierr);}


/*MC
   MatPreallocateFinalize - Ends the block of code that will count the number of nonzeros per
       row in a matrix providing the data that one can use to correctly preallocate the matrix.

   Synopsis:
   PetscErrorCode MatPreallocateFinalize(PetscInt *dnz, PetscInt *onz)

   Collective on MPI_Comm

   Input Parameters:
+  dnz - the array that was passed to the matrix preallocation routines
-  onz - the other array passed to the matrix preallocation routines


   Level: intermediate

   Notes:
    See the <A href="../../docs/manual.pdf#nameddest=ch_performance">Hints for Performance Improvement</A> chapter in the users manual for more details.

   Do not malloc or free dnz and onz that is handled internally by these routines

   This is a MACRO not a function because it closes the { started in MatPreallocateInitialize().

   Concepts: preallocation^Matrix

.seealso: MatPreallocateInitialize(), MatPreallocateSet(), MatPreallocateSymmetricSet(), MatPreallocateSetLocal(),
          MatPreallocateSymmetricSetLocal()
M*/
/* Frees the dnz/onz counters and closes the scope opened by MatPreallocateInitialize() */
#define MatPreallocateFinalize(dnz,onz) 0;_4_ierr = PetscFree2(dnz,onz);CHKERRQ(_4_ierr);}



/* Routines unique to particular data structures */
extern PetscErrorCode MatShellGetContext(Mat,void *);

extern PetscErrorCode MatInodeAdjustForInodes(Mat,IS*,IS*);
extern PetscErrorCode MatInodeGetInodeSizes(Mat,PetscInt *,PetscInt *[],PetscInt *);

extern PetscErrorCode MatSeqAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode MatSeqBAIJSetColumnIndices(Mat,PetscInt[]);
extern PetscErrorCode MatCreateSeqAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqSBAIJWithArrays(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*);
extern PetscErrorCode MatCreateSeqAIJFromTriple(MPI_Comm,PetscInt,PetscInt,PetscInt[],PetscInt[],PetscScalar[],Mat*,PetscInt,PetscBool);

/* NOTE(review): sentinel value recognized by the preallocation routines — confirm exact semantics in the matrix implementations */
#define MAT_SKIP_ALLOCATION -4

extern PetscErrorCode MatSeqBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
extern PetscErrorCode MatSeqSBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[]);
extern PetscErrorCode MatSeqAIJSetPreallocation(Mat,PetscInt,const PetscInt[]);

extern PetscErrorCode MatMPIBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode MatMPISBAIJSetPreallocation(Mat,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode MatMPIAIJSetPreallocation(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);
extern PetscErrorCode MatSeqAIJSetPreallocationCSR(Mat,const
PetscInt [],const PetscInt [],const PetscScalar []); 912 extern PetscErrorCode MatSeqBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 913 extern PetscErrorCode MatMPIAIJSetPreallocationCSR(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 914 extern PetscErrorCode MatMPIBAIJSetPreallocationCSR(Mat,PetscInt,const PetscInt[],const PetscInt[],const PetscScalar[]); 915 extern PetscErrorCode MatMPIAdjSetPreallocation(Mat,PetscInt[],PetscInt[],PetscInt[]); 916 extern PetscErrorCode MatMPIDenseSetPreallocation(Mat,PetscScalar[]); 917 extern PetscErrorCode MatSeqDenseSetPreallocation(Mat,PetscScalar[]); 918 extern PetscErrorCode MatMPIAIJGetSeqAIJ(Mat,Mat*,Mat*,PetscInt*[]); 919 extern PetscErrorCode MatMPIBAIJGetSeqBAIJ(Mat,Mat*,Mat*,PetscInt*[]); 920 extern PetscErrorCode MatAdicSetLocalFunction(Mat,void (*)(void)); 921 extern PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat,Mat*); 922 923 extern PetscErrorCode MatSeqDenseSetLDA(Mat,PetscInt); 924 extern PetscErrorCode MatDenseGetLocalMatrix(Mat,Mat*); 925 926 extern PetscErrorCode MatStoreValues(Mat); 927 extern PetscErrorCode MatRetrieveValues(Mat); 928 929 extern PetscErrorCode MatDAADSetCtx(Mat,void*); 930 931 extern PetscErrorCode MatFindNonzeroRows(Mat,IS*); 932 /* 933 These routines are not usually accessed directly, rather solving is 934 done through the KSP and PC interfaces. 
935 */ 936 937 /*J 938 MatOrderingType - String with the name of a PETSc matrix ordering or the creation function 939 with an optional dynamic library name, for example 940 http://www.mcs.anl.gov/petsc/lib.a:orderingcreate() 941 942 Level: beginner 943 944 Cannot use const because the PC objects manipulate the string 945 946 .seealso: MatGetOrdering() 947 J*/ 948 #define MatOrderingType char* 949 #define MATORDERINGNATURAL "natural" 950 #define MATORDERINGND "nd" 951 #define MATORDERING1WD "1wd" 952 #define MATORDERINGRCM "rcm" 953 #define MATORDERINGQMD "qmd" 954 #define MATORDERINGROWLENGTH "rowlength" 955 #define MATORDERINGAMD "amd" /* only works if UMFPACK is installed with PETSc */ 956 957 extern PetscErrorCode MatGetOrdering(Mat,const MatOrderingType,IS*,IS*); 958 extern PetscErrorCode MatGetOrderingList(PetscFList *list); 959 extern PetscErrorCode MatOrderingRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,const MatOrderingType,IS*,IS*)); 960 961 /*MC 962 MatOrderingRegisterDynamic - Adds a new sparse matrix ordering to the matrix package. 963 964 Synopsis: 965 PetscErrorCode MatOrderingRegisterDynamic(const char *name_ordering,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatOrdering)) 966 967 Not Collective 968 969 Input Parameters: 970 + sname - name of ordering (for example MATORDERINGND) 971 . path - location of library where creation routine is 972 . name - name of function that creates the ordering type,a string 973 - function - function pointer that creates the ordering 974 975 Level: developer 976 977 If dynamic libraries are used, then the fourth input argument (function) 978 is ignored. 

   Sample usage:
.vb
   MatOrderingRegisterDynamic("my_order",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyOrder",MyOrder);
.ve

   Then, your partitioner can be chosen with the procedural interface via
$     MatOrderingSetType(part,"my_order")
   or at runtime via the option
$     -pc_factor_mat_ordering_type my_order

   ${PETSC_ARCH} occurring in pathname will be replaced with appropriate values.

.keywords: matrix, ordering, register

.seealso: MatOrderingRegisterDestroy(), MatOrderingRegisterAll()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,0)
#else
#define MatOrderingRegisterDynamic(a,b,c,d) MatOrderingRegister(a,b,c,d)
#endif

extern PetscErrorCode MatOrderingRegisterDestroy(void);
extern PetscErrorCode MatOrderingRegisterAll(const char[]);
extern PetscBool      MatOrderingRegisterAllCalled;
extern PetscFList     MatOrderingList;

extern PetscErrorCode MatReorderForNonzeroDiagonal(Mat,PetscReal,IS,IS);

/*S
    MatFactorShiftType - Numeric Shift.

   Level: beginner

S*/
typedef enum {MAT_SHIFT_NONE,MAT_SHIFT_NONZERO,MAT_SHIFT_POSITIVE_DEFINITE,MAT_SHIFT_INBLOCKS} MatFactorShiftType;
extern const char *MatFactorShiftTypes[];

/*S
   MatFactorInfo - Data passed into the matrix factorization routines

   In Fortran these are simply double precision arrays of size MAT_FACTORINFO_SIZE, that is use
$     MatFactorInfo  info(MAT_FACTORINFO_SIZE)

   Notes: These are not usually directly used by users, instead use PC type of LU, ILU, CHOLESKY or ICC.

      You can use MatFactorInfoInitialize() to set default values.

   Level: developer

.seealso: MatLUFactorSymbolic(), MatILUFactorSymbolic(), MatCholeskyFactorSymbolic(), MatICCFactorSymbolic(), MatICCFactor(),
          MatFactorInfoInitialize()

S*/
/* All fields are PetscReal so the struct maps onto a plain Fortran double precision array (see manual page above) */
typedef struct {
  PetscReal diagonal_fill;  /* force diagonal to fill in if initially not filled */
  PetscReal usedt;          /* NOTE(review): presumably nonzero selects drop-tolerance (dt) based factorization — confirm */
  PetscReal dt;             /* drop tolerance */
  PetscReal dtcol;          /* tolerance for pivoting */
  PetscReal dtcount;        /* maximum nonzeros to be allowed per row */
  PetscReal fill;           /* expected fill, nonzeros in factored matrix/nonzeros in original matrix */
  PetscReal levels;         /* ICC/ILU(levels) */
  PetscReal pivotinblocks;  /* for BAIJ and SBAIJ matrices pivot in factorization on blocks, default 1.0
                               factorization may be faster if do not pivot */
  PetscReal zeropivot;      /* pivot is called zero if less than this */
  PetscReal shifttype;      /* type of shift added to matrix factor to prevent zero pivots */
  PetscReal shiftamount;    /* how large the shift is */
} MatFactorInfo;

extern PetscErrorCode MatFactorInfoInitialize(MatFactorInfo*);
extern PetscErrorCode MatCholeskyFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatCholeskyFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatCholeskyFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatLUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatILUFactor(Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatLUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatILUFactorSymbolic(Mat,Mat,IS,IS,const MatFactorInfo*);
extern PetscErrorCode MatICCFactorSymbolic(Mat,Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatICCFactor(Mat,IS,const MatFactorInfo*);
extern PetscErrorCode MatLUFactorNumeric(Mat,Mat,const MatFactorInfo*);
extern PetscErrorCode MatGetInertia(Mat,PetscInt*,PetscInt*,PetscInt*);
extern
PetscErrorCode MatSolve(Mat,Vec,Vec); 1063 extern PetscErrorCode MatForwardSolve(Mat,Vec,Vec); 1064 extern PetscErrorCode MatBackwardSolve(Mat,Vec,Vec); 1065 extern PetscErrorCode MatSolveAdd(Mat,Vec,Vec,Vec); 1066 extern PetscErrorCode MatSolveTranspose(Mat,Vec,Vec); 1067 extern PetscErrorCode MatSolveTransposeAdd(Mat,Vec,Vec,Vec); 1068 extern PetscErrorCode MatSolves(Mat,Vecs,Vecs); 1069 1070 extern PetscErrorCode MatSetUnfactored(Mat); 1071 1072 /*E 1073 MatSORType - What type of (S)SOR to perform 1074 1075 Level: beginner 1076 1077 May be bitwise ORd together 1078 1079 Any additions/changes here MUST also be made in include/finclude/petscmat.h 1080 1081 MatSORType may be bitwise ORd together, so do not change the numbers 1082 1083 .seealso: MatSOR() 1084 E*/ 1085 typedef enum {SOR_FORWARD_SWEEP=1,SOR_BACKWARD_SWEEP=2,SOR_SYMMETRIC_SWEEP=3, 1086 SOR_LOCAL_FORWARD_SWEEP=4,SOR_LOCAL_BACKWARD_SWEEP=8, 1087 SOR_LOCAL_SYMMETRIC_SWEEP=12,SOR_ZERO_INITIAL_GUESS=16, 1088 SOR_EISENSTAT=32,SOR_APPLY_UPPER=64,SOR_APPLY_LOWER=128} MatSORType; 1089 extern PetscErrorCode MatSOR(Mat,Vec,PetscReal,MatSORType,PetscReal,PetscInt,PetscInt,Vec); 1090 1091 /* 1092 These routines are for efficiently computing Jacobians via finite differences. 
1093 */ 1094 1095 /*J 1096 MatColoringType - String with the name of a PETSc matrix coloring or the creation function 1097 with an optional dynamic library name, for example 1098 http://www.mcs.anl.gov/petsc/lib.a:coloringcreate() 1099 1100 Level: beginner 1101 1102 .seealso: MatGetColoring() 1103 J*/ 1104 #define MatColoringType char* 1105 #define MATCOLORINGNATURAL "natural" 1106 #define MATCOLORINGSL "sl" 1107 #define MATCOLORINGLF "lf" 1108 #define MATCOLORINGID "id" 1109 1110 extern PetscErrorCode MatGetColoring(Mat,const MatColoringType,ISColoring*); 1111 extern PetscErrorCode MatColoringRegister(const char[],const char[],const char[],PetscErrorCode(*)(Mat,MatColoringType,ISColoring *)); 1112 1113 /*MC 1114 MatColoringRegisterDynamic - Adds a new sparse matrix coloring to the 1115 matrix package. 1116 1117 Synopsis: 1118 PetscErrorCode MatColoringRegisterDynamic(const char *name_coloring,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatColoring)) 1119 1120 Not Collective 1121 1122 Input Parameters: 1123 + sname - name of Coloring (for example MATCOLORINGSL) 1124 . path - location of library where creation routine is 1125 . name - name of function that creates the Coloring type, a string 1126 - function - function pointer that creates the coloring 1127 1128 Level: developer 1129 1130 If dynamic libraries are used, then the fourth input argument (function) 1131 is ignored. 1132 1133 Sample usage: 1134 .vb 1135 MatColoringRegisterDynamic("my_color",/home/username/my_lib/lib/libO/solaris/mylib.a, 1136 "MyColor",MyColor); 1137 .ve 1138 1139 Then, your partitioner can be chosen with the procedural interface via 1140 $ MatColoringSetType(part,"my_color") 1141 or at runtime via the option 1142 $ -mat_coloring_type my_color 1143 1144 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1145 1146 .keywords: matrix, Coloring, register 1147 1148 .seealso: MatColoringRegisterDestroy(), MatColoringRegisterAll() 1149 M*/ 1150 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1151 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,0) 1152 #else 1153 #define MatColoringRegisterDynamic(a,b,c,d) MatColoringRegister(a,b,c,d) 1154 #endif 1155 1156 extern PetscBool MatColoringRegisterAllCalled; 1157 1158 extern PetscErrorCode MatColoringRegisterAll(const char[]); 1159 extern PetscErrorCode MatColoringRegisterDestroy(void); 1160 extern PetscErrorCode MatColoringPatch(Mat,PetscInt,PetscInt,ISColoringValue[],ISColoring*); 1161 1162 /*S 1163 MatFDColoring - Object for computing a sparse Jacobian via finite differences 1164 and coloring 1165 1166 Level: beginner 1167 1168 Concepts: coloring, sparse Jacobian, finite differences 1169 1170 .seealso: MatFDColoringCreate() 1171 S*/ 1172 typedef struct _p_MatFDColoring* MatFDColoring; 1173 1174 extern PetscErrorCode MatFDColoringCreate(Mat,ISColoring,MatFDColoring *); 1175 extern PetscErrorCode MatFDColoringDestroy(MatFDColoring*); 1176 extern PetscErrorCode MatFDColoringView(MatFDColoring,PetscViewer); 1177 extern PetscErrorCode MatFDColoringSetFunction(MatFDColoring,PetscErrorCode (*)(void),void*); 1178 extern PetscErrorCode MatFDColoringGetFunction(MatFDColoring,PetscErrorCode (**)(void),void**); 1179 extern PetscErrorCode MatFDColoringSetParameters(MatFDColoring,PetscReal,PetscReal); 1180 extern PetscErrorCode MatFDColoringSetFromOptions(MatFDColoring); 1181 extern PetscErrorCode MatFDColoringApply(Mat,MatFDColoring,Vec,MatStructure*,void *); 1182 extern PetscErrorCode MatFDColoringSetF(MatFDColoring,Vec); 1183 extern PetscErrorCode MatFDColoringGetPerturbedColumns(MatFDColoring,PetscInt*,PetscInt*[]); 1184 1185 /*S 1186 MatTransposeColoring - Object for computing a sparse matrix product C=A*B^T via coloring 1187 1188 Level: beginner 1189 1190 Concepts: coloring, sparse matrix product 1191 1192 .seealso: 
MatTransposeColoringCreate() 1193 S*/ 1194 typedef struct _p_MatTransposeColoring* MatTransposeColoring; 1195 1196 extern PetscErrorCode MatTransposeColoringCreate(Mat,ISColoring,MatTransposeColoring *); 1197 extern PetscErrorCode MatTransColoringApplySpToDen(MatTransposeColoring,Mat,Mat); 1198 extern PetscErrorCode MatTransColoringApplyDenToSp(MatTransposeColoring,Mat,Mat); 1199 extern PetscErrorCode MatTransposeColoringDestroy(MatTransposeColoring*); 1200 1201 /* 1202 These routines are for partitioning matrices: currently used only 1203 for adjacency matrix, MatCreateMPIAdj(). 1204 */ 1205 1206 /*S 1207 MatPartitioning - Object for managing the partitioning of a matrix or graph 1208 1209 Level: beginner 1210 1211 Concepts: partitioning 1212 1213 .seealso: MatPartitioningCreate(), MatPartitioningType 1214 S*/ 1215 typedef struct _p_MatPartitioning* MatPartitioning; 1216 1217 /*J 1218 MatPartitioningType - String with the name of a PETSc matrix partitioning or the creation function 1219 with an optional dynamic library name, for example 1220 http://www.mcs.anl.gov/petsc/lib.a:partitioningcreate() 1221 1222 Level: beginner 1223 dm 1224 .seealso: MatPartitioningCreate(), MatPartitioning 1225 J*/ 1226 #define MatPartitioningType char* 1227 #define MATPARTITIONINGCURRENT "current" 1228 #define MATPARTITIONINGSQUARE "square" 1229 #define MATPARTITIONINGPARMETIS "parmetis" 1230 #define MATPARTITIONINGCHACO "chaco" 1231 #define MATPARTITIONINGPARTY "party" 1232 #define MATPARTITIONINGPTSCOTCH "ptscotch" 1233 1234 1235 extern PetscErrorCode MatPartitioningCreate(MPI_Comm,MatPartitioning*); 1236 extern PetscErrorCode MatPartitioningSetType(MatPartitioning,const MatPartitioningType); 1237 extern PetscErrorCode MatPartitioningSetNParts(MatPartitioning,PetscInt); 1238 extern PetscErrorCode MatPartitioningSetAdjacency(MatPartitioning,Mat); 1239 extern PetscErrorCode MatPartitioningSetVertexWeights(MatPartitioning,const PetscInt[]); 1240 extern PetscErrorCode 
MatPartitioningSetPartitionWeights(MatPartitioning,const PetscReal []); 1241 extern PetscErrorCode MatPartitioningApply(MatPartitioning,IS*); 1242 extern PetscErrorCode MatPartitioningDestroy(MatPartitioning*); 1243 1244 extern PetscErrorCode MatPartitioningRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatPartitioning)); 1245 1246 /*MC 1247 MatPartitioningRegisterDynamic - Adds a new sparse matrix partitioning to the 1248 matrix package. 1249 1250 Synopsis: 1251 PetscErrorCode MatPartitioningRegisterDynamic(const char *name_partitioning,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatPartitioning)) 1252 1253 Not Collective 1254 1255 Input Parameters: 1256 + sname - name of partitioning (for example MATPARTITIONINGCURRENT) or parmetis 1257 . path - location of library where creation routine is 1258 . name - name of function that creates the partitioning type, a string 1259 - function - function pointer that creates the partitioning type 1260 1261 Level: developer 1262 1263 If dynamic libraries are used, then the fourth input argument (function) 1264 is ignored. 1265 1266 Sample usage: 1267 .vb 1268 MatPartitioningRegisterDynamic("my_part",/home/username/my_lib/lib/libO/solaris/mylib.a, 1269 "MyPartCreate",MyPartCreate); 1270 .ve 1271 1272 Then, your partitioner can be chosen with the procedural interface via 1273 $ MatPartitioningSetType(part,"my_part") 1274 or at runtime via the option 1275 $ -mat_partitioning_type my_part 1276 1277 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1278 1279 .keywords: matrix, partitioning, register 1280 1281 .seealso: MatPartitioningRegisterDestroy(), MatPartitioningRegisterAll() 1282 M*/ 1283 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1284 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,0) 1285 #else 1286 #define MatPartitioningRegisterDynamic(a,b,c,d) MatPartitioningRegister(a,b,c,d) 1287 #endif 1288 1289 extern PetscBool MatPartitioningRegisterAllCalled; 1290 1291 extern PetscErrorCode MatPartitioningRegisterAll(const char[]); 1292 extern PetscErrorCode MatPartitioningRegisterDestroy(void); 1293 1294 extern PetscErrorCode MatPartitioningView(MatPartitioning,PetscViewer); 1295 extern PetscErrorCode MatPartitioningSetFromOptions(MatPartitioning); 1296 extern PetscErrorCode MatPartitioningGetType(MatPartitioning,const MatPartitioningType*); 1297 1298 extern PetscErrorCode MatPartitioningParmetisSetCoarseSequential(MatPartitioning); 1299 extern PetscErrorCode MatPartitioningParmetisGetEdgeCut(MatPartitioning, PetscInt *); 1300 1301 typedef enum { MP_CHACO_MULTILEVEL=1,MP_CHACO_SPECTRAL=2,MP_CHACO_LINEAR=4,MP_CHACO_RANDOM=5,MP_CHACO_SCATTERED=6 } MPChacoGlobalType; 1302 extern const char *MPChacoGlobalTypes[]; 1303 typedef enum { MP_CHACO_KERNIGHAN=1,MP_CHACO_NONE=2 } MPChacoLocalType; 1304 extern const char *MPChacoLocalTypes[]; 1305 typedef enum { MP_CHACO_LANCZOS=0,MP_CHACO_RQI=1 } MPChacoEigenType; 1306 extern const char *MPChacoEigenTypes[]; 1307 1308 extern PetscErrorCode MatPartitioningChacoSetGlobal(MatPartitioning,MPChacoGlobalType); 1309 extern PetscErrorCode MatPartitioningChacoGetGlobal(MatPartitioning,MPChacoGlobalType*); 1310 extern PetscErrorCode MatPartitioningChacoSetLocal(MatPartitioning,MPChacoLocalType); 1311 extern PetscErrorCode MatPartitioningChacoGetLocal(MatPartitioning,MPChacoLocalType*); 1312 extern PetscErrorCode MatPartitioningChacoSetCoarseLevel(MatPartitioning,PetscReal); 1313 extern PetscErrorCode 
MatPartitioningChacoSetEigenSolver(MatPartitioning,MPChacoEigenType); 1314 extern PetscErrorCode MatPartitioningChacoGetEigenSolver(MatPartitioning,MPChacoEigenType*); 1315 extern PetscErrorCode MatPartitioningChacoSetEigenTol(MatPartitioning,PetscReal); 1316 extern PetscErrorCode MatPartitioningChacoGetEigenTol(MatPartitioning,PetscReal*); 1317 extern PetscErrorCode MatPartitioningChacoSetEigenNumber(MatPartitioning,PetscInt); 1318 extern PetscErrorCode MatPartitioningChacoGetEigenNumber(MatPartitioning,PetscInt*); 1319 1320 #define MP_PARTY_OPT "opt" 1321 #define MP_PARTY_LIN "lin" 1322 #define MP_PARTY_SCA "sca" 1323 #define MP_PARTY_RAN "ran" 1324 #define MP_PARTY_GBF "gbf" 1325 #define MP_PARTY_GCF "gcf" 1326 #define MP_PARTY_BUB "bub" 1327 #define MP_PARTY_DEF "def" 1328 extern PetscErrorCode MatPartitioningPartySetGlobal(MatPartitioning,const char*); 1329 #define MP_PARTY_HELPFUL_SETS "hs" 1330 #define MP_PARTY_KERNIGHAN_LIN "kl" 1331 #define MP_PARTY_NONE "no" 1332 extern PetscErrorCode MatPartitioningPartySetLocal(MatPartitioning,const char*); 1333 extern PetscErrorCode MatPartitioningPartySetCoarseLevel(MatPartitioning,PetscReal); 1334 extern PetscErrorCode MatPartitioningPartySetBipart(MatPartitioning,PetscBool); 1335 extern PetscErrorCode MatPartitioningPartySetMatchOptimization(MatPartitioning,PetscBool); 1336 1337 typedef enum { MP_PTSCOTCH_QUALITY,MP_PTSCOTCH_SPEED,MP_PTSCOTCH_BALANCE,MP_PTSCOTCH_SAFETY,MP_PTSCOTCH_SCALABILITY } MPPTScotchStrategyType; 1338 extern const char *MPPTScotchStrategyTypes[]; 1339 1340 extern PetscErrorCode MatPartitioningPTScotchSetImbalance(MatPartitioning,PetscReal); 1341 extern PetscErrorCode MatPartitioningPTScotchGetImbalance(MatPartitioning,PetscReal*); 1342 extern PetscErrorCode MatPartitioningPTScotchSetStrategy(MatPartitioning,MPPTScotchStrategyType); 1343 extern PetscErrorCode MatPartitioningPTScotchGetStrategy(MatPartitioning,MPPTScotchStrategyType*); 1344 1345 /* 1346 These routines are for coarsening matrices: 
 */

/*S
    MatCoarsen - Object for managing the coarsening of a graph (symmetric matrix)

   Level: beginner

   Concepts: coarsen

.seealso: MatCoarsenCreate(), MatCoarsenType
S*/
typedef struct _p_MatCoarsen* MatCoarsen;

/*J
    MatCoarsenType - String with the name of a PETSc matrix coarsen or the creation function
       with an optional dynamic library name, for example
       http://www.mcs.anl.gov/petsc/lib.a:coarsencreate()

   Level: beginner

.seealso: MatCoarsenCreate(), MatCoarsen
J*/
#define MatCoarsenType char*
#define MATCOARSENMIS  "mis"
#define MATCOARSENHEM  "hem"

/* linked list for aggregates */
typedef struct _PetscCDIntNd{
  struct _PetscCDIntNd *next;
  PetscInt             gid;   /* NOTE(review): presumably the global id of the vertex in the aggregate — confirm */
}PetscCDIntNd;

/* only used by node pool */
typedef struct _PetscCDArrNd{
  struct _PetscCDArrNd *next;
  struct _PetscCDIntNd *array;
}PetscCDArrNd;

typedef struct _PetscCoarsenData{
  /* node pool */
  PetscCDArrNd pool_list;
  PetscCDIntNd *new_node;
  PetscInt     new_left;
  PetscInt     chk_sz;
  PetscCDIntNd *extra_nodes;
  /* Array of lists */
  PetscCDIntNd **array;
  PetscInt     size;
  /* cache a Mat for communication data */
  Mat          mat;
  /* cache IS of removed equations */
  IS           removedIS;
}PetscCoarsenData;

extern PetscErrorCode MatCoarsenCreate(MPI_Comm,MatCoarsen*);
extern PetscErrorCode MatCoarsenSetType(MatCoarsen,const MatCoarsenType);
extern PetscErrorCode MatCoarsenSetAdjacency(MatCoarsen,Mat);
extern PetscErrorCode MatCoarsenSetGreedyOrdering(MatCoarsen,const IS);
extern PetscErrorCode MatCoarsenSetStrictAggs(MatCoarsen,PetscBool);
extern PetscErrorCode MatCoarsenSetVerbose(MatCoarsen,PetscInt);
extern PetscErrorCode MatCoarsenGetData( MatCoarsen, PetscCoarsenData ** );
extern PetscErrorCode MatCoarsenApply(MatCoarsen);
extern
PetscErrorCode MatCoarsenDestroy(MatCoarsen*); 1410 1411 extern PetscErrorCode MatCoarsenRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatCoarsen)); 1412 1413 /*MC 1414 MatCoarsenRegisterDynamic - Adds a new sparse matrix coarsen to the 1415 matrix package. 1416 1417 Synopsis: 1418 PetscErrorCode MatCoarsenRegisterDynamic(const char *name_coarsen,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatCoarsen)) 1419 1420 Not Collective 1421 1422 Input Parameters: 1423 + sname - name of coarsen (for example MATCOARSENMIS) 1424 . path - location of library where creation routine is 1425 . name - name of function that creates the coarsen type, a string 1426 - function - function pointer that creates the coarsen type 1427 1428 Level: developer 1429 1430 If dynamic libraries are used, then the fourth input argument (function) 1431 is ignored. 1432 1433 Sample usage: 1434 .vb 1435 MatCoarsenRegisterDynamic("my_agg",/home/username/my_lib/lib/libO/solaris/mylib.a, 1436 "MyAggCreate",MyAggCreate); 1437 .ve 1438 1439 Then, your aggregator can be chosen with the procedural interface via 1440 $ MatCoarsenSetType(agg,"my_agg") 1441 or at runtime via the option 1442 $ -mat_coarsen_type my_agg 1443 1444 $PETSC_ARCH occuring in pathname will be replaced with appropriate values. 
1445 1446 .keywords: matrix, coarsen, register 1447 1448 .seealso: MatCoarsenRegisterDestroy(), MatCoarsenRegisterAll() 1449 M*/ 1450 #if defined(PETSC_USE_DYNAMIC_LIBRARIES) 1451 #define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,0) 1452 #else 1453 #define MatCoarsenRegisterDynamic(a,b,c,d) MatCoarsenRegister(a,b,c,d) 1454 #endif 1455 1456 extern PetscBool MatCoarsenRegisterAllCalled; 1457 1458 extern PetscErrorCode MatCoarsenRegisterAll(const char[]); 1459 extern PetscErrorCode MatCoarsenRegisterDestroy(void); 1460 1461 extern PetscErrorCode MatCoarsenView(MatCoarsen,PetscViewer); 1462 extern PetscErrorCode MatCoarsenSetFromOptions(MatCoarsen); 1463 extern PetscErrorCode MatCoarsenGetType(MatCoarsen,const MatCoarsenType*); 1464 1465 1466 extern PetscErrorCode MatMeshToVertexGraph(Mat,PetscInt,Mat*); 1467 extern PetscErrorCode MatMeshToCellGraph(Mat,PetscInt,Mat*); 1468 1469 /* 1470 If you add entries here you must also add them to finclude/petscmat.h 1471 */ 1472 typedef enum { MATOP_SET_VALUES=0, 1473 MATOP_GET_ROW=1, 1474 MATOP_RESTORE_ROW=2, 1475 MATOP_MULT=3, 1476 MATOP_MULT_ADD=4, 1477 MATOP_MULT_TRANSPOSE=5, 1478 MATOP_MULT_TRANSPOSE_ADD=6, 1479 MATOP_SOLVE=7, 1480 MATOP_SOLVE_ADD=8, 1481 MATOP_SOLVE_TRANSPOSE=9, 1482 MATOP_SOLVE_TRANSPOSE_ADD=10, 1483 MATOP_LUFACTOR=11, 1484 MATOP_CHOLESKYFACTOR=12, 1485 MATOP_SOR=13, 1486 MATOP_TRANSPOSE=14, 1487 MATOP_GETINFO=15, 1488 MATOP_EQUAL=16, 1489 MATOP_GET_DIAGONAL=17, 1490 MATOP_DIAGONAL_SCALE=18, 1491 MATOP_NORM=19, 1492 MATOP_ASSEMBLY_BEGIN=20, 1493 MATOP_ASSEMBLY_END=21, 1494 MATOP_SET_OPTION=22, 1495 MATOP_ZERO_ENTRIES=23, 1496 MATOP_ZERO_ROWS=24, 1497 MATOP_LUFACTOR_SYMBOLIC=25, 1498 MATOP_LUFACTOR_NUMERIC=26, 1499 MATOP_CHOLESKY_FACTOR_SYMBOLIC=27, 1500 MATOP_CHOLESKY_FACTOR_NUMERIC=28, 1501 MATOP_SETUP_PREALLOCATION=29, 1502 MATOP_ILUFACTOR_SYMBOLIC=30, 1503 MATOP_ICCFACTOR_SYMBOLIC=31, 1504 MATOP_GET_ARRAY=32, 1505 MATOP_RESTORE_ARRAY=33, 1506 MATOP_DUPLICATE=34, 1507 
MATOP_FORWARD_SOLVE=35, 1508 MATOP_BACKWARD_SOLVE=36, 1509 MATOP_ILUFACTOR=37, 1510 MATOP_ICCFACTOR=38, 1511 MATOP_AXPY=39, 1512 MATOP_GET_SUBMATRICES=40, 1513 MATOP_INCREASE_OVERLAP=41, 1514 MATOP_GET_VALUES=42, 1515 MATOP_COPY=43, 1516 MATOP_GET_ROW_MAX=44, 1517 MATOP_SCALE=45, 1518 MATOP_SHIFT=46, 1519 MATOP_DIAGONAL_SET=47, 1520 MATOP_ILUDT_FACTOR=48, 1521 MATOP_SET_BLOCK_SIZE=49, 1522 MATOP_GET_ROW_IJ=50, 1523 MATOP_RESTORE_ROW_IJ=51, 1524 MATOP_GET_COLUMN_IJ=52, 1525 MATOP_RESTORE_COLUMN_IJ=53, 1526 MATOP_FDCOLORING_CREATE=54, 1527 MATOP_COLORING_PATCH=55, 1528 MATOP_SET_UNFACTORED=56, 1529 MATOP_PERMUTE=57, 1530 MATOP_SET_VALUES_BLOCKED=58, 1531 MATOP_GET_SUBMATRIX=59, 1532 MATOP_DESTROY=60, 1533 MATOP_VIEW=61, 1534 MATOP_CONVERT_FROM=62, 1535 MATOP_USE_SCALED_FORM=63, 1536 MATOP_SCALE_SYSTEM=64, 1537 MATOP_UNSCALE_SYSTEM=65, 1538 MATOP_SET_LOCAL_TO_GLOBAL_MAP=66, 1539 MATOP_SET_VALUES_LOCAL=67, 1540 MATOP_ZERO_ROWS_LOCAL=68, 1541 MATOP_GET_ROW_MAX_ABS=69, 1542 MATOP_GET_ROW_MIN_ABS=70, 1543 MATOP_CONVERT=71, 1544 MATOP_SET_COLORING=72, 1545 MATOP_SET_VALUES_ADIC=73, 1546 MATOP_SET_VALUES_ADIFOR=74, 1547 MATOP_FD_COLORING_APPLY=75, 1548 MATOP_SET_FROM_OPTIONS=76, 1549 MATOP_MULT_CON=77, 1550 MATOP_MULT_TRANSPOSE_CON=78, 1551 MATOP_PERMUTE_SPARSIFY=79, 1552 MATOP_MULT_MULTIPLE=80, 1553 MATOP_SOLVE_MULTIPLE=81, 1554 MATOP_GET_INERTIA=82, 1555 MATOP_LOAD=83, 1556 MATOP_IS_SYMMETRIC=84, 1557 MATOP_IS_HERMITIAN=85, 1558 MATOP_IS_STRUCTURALLY_SYMMETRIC=86, 1559 MATOP_DUMMY=87, 1560 MATOP_GET_VECS=88, 1561 MATOP_MAT_MULT=89, 1562 MATOP_MAT_MULT_SYMBOLIC=90, 1563 MATOP_MAT_MULT_NUMERIC=91, 1564 MATOP_PTAP=92, 1565 MATOP_PTAP_SYMBOLIC=93, 1566 MATOP_PTAP_NUMERIC=94, 1567 MATOP_MAT_MULTTRANSPOSE=95, 1568 MATOP_MAT_MULTTRANSPOSE_SYM=96, 1569 MATOP_MAT_MULTTRANSPOSE_NUM=97, 1570 MATOP_PTAP_SYMBOLIC_SEQAIJ=98, 1571 MATOP_PTAP_NUMERIC_SEQAIJ=99, 1572 MATOP_PTAP_SYMBOLIC_MPIAIJ=100, 1573 MATOP_PTAP_NUMERIC_MPIAIJ=101, 1574 MATOP_CONJUGATE=102, 1575 MATOP_SET_SIZES=103, 
1576 MATOP_SET_VALUES_ROW=104, 1577 MATOP_REAL_PART=105, 1578 MATOP_IMAG_PART=106, 1579 MATOP_GET_ROW_UTRIANGULAR=107, 1580 MATOP_RESTORE_ROW_UTRIANGULAR=108, 1581 MATOP_MATSOLVE=109, 1582 MATOP_GET_REDUNDANTMATRIX=110, 1583 MATOP_GET_ROW_MIN=111, 1584 MATOP_GET_COLUMN_VEC=112, 1585 MATOP_MISSING_DIAGONAL=113, 1586 MATOP_MATGETSEQNONZEROSTRUCTURE=114, 1587 MATOP_CREATE=115, 1588 MATOP_GET_GHOSTS=116, 1589 MATOP_GET_LOCALSUBMATRIX=117, 1590 MATOP_RESTORE_LOCALSUBMATRIX=118, 1591 MATOP_MULT_DIAGONAL_BLOCK=119, 1592 MATOP_HERMITIANTRANSPOSE=120, 1593 MATOP_MULTHERMITIANTRANSPOSE=121, 1594 MATOP_MULTHERMITIANTRANSPOSEADD=122, 1595 MATOP_GETMULTIPROCBLOCK=123, 1596 MATOP_GETCOLUMNNORMS=125, 1597 MATOP_GET_SUBMATRICES_PARALLEL=128, 1598 MATOP_SET_VALUES_BATCH=129, 1599 MATOP_TRANSPOSEMATMULT=130, 1600 MATOP_TRANSPOSEMATMULT_SYMBOLIC=131, 1601 MATOP_TRANSPOSEMATMULT_NUMERIC=132, 1602 MATOP_TRANSPOSECOLORING_CREATE=133, 1603 MATOP_TRANSCOLORING_APPLY_SPTODEN=134, 1604 MATOP_TRANSCOLORING_APPLY_DENTOSP=135, 1605 MATOP_RARt=136, 1606 MATOP_RARt_SYMBOLIC=137, 1607 MATOP_RARt_NUMERIC=138, 1608 MATOP_SET_BLOCK_SIZES=139 1609 } MatOperation; 1610 extern PetscErrorCode MatHasOperation(Mat,MatOperation,PetscBool *); 1611 extern PetscErrorCode MatShellSetOperation(Mat,MatOperation,void(*)(void)); 1612 extern PetscErrorCode MatShellGetOperation(Mat,MatOperation,void(**)(void)); 1613 extern PetscErrorCode MatShellSetContext(Mat,void*); 1614 1615 /* 1616 Codes for matrices stored on disk. By default they are 1617 stored in a universal format. By changing the format with 1618 PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE); the matrices will 1619 be stored in a way natural for the matrix, for example dense matrices 1620 would be stored as dense. Matrices stored this way may only be 1621 read into matrices of the same type. 
1622 */ 1623 #define MATRIX_BINARY_FORMAT_DENSE -1 1624 1625 extern PetscErrorCode MatMPIBAIJSetHashTableFactor(Mat,PetscReal); 1626 extern PetscErrorCode MatISGetLocalMat(Mat,Mat*); 1627 extern PetscErrorCode MatISSetLocalMat(Mat,Mat); 1628 1629 /*S 1630 MatNullSpace - Object that removes a null space from a vector, i.e. 1631 orthogonalizes the vector to a subsapce 1632 1633 Level: advanced 1634 1635 Concepts: matrix; linear operator, null space 1636 1637 Users manual sections: 1638 . sec_singular 1639 1640 .seealso: MatNullSpaceCreate() 1641 S*/ 1642 typedef struct _p_MatNullSpace* MatNullSpace; 1643 1644 extern PetscErrorCode MatNullSpaceCreate(MPI_Comm,PetscBool ,PetscInt,const Vec[],MatNullSpace*); 1645 extern PetscErrorCode MatNullSpaceSetFunction(MatNullSpace,PetscErrorCode (*)(MatNullSpace,Vec,void*),void*); 1646 extern PetscErrorCode MatNullSpaceDestroy(MatNullSpace*); 1647 extern PetscErrorCode MatNullSpaceRemove(MatNullSpace,Vec,Vec*); 1648 extern PetscErrorCode MatGetNullSpace(Mat, MatNullSpace *); 1649 extern PetscErrorCode MatSetNullSpace(Mat,MatNullSpace); 1650 extern PetscErrorCode MatSetNearNullSpace(Mat,MatNullSpace); 1651 extern PetscErrorCode MatGetNearNullSpace(Mat,MatNullSpace*); 1652 extern PetscErrorCode MatNullSpaceTest(MatNullSpace,Mat,PetscBool *); 1653 extern PetscErrorCode MatNullSpaceView(MatNullSpace,PetscViewer); 1654 extern PetscErrorCode MatNullSpaceGetVecs(MatNullSpace,PetscBool*,PetscInt*,const Vec**); 1655 extern PetscErrorCode MatNullSpaceCreateRigidBody(Vec,MatNullSpace*); 1656 1657 extern PetscErrorCode MatReorderingSeqSBAIJ(Mat,IS); 1658 extern PetscErrorCode MatMPISBAIJSetHashTableFactor(Mat,PetscReal); 1659 extern PetscErrorCode MatSeqSBAIJSetColumnIndices(Mat,PetscInt *); 1660 extern PetscErrorCode MatSeqBAIJInvertBlockDiagonal(Mat); 1661 1662 extern PetscErrorCode MatCreateMAIJ(Mat,PetscInt,Mat*); 1663 extern PetscErrorCode MatMAIJRedimension(Mat,PetscInt,Mat*); 1664 extern PetscErrorCode MatMAIJGetAIJ(Mat,Mat*); 1665 
extern PetscErrorCode MatComputeExplicitOperator(Mat,Mat*);

extern PetscErrorCode MatDiagonalScaleLocal(Mat,Vec);

extern PetscErrorCode MatCreateMFFD(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatMFFDSetBase(Mat,Vec,Vec);
extern PetscErrorCode MatMFFDSetFunction(Mat,PetscErrorCode(*)(void*,Vec,Vec),void*);
extern PetscErrorCode MatMFFDSetFunctioni(Mat,PetscErrorCode (*)(void*,PetscInt,Vec,PetscScalar*));
extern PetscErrorCode MatMFFDSetFunctioniBase(Mat,PetscErrorCode (*)(void*,Vec));
extern PetscErrorCode MatMFFDAddNullSpace(Mat,MatNullSpace);
extern PetscErrorCode MatMFFDSetHHistory(Mat,PetscScalar[],PetscInt);
extern PetscErrorCode MatMFFDResetHHistory(Mat);
extern PetscErrorCode MatMFFDSetFunctionError(Mat,PetscReal);
extern PetscErrorCode MatMFFDSetPeriod(Mat,PetscInt);
extern PetscErrorCode MatMFFDGetH(Mat,PetscScalar *);
extern PetscErrorCode MatMFFDSetOptionsPrefix(Mat,const char[]);
extern PetscErrorCode MatMFFDCheckPositivity(void*,Vec,Vec,PetscScalar*);
extern PetscErrorCode MatMFFDSetCheckh(Mat,PetscErrorCode (*)(void*,Vec,Vec,PetscScalar*),void*);

/*S
    MatMFFD - A data structured used to manage the computation of the h differencing parameter for matrix-free
              Jacobian vector products

    Notes: MATMFFD is a specific MatType which uses the MatMFFD data structure

           MatMFFD*() methods actually take the Mat as their first argument. Not a MatMFFD data structure

    Level: developer

.seealso: MATMFFD, MatCreateMFFD(), MatMFFDSetFunction(), MatMFFDSetType(), MatMFFDRegister()
S*/
typedef struct _p_MatMFFD* MatMFFD;

/*J
    MatMFFDType - algorithm used to compute the h used in computing matrix-vector products via differencing of the function

   Level: beginner

.seealso: MatMFFDSetType(), MatMFFDRegister()
J*/
#define MatMFFDType char*
#define MATMFFD_DS  "ds"
#define MATMFFD_WP  "wp"

extern PetscErrorCode MatMFFDSetType(Mat,const MatMFFDType);
extern PetscErrorCode MatMFFDRegister(const char[],const char[],const char[],PetscErrorCode (*)(MatMFFD));

/*MC
   MatMFFDRegisterDynamic - Adds a method to the MatMFFD registry.

   Synopsis:
   PetscErrorCode MatMFFDRegisterDynamic(const char *name_solver,const char *path,const char *name_create,PetscErrorCode (*routine_create)(MatMFFD))

   Not Collective

   Input Parameters:
+  name_solver - name of a new user-defined compute-h module
.  path - path (either absolute or relative) the library containing this solver
.  name_create - name of routine to create method context
-  routine_create - routine to create method context

   Level: developer

   Notes:
   MatMFFDRegisterDynamic() may be called multiple times to add several user-defined solvers.

   If dynamic libraries are used, then the fourth input argument (routine_create)
   is ignored.

   Sample usage:
.vb
   MatMFFDRegisterDynamic("my_h",/home/username/my_lib/lib/libO/solaris/mylib.a,
               "MyHCreate",MyHCreate);
.ve

   Then, your solver can be chosen with the procedural interface via
$     MatMFFDSetType(mfctx,"my_h")
   or at runtime via the option
$     -snes_mf_type my_h

.keywords: MatMFFD, register

.seealso: MatMFFDRegisterAll(), MatMFFDRegisterDestroy()
M*/
#if defined(PETSC_USE_DYNAMIC_LIBRARIES)
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,0)
#else
#define MatMFFDRegisterDynamic(a,b,c,d) MatMFFDRegister(a,b,c,d)
#endif

extern PetscErrorCode MatMFFDRegisterAll(const char[]);
extern PetscErrorCode MatMFFDRegisterDestroy(void);
extern PetscErrorCode MatMFFDDSSetUmin(Mat,PetscReal);
extern PetscErrorCode MatMFFDWPSetComputeNormU(Mat,PetscBool );


extern PetscErrorCode PetscViewerMathematicaPutMatrix(PetscViewer, PetscInt, PetscInt, PetscReal *);
extern PetscErrorCode PetscViewerMathematicaPutCSRMatrix(PetscViewer, PetscInt, PetscInt, PetscInt *, PetscInt *, PetscReal *);

/*
   PETSc interface to MUMPS
*/
#ifdef PETSC_HAVE_MUMPS
extern PetscErrorCode MatMumpsSetIcntl(Mat,PetscInt,PetscInt);
#endif

/*
   PETSc interface to SUPERLU
*/
#ifdef PETSC_HAVE_SUPERLU
extern PetscErrorCode MatSuperluSetILUDropTol(Mat,PetscReal);
#endif

#if defined(PETSC_HAVE_CUSP)
extern PetscErrorCode MatCreateSeqAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,const PetscInt[],Mat*);
extern PetscErrorCode MatCreateAIJCUSP(MPI_Comm,PetscInt,PetscInt,PetscInt,PetscInt,PetscInt,const PetscInt[],PetscInt,const PetscInt[],Mat*);
#endif

/*
   PETSc interface to FFTW
*/
#if defined(PETSC_HAVE_FFTW)
extern PetscErrorCode VecScatterPetscToFFTW(Mat,Vec,Vec);
extern PetscErrorCode VecScatterFFTWToPetsc(Mat,Vec,Vec);
extern PetscErrorCode MatGetVecsFFTW(Mat,Vec*,Vec*,Vec*);
#endif

extern PetscErrorCode MatCreateNest(MPI_Comm,PetscInt,const IS[],PetscInt,const IS[],const Mat[],Mat*);
extern PetscErrorCode MatNestGetSize(Mat,PetscInt*,PetscInt*);
extern PetscErrorCode MatNestGetISs(Mat,IS[],IS[]);
extern PetscErrorCode MatNestGetLocalISs(Mat,IS[],IS[]);
extern PetscErrorCode MatNestGetSubMats(Mat,PetscInt*,PetscInt*,Mat***);
extern PetscErrorCode MatNestGetSubMat(Mat,PetscInt,PetscInt,Mat*);
extern PetscErrorCode MatNestSetVecType(Mat,const VecType);
extern PetscErrorCode MatNestSetSubMats(Mat,PetscInt,const IS[],PetscInt,const IS[],const Mat[]);
extern PetscErrorCode MatNestSetSubMat(Mat,PetscInt,PetscInt,Mat);

/*
 MatIJ:
 An unweighted directed pseudograph
 An interpretation of this matrix as a (pseudo)graph allows us to define additional operations on it:
 A MatIJ can act on sparse arrays: arrays of indices, or index arrays of integers, scalars, or integer-scalar pairs
 by mapping the indices to the indices connected to them by the (pseudo)graph edges
*/
typedef enum {MATIJ_LOCAL, MATIJ_GLOBAL} MatIJIndexType;
extern PetscErrorCode MatIJSetMultivalued(Mat, PetscBool);
extern PetscErrorCode MatIJGetMultivalued(Mat, PetscBool*);
extern PetscErrorCode MatIJSetEdges(Mat, PetscInt, const PetscInt*, const PetscInt*);
extern PetscErrorCode MatIJGetEdges(Mat, PetscInt *, PetscInt **, PetscInt **);
extern PetscErrorCode MatIJSetEdgesIS(Mat, IS, IS);
extern PetscErrorCode MatIJGetEdgesIS(Mat, IS*, IS*);
extern PetscErrorCode MatIJGetRowSizes(Mat, MatIJIndexType, PetscInt, const PetscInt *, PetscInt **);
extern PetscErrorCode MatIJGetMinRowSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetMaxRowSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetSupport(Mat, PetscInt *, PetscInt **);
extern PetscErrorCode MatIJGetSupportIS(Mat, IS *);
extern PetscErrorCode MatIJGetImage(Mat, PetscInt*, PetscInt**);
extern PetscErrorCode MatIJGetImageIS(Mat, IS *);
extern PetscErrorCode MatIJGetSupportSize(Mat, PetscInt *);
extern PetscErrorCode MatIJGetImageSize(Mat, PetscInt *);

extern PetscErrorCode MatIJBinRenumber(Mat, Mat*);

/* Map/bin sparse index (and optional integer/scalar payload) arrays through the pseudograph; outputs are allocated arrays owned by the caller */
extern PetscErrorCode MatIJMap(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*, MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern PetscErrorCode MatIJBin(Mat, MatIJIndexType, PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);
extern PetscErrorCode MatIJBinMap(Mat,Mat, MatIJIndexType,PetscInt,const PetscInt*,const PetscInt*,const PetscScalar*,MatIJIndexType,PetscInt*,PetscInt**,PetscInt**,PetscScalar**,PetscInt**);

PETSC_EXTERN_CXX_END
#endif