/*
   Provides an interface to the Tufo-Fischer parallel direct solver
*/

#include <petsc/private/pcimpl.h> /*I "petscpc.h" I*/
#include <../src/mat/impls/aij/mpi/mpiaij.h>
#include <../src/ksp/pc/impls/tfs/tfs.h>

typedef struct {
  xxt_ADT  xxt;
  xyt_ADT  xyt;
  Vec      b, xd, xo;
  PetscInt nd;
} PC_TFS;

static PetscErrorCode PCDestroy_TFS(PC pc)
{
  PC_TFS *tfs = (PC_TFS *)pc->data;

  PetscFunctionBegin;
  /* free the XXT/XYT data structures */
  if (tfs->xxt) PetscCall(XXT_free(tfs->xxt));
  if (tfs->xyt) PetscCall(XYT_free(tfs->xyt));
  PetscCall(VecDestroy(&tfs->b));
  PetscCall(VecDestroy(&tfs->xd));
  PetscCall(VecDestroy(&tfs->xo));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XXT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  /* solve y = A^{-1} x using the Cholesky-based XXT factorization computed in PCSetUp_TFS() */
  PetscCall(XXT_solve(tfs->xxt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCApply_TFS_XYT(PC pc, Vec x, Vec y)
{
  PC_TFS            *tfs = (PC_TFS *)pc->data;
  PetscScalar       *yy;
  const PetscScalar *xx;

  PetscFunctionBegin;
  PetscCall(VecGetArrayRead(x, &xx));
  PetscCall(VecGetArray(y, &yy));
  /* solve y = A^{-1} x using the LU-based XYT factorization computed in PCSetUp_TFS() */
  PetscCall(XYT_solve(tfs->xyt, yy, (PetscScalar *)xx));
  PetscCall(VecRestoreArrayRead(x, &xx));
  PetscCall(VecRestoreArray(y, &yy));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCTFSLocalMult_TFS(PC pc, PetscScalar *xin, PetscScalar *xout)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;

  PetscFunctionBegin;
  /* wrap the raw arrays in the work vectors, then compute the local part of the
     parallel matrix-vector product: xout = A_diag * x_diag + A_offdiag * x_offdiag */
  PetscCall(VecPlaceArray(tfs->b, xout));
  PetscCall(VecPlaceArray(tfs->xd, xin));
  PetscCall(VecPlaceArray(tfs->xo, xin + tfs->nd));
  PetscCall(MatMult(a->A, tfs->xd, tfs->b));
  PetscCall(MatMultAdd(a->B, tfs->xo, tfs->b, tfs->b));
  PetscCall(VecResetArray(tfs->b));
  PetscCall(VecResetArray(tfs->xd));
  PetscCall(VecResetArray(tfs->xo));
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCSetUp_TFS(PC pc)
{
  PC_TFS     *tfs = (PC_TFS *)pc->data;
  Mat         A   = pc->pmat;
  Mat_MPIAIJ *a   = (Mat_MPIAIJ *)A->data;
  PetscInt   *localtoglobal, ncol, i;
  PetscBool   ismpiaij;

  PetscFunctionBegin;
  PetscCheck(A->cmap->N == A->rmap->N, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_SIZ, "matrix must be square");
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATMPIAIJ, &ismpiaij));
  PetscCheck(ismpiaij, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Currently only supports MPIAIJ matrices");

  /* generate the local to global mapping; XXT/XYT expect 1-based global indices */
  ncol = a->A->cmap->n + a->B->cmap->n;
  PetscCall(PetscMalloc1(ncol, &localtoglobal));
  for (i = 0; i < a->A->cmap->n; i++) localtoglobal[i] = A->cmap->rstart + i + 1;
  for (i = 0; i < a->B->cmap->n; i++) localtoglobal[i + a->A->cmap->n] = a->garray[i] + 1;

  /* generate the vectors needed for the local solves; their arrays are supplied
     later with VecPlaceArray() in PCTFSLocalMult_TFS() */
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->rmap->n, NULL, &tfs->b));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->A->cmap->n, NULL, &tfs->xd));
  PetscCall(VecCreateSeqWithArray(PETSC_COMM_SELF, 1, a->B->cmap->n, NULL, &tfs->xo));
  tfs->nd = a->A->cmap->n;

  /* factor: Cholesky-based XXT for symmetric matrices, LU-based XYT otherwise */
  PetscCall(PetscBarrier((PetscObject)pc));
  if (A->symmetric == PETSC_BOOL3_TRUE) {
    tfs->xxt = XXT_new();
    PetscCall(XXT_factor(tfs->xxt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode (*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XXT;
  } else {
    tfs->xyt = XYT_new();
    PetscCall(XYT_factor(tfs->xyt, localtoglobal, A->rmap->n, ncol, (PetscErrorCode (*)(void *, PetscScalar *, PetscScalar *))PCTFSLocalMult_TFS, pc));
    pc->ops->apply = PCApply_TFS_XYT;
  }

  PetscCall(PetscFree(localtoglobal));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCTFS - A parallel direct solver intended for problems with very few unknowns (such as the
   coarse grid in multigrid). Performs a Cholesky or LU factorization of a matrix defined by
   its local matrix-vector product.

   Level: beginner

   Notes:
   Only implemented for `MATMPIAIJ` matrices

   Only works on a solver object that lives on all of `PETSC_COMM_WORLD`!

   Only works for real numbers (is not built if `PetscScalar` is complex)

   Implemented by Henry M. Tufo III and Paul Fischer originally for Nek5000 and called XXT or XYT

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`
M*/
PETSC_EXTERN PetscErrorCode PCCreate_TFS(PC pc)
{
  PC_TFS     *tfs;
  PetscMPIInt cmp;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_compare(PETSC_COMM_WORLD, PetscObjectComm((PetscObject)pc), &cmp));
  PetscCheck(cmp == MPI_IDENT || cmp == MPI_CONGRUENT, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "TFS only works with PETSC_COMM_WORLD objects");
  PetscCall(PetscNew(&tfs));

  tfs->xxt = NULL;
  tfs->xyt = NULL;
  tfs->b   = NULL;
  tfs->xd  = NULL;
  tfs->xo  = NULL;
  tfs->nd  = 0;

  pc->ops->apply               = NULL;
  pc->ops->applytranspose      = NULL;
  pc->ops->setup               = PCSetUp_TFS;
  pc->ops->destroy             = PCDestroy_TFS;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->data                     = (void *)tfs;
  PetscFunctionReturn(PETSC_SUCCESS);
}
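
/*
   A minimal usage sketch (illustrative, not part of this implementation): how a
   caller might select this preconditioner through the KSP interface. The matrix
   A and vectors b, x are assumed to exist and be assembled already; A must be a
   square MATMPIAIJ matrix whose communicator is PETSC_COMM_WORLD.

     KSP ksp;
     PC  pc;

     PetscCall(KSPCreate(PETSC_COMM_WORLD, &ksp));
     PetscCall(KSPSetOperators(ksp, A, A));
     PetscCall(KSPSetType(ksp, KSPPREONLY)); // direct solver: one preconditioner application solves the system
     PetscCall(KSPGetPC(ksp, &pc));
     PetscCall(PCSetType(pc, PCTFS));
     PetscCall(KSPSolve(ksp, b, x));
     PetscCall(KSPDestroy(&ksp));

   The same choice is available at run time via -ksp_type preonly -pc_type tfs.
*/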