1 /* TODOLIST 2 3 Solvers 4 - Add support for cholesky for coarse solver (similar to local solvers) 5 - Propagate ksp prefixes for solvers to mat objects? 6 7 User interface 8 - ** DM attached to pc? 9 10 Debugging output 11 - * Better management of verbosity levels of debugging output 12 13 Extra 14 - *** Is it possible to work with PCBDDCGraph on boundary indices only (less memory consumed)? 15 - BDDC with MG framework? 16 17 MATIS related operations contained in BDDC code 18 - Provide general case for subassembling 19 20 */ 21 22 #include <petsc/private/pcbddcimpl.h> /*I "petscpc.h" I*/ /* header file for Fortran wrappers */ 23 #include <petsc/private/pcbddcprivateimpl.h> 24 #include <petscblaslapack.h> 25 26 static PetscBool PCBDDCPackageInitialized = PETSC_FALSE; 27 28 static PetscBool cited = PETSC_FALSE; 29 static const char citation[] = "@article{ZampiniPCBDDC,\n" 30 "author = {Stefano Zampini},\n" 31 "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n" 32 "journal = {SIAM Journal on Scientific Computing},\n" 33 "volume = {38},\n" 34 "number = {5},\n" 35 "pages = {S282-S306},\n" 36 "year = {2016},\n" 37 "doi = {10.1137/15M1025785},\n" 38 "URL = {http://dx.doi.org/10.1137/15M1025785},\n" 39 "eprint = {http://dx.doi.org/10.1137/15M1025785}\n" 40 "}\n"; 41 42 PetscLogEvent PC_BDDC_Topology[PETSC_PCBDDC_MAXLEVELS]; 43 PetscLogEvent PC_BDDC_LocalSolvers[PETSC_PCBDDC_MAXLEVELS]; 44 PetscLogEvent PC_BDDC_LocalWork[PETSC_PCBDDC_MAXLEVELS]; 45 PetscLogEvent PC_BDDC_CorrectionSetUp[PETSC_PCBDDC_MAXLEVELS]; 46 PetscLogEvent PC_BDDC_ApproxSetUp[PETSC_PCBDDC_MAXLEVELS]; 47 PetscLogEvent PC_BDDC_ApproxApply[PETSC_PCBDDC_MAXLEVELS]; 48 PetscLogEvent PC_BDDC_CoarseSetUp[PETSC_PCBDDC_MAXLEVELS]; 49 PetscLogEvent PC_BDDC_CoarseSolver[PETSC_PCBDDC_MAXLEVELS]; 50 PetscLogEvent PC_BDDC_AdaptiveSetUp[PETSC_PCBDDC_MAXLEVELS]; 51 PetscLogEvent PC_BDDC_Scaling[PETSC_PCBDDC_MAXLEVELS]; 52 PetscLogEvent PC_BDDC_Schurs[PETSC_PCBDDC_MAXLEVELS]; 53 PetscLogEvent 
PC_BDDC_Solves[PETSC_PCBDDC_MAXLEVELS][3]; 54 55 const char *const PCBDDCInterfaceExtTypes[] = {"DIRICHLET", "LUMP", "PCBDDCInterfaceExtType", "PC_BDDC_INTERFACE_EXT_", NULL}; 56 57 static PetscErrorCode PCApply_BDDC(PC, Vec, Vec); 58 59 static PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems *PetscOptionsObject) 60 { 61 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 62 PetscInt nt, i; 63 64 PetscFunctionBegin; 65 PetscOptionsHeadBegin(PetscOptionsObject, "BDDC options"); 66 /* Verbose debugging */ 67 PetscCall(PetscOptionsInt("-pc_bddc_check_level", "Verbose output for PCBDDC (intended for debug)", "none", pcbddc->dbg_flag, &pcbddc->dbg_flag, NULL)); 68 /* Approximate solvers */ 69 PetscCall(PetscOptionsEnum("-pc_bddc_interface_ext_type", "Use DIRICHLET or LUMP to extend interface corrections to interior", "PCBDDCSetInterfaceExtType", PCBDDCInterfaceExtTypes, (PetscEnum)pcbddc->interface_extension, (PetscEnum *)&pcbddc->interface_extension, NULL)); 70 if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET) { 71 PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate", "Inform PCBDDC that we are using approximate Dirichlet solvers", "none", pcbddc->NullSpace_corr[0], &pcbddc->NullSpace_corr[0], NULL)); 72 PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate_scale", "Inform PCBDDC that we need to scale the Dirichlet solve", "none", pcbddc->NullSpace_corr[1], &pcbddc->NullSpace_corr[1], NULL)); 73 } else { 74 /* This flag is needed/implied by lumping */ 75 pcbddc->switch_static = PETSC_TRUE; 76 } 77 PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate", "Inform PCBDDC that we are using approximate Neumann solvers", "none", pcbddc->NullSpace_corr[2], &pcbddc->NullSpace_corr[2], NULL)); 78 PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale", "Inform PCBDDC that we need to scale the Neumann solve", "none", pcbddc->NullSpace_corr[3], &pcbddc->NullSpace_corr[3], NULL)); 79 /* Primal space customization */ 80 
PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph", "Use or not adjacency graph of local mat for interface analysis", "none", pcbddc->use_local_adj, &pcbddc->use_local_adj, NULL)); 81 PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount", "Maximum number of shared subdomains for a connected component", "none", pcbddc->graphmaxcount, &pcbddc->graphmaxcount, NULL)); 82 PetscCall(PetscOptionsBool("-pc_bddc_corner_selection", "Activates face-based corner selection", "none", pcbddc->corner_selection, &pcbddc->corner_selection, NULL)); 83 PetscCall(PetscOptionsBool("-pc_bddc_use_vertices", "Use or not corner dofs in coarse space", "none", pcbddc->use_vertices, &pcbddc->use_vertices, NULL)); 84 PetscCall(PetscOptionsBool("-pc_bddc_use_edges", "Use or not edge constraints in coarse space", "none", pcbddc->use_edges, &pcbddc->use_edges, NULL)); 85 PetscCall(PetscOptionsBool("-pc_bddc_use_faces", "Use or not face constraints in coarse space", "none", pcbddc->use_faces, &pcbddc->use_faces, NULL)); 86 PetscCall(PetscOptionsInt("-pc_bddc_vertex_size", "Connected components smaller or equal to vertex size will be considered as primal vertices", "none", pcbddc->vertex_size, &pcbddc->vertex_size, NULL)); 87 PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp", "Use near null space attached to the matrix to compute constraints", "none", pcbddc->use_nnsp, &pcbddc->use_nnsp, NULL)); 88 PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp_true", "Use near null space attached to the matrix to compute constraints as is", "none", pcbddc->use_nnsp_true, &pcbddc->use_nnsp_true, NULL)); 89 PetscCall(PetscOptionsBool("-pc_bddc_use_qr_single", "Use QR factorization for single constraints on cc (QR is always used when multiple constraints are present)", "none", pcbddc->use_qr_single, &pcbddc->use_qr_single, NULL)); 90 /* Change of basis */ 91 PetscCall(PetscOptionsBool("-pc_bddc_use_change_of_basis", "Use or not internal change of basis on local edge nodes", "none", pcbddc->use_change_of_basis, 
&pcbddc->use_change_of_basis, NULL)); 92 PetscCall(PetscOptionsBool("-pc_bddc_use_change_on_faces", "Use or not internal change of basis on local face nodes", "none", pcbddc->use_change_on_faces, &pcbddc->use_change_on_faces, NULL)); 93 if (!pcbddc->use_change_of_basis) pcbddc->use_change_on_faces = PETSC_FALSE; 94 /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */ 95 PetscCall(PetscOptionsBool("-pc_bddc_switch_static", "Switch on static condensation ops around the interface preconditioner", "none", pcbddc->switch_static, &pcbddc->switch_static, NULL)); 96 PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_per_proc", "Target number of equations per process for coarse problem redistribution (significant only at the coarsest level)", "none", pcbddc->coarse_eqs_per_proc, &pcbddc->coarse_eqs_per_proc, NULL)); 97 i = pcbddc->coarsening_ratio; 98 PetscCall(PetscOptionsInt("-pc_bddc_coarsening_ratio", "Set coarsening ratio used in multilevel coarsening", "PCBDDCSetCoarseningRatio", i, &i, NULL)); 99 PetscCall(PCBDDCSetCoarseningRatio(pc, i)); 100 i = pcbddc->max_levels; 101 PetscCall(PetscOptionsInt("-pc_bddc_levels", "Set maximum number of levels for multilevel", "PCBDDCSetLevels", i, &i, NULL)); 102 PetscCall(PCBDDCSetLevels(pc, i)); 103 PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_limit", "Set maximum number of equations on coarsest grid to aim for", "none", pcbddc->coarse_eqs_limit, &pcbddc->coarse_eqs_limit, NULL)); 104 PetscCall(PetscOptionsBool("-pc_bddc_use_coarse_estimates", "Use estimated eigenvalues for coarse problem", "none", pcbddc->use_coarse_estimates, &pcbddc->use_coarse_estimates, NULL)); 105 PetscCall(PetscOptionsBool("-pc_bddc_use_deluxe_scaling", "Use deluxe scaling for BDDC", "none", pcbddc->use_deluxe_scaling, &pcbddc->use_deluxe_scaling, NULL)); 106 PetscCall(PetscOptionsBool("-pc_bddc_schur_rebuild", "Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. 
define the interface without any adjacency)", "none", pcbddc->sub_schurs_rebuild, &pcbddc->sub_schurs_rebuild, NULL)); 107 PetscCall(PetscOptionsInt("-pc_bddc_schur_layers", "Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)", "none", pcbddc->sub_schurs_layers, &pcbddc->sub_schurs_layers, NULL)); 108 PetscCall(PetscOptionsBool("-pc_bddc_schur_use_useradj", "Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)", "none", pcbddc->sub_schurs_use_useradj, &pcbddc->sub_schurs_use_useradj, NULL)); 109 PetscCall(PetscOptionsBool("-pc_bddc_schur_exact", "Whether or not to use the exact Schur complement instead of the reduced one (which excludes size 1 cc)", "none", pcbddc->sub_schurs_exact_schur, &pcbddc->sub_schurs_exact_schur, NULL)); 110 PetscCall(PetscOptionsBool("-pc_bddc_deluxe_zerorows", "Zero rows and columns of deluxe operators associated with primal dofs", "none", pcbddc->deluxe_zerorows, &pcbddc->deluxe_zerorows, NULL)); 111 PetscCall(PetscOptionsBool("-pc_bddc_deluxe_singlemat", "Collapse deluxe operators", "none", pcbddc->deluxe_singlemat, &pcbddc->deluxe_singlemat, NULL)); 112 PetscCall(PetscOptionsBool("-pc_bddc_adaptive_userdefined", "Use user-defined constraints (should be attached via MatSetNearNullSpace to pmat) in addition to those adaptively generated", "none", pcbddc->adaptive_userdefined, &pcbddc->adaptive_userdefined, NULL)); 113 nt = 2; 114 PetscCall(PetscOptionsRealArray("-pc_bddc_adaptive_threshold", "Thresholds to be used for adaptive selection of constraints", "none", pcbddc->adaptive_threshold, &nt, NULL)); 115 if (nt == 1) pcbddc->adaptive_threshold[1] = pcbddc->adaptive_threshold[0]; 116 PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmin", "Minimum number of constraints per connected components", "none", pcbddc->adaptive_nmin, &pcbddc->adaptive_nmin, NULL)); 117 PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmax", "Maximum 
number of constraints per connected components", "none", pcbddc->adaptive_nmax, &pcbddc->adaptive_nmax, NULL)); 118 PetscCall(PetscOptionsBool("-pc_bddc_symmetric", "Symmetric computation of primal basis functions", "none", pcbddc->symmetric_primal, &pcbddc->symmetric_primal, NULL)); 119 PetscCall(PetscOptionsInt("-pc_bddc_coarse_adj", "Number of processors where to map the coarse adjacency list", "none", pcbddc->coarse_adj_red, &pcbddc->coarse_adj_red, NULL)); 120 PetscCall(PetscOptionsBool("-pc_bddc_benign_trick", "Apply the benign subspace trick to saddle point problems with discontinuous pressures", "none", pcbddc->benign_saddle_point, &pcbddc->benign_saddle_point, NULL)); 121 PetscCall(PetscOptionsBool("-pc_bddc_benign_change", "Compute the pressure change of basis explicitly", "none", pcbddc->benign_change_explicit, &pcbddc->benign_change_explicit, NULL)); 122 PetscCall(PetscOptionsBool("-pc_bddc_benign_compute_correction", "Compute the benign correction during PreSolve", "none", pcbddc->benign_compute_correction, &pcbddc->benign_compute_correction, NULL)); 123 PetscCall(PetscOptionsBool("-pc_bddc_nonetflux", "Automatic computation of no-net-flux quadrature weights", "none", pcbddc->compute_nonetflux, &pcbddc->compute_nonetflux, NULL)); 124 PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected", "Detects disconnected subdomains", "none", pcbddc->detect_disconnected, &pcbddc->detect_disconnected, NULL)); 125 PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected_filter", "Filters out small entries in the local matrix when detecting disconnected subdomains", "none", pcbddc->detect_disconnected_filter, &pcbddc->detect_disconnected_filter, NULL)); 126 PetscCall(PetscOptionsBool("-pc_bddc_eliminate_dirichlet", "Whether or not we want to eliminate dirichlet dofs during presolve", "none", pcbddc->eliminate_dirdofs, &pcbddc->eliminate_dirdofs, NULL)); 127 PetscOptionsHeadEnd(); 128 PetscFunctionReturn(PETSC_SUCCESS); 129 } 130 131 static PetscErrorCode 
PCView_BDDC(PC pc, PetscViewer viewer) 132 { 133 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 134 PC_IS *pcis = (PC_IS *)pc->data; 135 PetscBool isascii; 136 PetscSubcomm subcomm; 137 PetscViewer subviewer; 138 139 PetscFunctionBegin; 140 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii)); 141 /* ASCII viewer */ 142 if (isascii) { 143 PetscMPIInt color, rank, size; 144 PetscInt64 loc[7], gsum[6], gmax[6], gmin[6], totbenign; 145 PetscScalar interface_size; 146 PetscReal ratio1 = 0., ratio2 = 0.; 147 Vec counter; 148 149 if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, " Partial information available: preconditioner has not been setup yet\n")); 150 PetscCall(PetscViewerASCIIPrintf(viewer, " Use verbose output: %" PetscInt_FMT "\n", pcbddc->dbg_flag)); 151 PetscCall(PetscViewerASCIIPrintf(viewer, " Use user-defined CSR: %d\n", !!pcbddc->mat_graph->nvtxs_csr)); 152 PetscCall(PetscViewerASCIIPrintf(viewer, " Use local mat graph: %d\n", pcbddc->use_local_adj && !pcbddc->mat_graph->nvtxs_csr)); 153 if (pcbddc->mat_graph->twodim) { 154 PetscCall(PetscViewerASCIIPrintf(viewer, " Connectivity graph topological dimension: 2\n")); 155 } else { 156 PetscCall(PetscViewerASCIIPrintf(viewer, " Connectivity graph topological dimension: 3\n")); 157 } 158 if (pcbddc->graphmaxcount != PETSC_MAX_INT) PetscCall(PetscViewerASCIIPrintf(viewer, " Graph max count: %" PetscInt_FMT "\n", pcbddc->graphmaxcount)); 159 PetscCall(PetscViewerASCIIPrintf(viewer, " Corner selection: %d (selected %d)\n", pcbddc->corner_selection, pcbddc->corner_selected)); 160 PetscCall(PetscViewerASCIIPrintf(viewer, " Use vertices: %d (vertex size %" PetscInt_FMT ")\n", pcbddc->use_vertices, pcbddc->vertex_size)); 161 PetscCall(PetscViewerASCIIPrintf(viewer, " Use edges: %d\n", pcbddc->use_edges)); 162 PetscCall(PetscViewerASCIIPrintf(viewer, " Use faces: %d\n", pcbddc->use_faces)); 163 PetscCall(PetscViewerASCIIPrintf(viewer, " Use true near null space: %d\n", 
pcbddc->use_nnsp_true)); 164 PetscCall(PetscViewerASCIIPrintf(viewer, " Use QR for single constraints on cc: %d\n", pcbddc->use_qr_single)); 165 PetscCall(PetscViewerASCIIPrintf(viewer, " Use change of basis on local edge nodes: %d\n", pcbddc->use_change_of_basis)); 166 PetscCall(PetscViewerASCIIPrintf(viewer, " Use change of basis on local face nodes: %d\n", pcbddc->use_change_on_faces)); 167 PetscCall(PetscViewerASCIIPrintf(viewer, " User defined change of basis matrix: %d\n", !!pcbddc->user_ChangeOfBasisMatrix)); 168 PetscCall(PetscViewerASCIIPrintf(viewer, " Has change of basis matrix: %d\n", !!pcbddc->ChangeOfBasisMatrix)); 169 PetscCall(PetscViewerASCIIPrintf(viewer, " Eliminate dirichlet boundary dofs: %d\n", pcbddc->eliminate_dirdofs)); 170 PetscCall(PetscViewerASCIIPrintf(viewer, " Switch on static condensation ops around the interface preconditioner: %d\n", pcbddc->switch_static)); 171 PetscCall(PetscViewerASCIIPrintf(viewer, " Use exact dirichlet trick: %d\n", pcbddc->use_exact_dirichlet_trick)); 172 PetscCall(PetscViewerASCIIPrintf(viewer, " Interface extension: %s\n", PCBDDCInterfaceExtTypes[pcbddc->interface_extension])); 173 PetscCall(PetscViewerASCIIPrintf(viewer, " Multilevel max levels: %" PetscInt_FMT "\n", pcbddc->max_levels)); 174 PetscCall(PetscViewerASCIIPrintf(viewer, " Multilevel coarsening ratio: %" PetscInt_FMT "\n", pcbddc->coarsening_ratio)); 175 PetscCall(PetscViewerASCIIPrintf(viewer, " Use estimated eigs for coarse problem: %d\n", pcbddc->use_coarse_estimates)); 176 PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe scaling: %d\n", pcbddc->use_deluxe_scaling)); 177 PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe zerorows: %d\n", pcbddc->deluxe_zerorows)); 178 PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe singlemat: %d\n", pcbddc->deluxe_singlemat)); 179 PetscCall(PetscViewerASCIIPrintf(viewer, " Rebuild interface graph for Schur principal minors: %d\n", pcbddc->sub_schurs_rebuild)); 180 
PetscCall(PetscViewerASCIIPrintf(viewer, " Number of dofs' layers for the computation of principal minors: %" PetscInt_FMT "\n", pcbddc->sub_schurs_layers)); 181 PetscCall(PetscViewerASCIIPrintf(viewer, " Use user CSR graph to compute successive layers: %d\n", pcbddc->sub_schurs_use_useradj)); 182 if (pcbddc->adaptive_threshold[1] != pcbddc->adaptive_threshold[0]) { 183 PetscCall(PetscViewerASCIIPrintf(viewer, " Adaptive constraint selection thresholds (active %d, userdefined %d): %g,%g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0], (double)pcbddc->adaptive_threshold[1])); 184 } else { 185 PetscCall(PetscViewerASCIIPrintf(viewer, " Adaptive constraint selection threshold (active %d, userdefined %d): %g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0])); 186 } 187 PetscCall(PetscViewerASCIIPrintf(viewer, " Min constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmin)); 188 PetscCall(PetscViewerASCIIPrintf(viewer, " Max constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmax)); 189 PetscCall(PetscViewerASCIIPrintf(viewer, " Invert exact Schur complement for adaptive selection: %d\n", pcbddc->sub_schurs_exact_schur)); 190 PetscCall(PetscViewerASCIIPrintf(viewer, " Symmetric computation of primal basis functions: %d\n", pcbddc->symmetric_primal)); 191 PetscCall(PetscViewerASCIIPrintf(viewer, " Num. Procs. 
to map coarse adjacency list: %" PetscInt_FMT "\n", pcbddc->coarse_adj_red)); 192 PetscCall(PetscViewerASCIIPrintf(viewer, " Coarse eqs per proc (significant at the coarsest level): %" PetscInt_FMT "\n", pcbddc->coarse_eqs_per_proc)); 193 PetscCall(PetscViewerASCIIPrintf(viewer, " Detect disconnected: %d (filter %d)\n", pcbddc->detect_disconnected, pcbddc->detect_disconnected_filter)); 194 PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subspace trick: %d (change explicit %d)\n", pcbddc->benign_saddle_point, pcbddc->benign_change_explicit)); 195 PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subspace trick is active: %d\n", pcbddc->benign_have_null)); 196 PetscCall(PetscViewerASCIIPrintf(viewer, " Algebraic computation of no-net-flux: %d\n", pcbddc->compute_nonetflux)); 197 if (!pc->setupcalled) PetscFunctionReturn(PETSC_SUCCESS); 198 199 /* compute interface size */ 200 PetscCall(VecSet(pcis->vec1_B, 1.0)); 201 PetscCall(MatCreateVecs(pc->pmat, &counter, NULL)); 202 PetscCall(VecSet(counter, 0.0)); 203 PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE)); 204 PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE)); 205 PetscCall(VecSum(counter, &interface_size)); 206 PetscCall(VecDestroy(&counter)); 207 208 /* compute some statistics on the domain decomposition */ 209 gsum[0] = 1; 210 gsum[1] = gsum[2] = gsum[3] = gsum[4] = gsum[5] = 0; 211 loc[0] = !!pcis->n; 212 loc[1] = pcis->n - pcis->n_B; 213 loc[2] = pcis->n_B; 214 loc[3] = pcbddc->local_primal_size; 215 loc[4] = pcis->n; 216 loc[5] = pcbddc->n_local_subs > 0 ? pcbddc->n_local_subs : (pcis->n ? 
1 : 0); 217 loc[6] = pcbddc->benign_n; 218 PetscCallMPI(MPI_Reduce(loc, gsum, 6, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc))); 219 if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = -1; 220 PetscCallMPI(MPI_Reduce(loc, gmax, 6, MPIU_INT64, MPI_MAX, 0, PetscObjectComm((PetscObject)pc))); 221 if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = PETSC_MAX_INT; 222 PetscCallMPI(MPI_Reduce(loc, gmin, 6, MPIU_INT64, MPI_MIN, 0, PetscObjectComm((PetscObject)pc))); 223 PetscCallMPI(MPI_Reduce(&loc[6], &totbenign, 1, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc))); 224 if (pcbddc->coarse_size) { 225 ratio1 = pc->pmat->rmap->N / (1. * pcbddc->coarse_size); 226 ratio2 = PetscRealPart(interface_size) / pcbddc->coarse_size; 227 } 228 PetscCall(PetscViewerASCIIPrintf(viewer, "********************************** STATISTICS AT LEVEL %" PetscInt_FMT " **********************************\n", pcbddc->current_level)); 229 PetscCall(PetscViewerASCIIPrintf(viewer, " Global dofs sizes: all %" PetscInt_FMT " interface %" PetscInt_FMT " coarse %" PetscInt_FMT "\n", pc->pmat->rmap->N, (PetscInt)PetscRealPart(interface_size), pcbddc->coarse_size)); 230 PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening ratios: all/coarse %" PetscInt_FMT " interface/coarse %" PetscInt_FMT "\n", (PetscInt)ratio1, (PetscInt)ratio2)); 231 PetscCall(PetscViewerASCIIPrintf(viewer, " Active processes : %" PetscInt_FMT "\n", (PetscInt)gsum[0])); 232 PetscCall(PetscViewerASCIIPrintf(viewer, " Total subdomains : %" PetscInt_FMT "\n", (PetscInt)gsum[5])); 233 if (pcbddc->benign_have_null) PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subs : %" PetscInt_FMT "\n", (PetscInt)totbenign)); 234 PetscCall(PetscViewerASCIIPrintf(viewer, " Dofs type :\tMIN\tMAX\tMEAN\n")); 235 PetscCall(PetscViewerASCIIPrintf(viewer, " Interior dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[1], (PetscInt)gmax[1], (PetscInt)(gsum[1] / gsum[0]))); 236 
PetscCall(PetscViewerASCIIPrintf(viewer, " Interface dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[2], (PetscInt)gmax[2], (PetscInt)(gsum[2] / gsum[0]))); 237 PetscCall(PetscViewerASCIIPrintf(viewer, " Primal dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[3], (PetscInt)gmax[3], (PetscInt)(gsum[3] / gsum[0]))); 238 PetscCall(PetscViewerASCIIPrintf(viewer, " Local dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[4], (PetscInt)gmax[4], (PetscInt)(gsum[4] / gsum[0]))); 239 PetscCall(PetscViewerASCIIPrintf(viewer, " Local subs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[5], (PetscInt)gmax[5])); 240 PetscCall(PetscViewerFlush(viewer)); 241 242 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank)); 243 244 /* local solvers */ 245 PetscCall(PetscViewerGetSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer)); 246 if (rank == 0) { 247 PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Interior solver (rank 0)\n")); 248 PetscCall(PetscViewerASCIIPushTab(subviewer)); 249 PetscCall(KSPView(pcbddc->ksp_D, subviewer)); 250 PetscCall(PetscViewerASCIIPopTab(subviewer)); 251 PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Correction solver (rank 0)\n")); 252 PetscCall(PetscViewerASCIIPushTab(subviewer)); 253 PetscCall(KSPView(pcbddc->ksp_R, subviewer)); 254 PetscCall(PetscViewerASCIIPopTab(subviewer)); 255 PetscCall(PetscViewerFlush(subviewer)); 256 } 257 PetscCall(PetscViewerRestoreSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer)); 258 /* the coarse problem can be handled by a different communicator */ 259 if (pcbddc->coarse_ksp) color = 1; 260 else color = 0; 261 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 262 PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm)); 263 PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2))); 264 
PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank)); 265 PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer)); 266 if (color == 1) { 267 PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Coarse solver\n")); 268 PetscCall(PetscViewerASCIIPushTab(subviewer)); 269 PetscCall(KSPView(pcbddc->coarse_ksp, subviewer)); 270 PetscCall(PetscViewerASCIIPopTab(subviewer)); 271 PetscCall(PetscViewerFlush(subviewer)); 272 } 273 PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer)); 274 PetscCall(PetscSubcommDestroy(&subcomm)); 275 PetscCall(PetscViewerFlush(viewer)); 276 } 277 PetscFunctionReturn(PETSC_SUCCESS); 278 } 279 280 static PetscErrorCode PCBDDCSetDiscreteGradient_BDDC(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming) 281 { 282 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 283 284 PetscFunctionBegin; 285 PetscCall(PetscObjectReference((PetscObject)G)); 286 PetscCall(MatDestroy(&pcbddc->discretegradient)); 287 pcbddc->discretegradient = G; 288 pcbddc->nedorder = order > 0 ? order : -order; 289 pcbddc->nedfield = field; 290 pcbddc->nedglobal = global; 291 pcbddc->conforming = conforming; 292 PetscFunctionReturn(PETSC_SUCCESS); 293 } 294 295 /*@ 296 PCBDDCSetDiscreteGradient - Sets the discrete gradient to be used by the `PCBDDC` preconditioner 297 298 Collective 299 300 Input Parameters: 301 + pc - the preconditioning context 302 . G - the discrete gradient matrix (in `MATAIJ` format) 303 . order - the order of the Nedelec space (1 for the lowest order) 304 . field - the field id of the Nedelec dofs (not used if the fields have not been specified) 305 . global - the type of global ordering for the rows of `G` 306 - conforming - whether the mesh is conforming or not 307 308 Level: advanced 309 310 Note: 311 The discrete gradient matrix `G` is used to analyze the subdomain edges, and it should not contain any zero entry. 
312 For variable order spaces, the order should be set to zero. 313 If `global` is `PETSC_TRUE`, the rows of `G` should be given in global ordering for the whole dofs; 314 if `PETSC_FALSE`, the ordering should be global for the Nedelec field. 315 In the latter case, it should hold gid[i] < gid[j] iff geid[i] < geid[j], with gid the global orderding for all the dofs 316 and geid the one for the Nedelec field. 317 318 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()`, `PCBDDCSetDofsSplittingLocal()`, `MATAIJ`, `PCBDDCSetDivergenceMat()` 319 @*/ 320 PetscErrorCode PCBDDCSetDiscreteGradient(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming) 321 { 322 PetscFunctionBegin; 323 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 324 PetscValidHeaderSpecific(G, MAT_CLASSID, 2); 325 PetscValidLogicalCollectiveInt(pc, order, 3); 326 PetscValidLogicalCollectiveInt(pc, field, 4); 327 PetscValidLogicalCollectiveBool(pc, global, 5); 328 PetscValidLogicalCollectiveBool(pc, conforming, 6); 329 PetscCheckSameComm(pc, 1, G, 2); 330 PetscTryMethod(pc, "PCBDDCSetDiscreteGradient_C", (PC, Mat, PetscInt, PetscInt, PetscBool, PetscBool), (pc, G, order, field, global, conforming)); 331 PetscFunctionReturn(PETSC_SUCCESS); 332 } 333 334 static PetscErrorCode PCBDDCSetDivergenceMat_BDDC(PC pc, Mat divudotp, PetscBool trans, IS vl2l) 335 { 336 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 337 338 PetscFunctionBegin; 339 PetscCall(PetscObjectReference((PetscObject)divudotp)); 340 PetscCall(MatDestroy(&pcbddc->divudotp)); 341 pcbddc->divudotp = divudotp; 342 pcbddc->divudotp_trans = trans; 343 pcbddc->compute_nonetflux = PETSC_TRUE; 344 if (vl2l) { 345 PetscCall(PetscObjectReference((PetscObject)vl2l)); 346 PetscCall(ISDestroy(&pcbddc->divudotp_vl2l)); 347 pcbddc->divudotp_vl2l = vl2l; 348 } 349 PetscFunctionReturn(PETSC_SUCCESS); 350 } 351 352 /*@ 353 PCBDDCSetDivergenceMat - Sets the linear operator representing \int_\Omega \div {\bf u} \cdot p dx for the `PCBDDC` 
preconditioner 354 355 Collective 356 357 Input Parameters: 358 + pc - the preconditioning context 359 . divudotp - the matrix (must be of type `MATIS`) 360 . trans - if `PETSC_FALSE` (resp. `PETSC_TRUE`), then pressures are in the test (trial) space and velocities are in the trial (test) space. 361 - vl2l - optional index set describing the local (wrt the local matrix in `divudotp`) to local (wrt the local matrix 362 in the preconditioning matrix) map for the velocities 363 364 Level: advanced 365 366 Notes: 367 This auxiliary matrix is used to compute quadrature weights representing the net-flux across subdomain boundaries 368 369 If `vl2l` is `NULL`, the local ordering for velocities in `divudotp` should match that of the preconditioning matrix 370 371 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDiscreteGradient()` 372 @*/ 373 PetscErrorCode PCBDDCSetDivergenceMat(PC pc, Mat divudotp, PetscBool trans, IS vl2l) 374 { 375 PetscBool ismatis; 376 377 PetscFunctionBegin; 378 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 379 PetscValidHeaderSpecific(divudotp, MAT_CLASSID, 2); 380 PetscCheckSameComm(pc, 1, divudotp, 2); 381 PetscValidLogicalCollectiveBool(pc, trans, 3); 382 if (vl2l) PetscValidHeaderSpecific(vl2l, IS_CLASSID, 4); 383 PetscCall(PetscObjectTypeCompare((PetscObject)divudotp, MATIS, &ismatis)); 384 PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Divergence matrix needs to be of type MATIS"); 385 PetscTryMethod(pc, "PCBDDCSetDivergenceMat_C", (PC, Mat, PetscBool, IS), (pc, divudotp, trans, vl2l)); 386 PetscFunctionReturn(PETSC_SUCCESS); 387 } 388 389 static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change, PetscBool interior) 390 { 391 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 392 393 PetscFunctionBegin; 394 PetscCall(PetscObjectReference((PetscObject)change)); 395 PetscCall(MatDestroy(&pcbddc->user_ChangeOfBasisMatrix)); 396 pcbddc->user_ChangeOfBasisMatrix = change; 397 pcbddc->change_interior = interior; 398 
PetscFunctionReturn(PETSC_SUCCESS); 399 } 400 401 /*@ 402 PCBDDCSetChangeOfBasisMat - Set user defined change of basis for dofs 403 404 Collective 405 406 Input Parameters: 407 + pc - the preconditioning context 408 . change - the change of basis matrix 409 - interior - whether or not the change of basis modifies interior dofs 410 411 Level: intermediate 412 413 .seealso: [](ch_ksp), `PCBDDC` 414 @*/ 415 PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change, PetscBool interior) 416 { 417 PetscFunctionBegin; 418 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 419 PetscValidHeaderSpecific(change, MAT_CLASSID, 2); 420 PetscCheckSameComm(pc, 1, change, 2); 421 if (pc->mat) { 422 PetscInt rows_c, cols_c, rows, cols; 423 PetscCall(MatGetSize(pc->mat, &rows, &cols)); 424 PetscCall(MatGetSize(change, &rows_c, &cols_c)); 425 PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows); 426 PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols); 427 PetscCall(MatGetLocalSize(pc->mat, &rows, &cols)); 428 PetscCall(MatGetLocalSize(change, &rows_c, &cols_c)); 429 PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows); 430 PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local columns for change of basis matrix! 
%" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols); 431 } 432 PetscTryMethod(pc, "PCBDDCSetChangeOfBasisMat_C", (PC, Mat, PetscBool), (pc, change, interior)); 433 PetscFunctionReturn(PETSC_SUCCESS); 434 } 435 436 static PetscErrorCode PCBDDCSetPrimalVerticesIS_BDDC(PC pc, IS PrimalVertices) 437 { 438 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 439 PetscBool isequal = PETSC_FALSE; 440 441 PetscFunctionBegin; 442 PetscCall(PetscObjectReference((PetscObject)PrimalVertices)); 443 if (pcbddc->user_primal_vertices) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices, &isequal)); 444 PetscCall(ISDestroy(&pcbddc->user_primal_vertices)); 445 PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local)); 446 pcbddc->user_primal_vertices = PrimalVertices; 447 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 448 PetscFunctionReturn(PETSC_SUCCESS); 449 } 450 451 /*@ 452 PCBDDCSetPrimalVerticesIS - Set additional user defined primal vertices in `PCBDDC` 453 454 Collective 455 456 Input Parameters: 457 + pc - the preconditioning context 458 - PrimalVertices - index set of primal vertices in global numbering (can be empty) 459 460 Level: intermediate 461 462 Note: 463 Any process can list any global node 464 465 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()` 466 @*/ 467 PetscErrorCode PCBDDCSetPrimalVerticesIS(PC pc, IS PrimalVertices) 468 { 469 PetscFunctionBegin; 470 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 471 PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2); 472 PetscCheckSameComm(pc, 1, PrimalVertices, 2); 473 PetscTryMethod(pc, "PCBDDCSetPrimalVerticesIS_C", (PC, IS), (pc, PrimalVertices)); 474 PetscFunctionReturn(PETSC_SUCCESS); 475 } 476 477 static PetscErrorCode PCBDDCGetPrimalVerticesIS_BDDC(PC pc, IS *is) 478 { 479 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 480 481 PetscFunctionBegin; 482 *is = pcbddc->user_primal_vertices; 483 PetscFunctionReturn(PETSC_SUCCESS); 484 
}

/*@
  PCBDDCGetPrimalVerticesIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesIS()`

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in global numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 2);
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetPrimalVerticesLocalIS(): same logic as the global variant,
   but the IS is interpreted in local (subdomain) numbering */
static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference first: the incoming IS may be the one already stored */
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices_local) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices_local, &isequal));
  /* last user setting takes precedence -> drop the global-numbering variant too */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices_local = PrimalVertices;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetPrimalVerticesLocalIS - Set additional user defined primal vertices in `PCBDDC`

  Collective

  Input Parameters:
+ pc             - the preconditioning context
- PrimalVertices - index set of primal vertices in local numbering (can be empty)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesLocalIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetPrimalVerticesLocalIS(): returns a borrowed reference */
static PetscErrorCode PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc, IS *is)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *is = pcbddc->user_primal_vertices_local;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetPrimalVerticesLocalIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesLocalIS()`

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in local numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesLocalIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 2);
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesLocalIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetCoarseningRatio(): plain cache of the user value */
static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc, PetscInt k)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->coarsening_ratio = k;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetCoarseningRatio - Set coarsening ratio used in the multi-level version of `PCBDDC`

  Logically Collective

  Input Parameters:
+ pc - the preconditioning context
- k  - coarsening ratio (H/h at the coarser level)

  Options Database Key:
. -pc_bddc_coarsening_ratio <int> - Set the coarsening ratio used in multi-level coarsening

  Level: intermediate

  Note:
  Approximately `k` subdomains at the finer level will be aggregated into a single subdomain at the coarser level

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetLevels()`
@*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc, PetscInt k)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, k, 2);
  PetscTryMethod(pc, "PCBDDCSetCoarseningRatio_C", (PC, PetscInt), (pc, k));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */
static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc, PetscBool flg)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->use_exact_dirichlet_trick = flg;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  PetscTryMethod(pc, "PCBDDCSetUseExactDirichlet_C", (PC, PetscBool), (pc, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Records the level this PC sits at in the multilevel hierarchy */
static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc, PetscInt level)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->current_level = level;
  PetscFunctionReturn(PETSC_SUCCESS);
}

PetscErrorCode PCBDDCSetLevel(PC pc, PetscInt level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, level, 2);
  PetscTryMethod(pc, "PCBDDCSetLevel_C", (PC, PetscInt), (pc, level));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetLevels(): bounded above by the compile-time maximum */
static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc, PetscInt levels)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  PetscCheck(levels < PETSC_PCBDDC_MAXLEVELS, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Maximum number of additional levels for BDDC is %d", PETSC_PCBDDC_MAXLEVELS - 1);
  pcbddc->max_levels = levels;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetLevels - Sets the maximum number of additional levels allowed for multilevel `PCBDDC`

  Logically Collective

  Input Parameters:
+ pc     - the preconditioning context
- levels - the maximum number of levels

  Options Database Key:
. -pc_bddc_levels <int> - Set maximum number of levels for multilevel

  Level: intermediate

  Note:
  The default value is 0, that gives the classical two-levels BDDC algorithm

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetCoarseningRatio()`
@*/
PetscErrorCode PCBDDCSetLevels(PC pc, PetscInt levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  PetscTryMethod(pc, "PCBDDCSetLevels_C", (PC, PetscInt), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetDirichletBoundaries(): stores the global-numbering IS */
static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc, IS DirichletBoundaries)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference first: the incoming IS may be the one already stored */
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundaries) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundaries, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundaries = DirichletBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetDirichletBoundaries - Set the `IS` defining Dirichlet boundaries for the global problem.

  Collective

  Input Parameters:
+ pc                  - the preconditioning context
- DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries

  Level: intermediate

  Note:
  Provide the information if you used `MatZeroRows()` or `MatZeroRowsColumns()`. Any process can list any global node

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundariesLocal()`, `MatZeroRows()`, `MatZeroRowsColumns()`
@*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc, IS DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundaries_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetDirichletBoundariesLocal(): stores the local-numbering IS */
static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc, IS DirichletBoundaries)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference first: the incoming IS may be the one already stored */
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundariesLocal) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundariesLocal, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundariesLocal = DirichletBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetDirichletBoundariesLocal - Set the `IS` defining Dirichlet boundaries for the global problem in local ordering.

  Collective

  Input Parameters:
+ pc                  - the preconditioning context
- DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries (in local ordering)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`, `MatZeroRows()`, `MatZeroRowsColumns()`
@*/
PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc, IS DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundariesLocal_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetNeumannBoundaries(): stores the global-numbering IS */
static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc, IS NeumannBoundaries)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference first: the incoming IS may be the one already stored */
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundaries) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundaries, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundaries = NeumannBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetNeumannBoundaries - Set the `IS` defining Neumann boundaries for the global problem.

  Collective

  Input Parameters:
+ pc                - the preconditioning context
- NeumannBoundaries - parallel `IS` defining the Neumann boundaries

  Level: intermediate

  Note:
  Any process can list any global node

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundariesLocal()`
@*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc, IS NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundaries_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetNeumannBoundariesLocal(): stores the local-numbering IS */
static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc, IS NeumannBoundaries)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference first: the incoming IS may be the one already stored */
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundariesLocal) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundariesLocal, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundariesLocal = NeumannBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetNeumannBoundariesLocal - Set the `IS` defining Neumann boundaries for the global problem in local ordering.

  Collective

  Input Parameters:
+ pc                - the preconditioning context
- NeumannBoundaries - parallel `IS` defining the subdomain part of Neumann boundaries (in local ordering)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc, IS NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundariesLocal_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetDirichletBoundaries(): returns a borrowed reference */
static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc, IS *DirichletBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *DirichletBoundaries = pcbddc->DirichletBoundaries;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetDirichletBoundaries - Get parallel `IS` for Dirichlet boundaries

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. DirichletBoundaries - index set defining the Dirichlet boundaries

  Level: intermediate

  Note:
  The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetDirichletBoundaries()`

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc, IS *DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCBDDCGetDirichletBoundaries_C", (PC, IS *), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetDirichletBoundariesLocal(): returns a borrowed reference */
static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc, IS *DirichletBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *DirichletBoundaries = pcbddc->DirichletBoundariesLocal;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetDirichletBoundariesLocal - Get parallel `IS` for Dirichlet boundaries (in local ordering)

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

  Level: intermediate

  Note:
  The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetDirichletBoundariesLocal()`)
  or a global-to-local map of the global `IS` (if provided with `PCBDDCSetDirichletBoundaries()`).
  In the latter case, the `IS` will be available only after `PCSetUp()`.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc, IS *DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCBDDCGetDirichletBoundariesLocal_C", (PC, IS *), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetNeumannBoundaries(): returns a borrowed reference */
static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc, IS *NeumannBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *NeumannBoundaries = pcbddc->NeumannBoundaries;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetNeumannBoundaries - Get parallel `IS` for Neumann boundaries

  Not Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. NeumannBoundaries - index set defining the Neumann boundaries

  Level: intermediate

  Note:
  The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetNeumannBoundaries()`

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()`
@*/
PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc, IS *NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCBDDCGetNeumannBoundaries_C", (PC, IS *), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetNeumannBoundariesLocal(): returns a borrowed reference */
static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc, IS *NeumannBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *NeumannBoundaries = pcbddc->NeumannBoundariesLocal;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetNeumannBoundariesLocal - Get parallel `IS` for Neumann boundaries (in local ordering)

  Not Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

  Level: intermediate

  Note:
  The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetNeumannBoundariesLocal()`)
  or a global-to-local map of the global `IS` (if provided with `PCBDDCSetNeumannBoundaries()`).
  In the latter case, the `IS` will be available after `PCSetUp()`.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetNeumannBoundariesLocal()`, `PCBDDCGetNeumannBoundaries()`
@*/
PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc, IS *NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscUseMethod(pc, "PCBDDCGetNeumannBoundariesLocal_C", (PC, IS *), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetLocalAdjacencyGraph(): caches the user CSR in the graph
   structure, skipping the update when the very same adjacency is passed again */
static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode)
{
  PC_BDDC    *pcbddc    = (PC_BDDC *)pc->data;
  PCBDDCGraph mat_graph = pcbddc->mat_graph;
  PetscBool   same_data = PETSC_FALSE;

  PetscFunctionBegin;
  if (!nvtxs) {
    /* empty graph: honor the ownership transfer, then drop any stored CSR */
    if (copymode == PETSC_OWN_POINTER) {
      PetscCall(PetscFree(xadj));
      PetscCall(PetscFree(adjncy));
    }
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  if (mat_graph->nvtxs == nvtxs && mat_graph->freecsr) { /* we own the data */
    /* cheap pointer check first, then full comparison when the edge counts match */
    if (mat_graph->xadj == xadj && mat_graph->adjncy == adjncy) same_data = PETSC_TRUE;
    if (!same_data && mat_graph->xadj[nvtxs] == xadj[nvtxs]) {
      PetscCall(PetscArraycmp(xadj, mat_graph->xadj, nvtxs + 1, &same_data));
      if (same_data) PetscCall(PetscArraycmp(adjncy, mat_graph->adjncy, xadj[nvtxs], &same_data));
    }
  }
  if (!same_data) {
    /* free old CSR */
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    /* get CSR into graph structure */
    if (copymode == PETSC_COPY_VALUES) {
      PetscCall(PetscMalloc1(nvtxs + 1, &mat_graph->xadj));
      PetscCall(PetscMalloc1(xadj[nvtxs], &mat_graph->adjncy));
      PetscCall(PetscArraycpy(mat_graph->xadj, xadj, nvtxs + 1));
      PetscCall(PetscArraycpy(mat_graph->adjncy, adjncy, xadj[nvtxs]));
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_OWN_POINTER) {
      mat_graph->xadj    = (PetscInt *)xadj;
      mat_graph->adjncy  = (PetscInt *)adjncy;
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_USE_POINTER) {
      mat_graph->xadj    = (PetscInt *)xadj;
      mat_graph->adjncy  = (PetscInt *)adjncy;
      mat_graph->freecsr = PETSC_FALSE;
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported copy mode %d", copymode);
    mat_graph->nvtxs_csr         = nvtxs;
    pcbddc->recompute_topography = PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local degrees of freedom.

  Not collective

  Input Parameters:
+ pc       - the preconditioning context.
. nvtxs    - number of local vertices of the graph (i.e., the number of local dofs).
. xadj     - CSR format row pointers for the connectivity of the dofs
. adjncy   - CSR format column pointers for the connectivity of the dofs
- copymode - supported modes are `PETSC_COPY_VALUES`, `PETSC_USE_POINTER` or `PETSC_OWN_POINTER`.

  Level: intermediate

  Note:
  A dof is considered connected with all local dofs if xadj[dof+1]-xadj[dof] == 1 and adjncy[xadj[dof]] is negative.
1062 1063 .seealso: [](ch_ksp), `PCBDDC`, `PetscCopyMode` 1064 @*/ 1065 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode) 1066 { 1067 void (*f)(void) = NULL; 1068 1069 PetscFunctionBegin; 1070 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1071 if (nvtxs) { 1072 PetscAssertPointer(xadj, 3); 1073 if (xadj[nvtxs]) PetscAssertPointer(adjncy, 4); 1074 } 1075 PetscTryMethod(pc, "PCBDDCSetLocalAdjacencyGraph_C", (PC, PetscInt, const PetscInt[], const PetscInt[], PetscCopyMode), (pc, nvtxs, xadj, adjncy, copymode)); 1076 /* free arrays if PCBDDC is not the PC type */ 1077 PetscCall(PetscObjectQueryFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", &f)); 1078 if (!f && copymode == PETSC_OWN_POINTER) { 1079 PetscCall(PetscFree(xadj)); 1080 PetscCall(PetscFree(adjncy)); 1081 } 1082 PetscFunctionReturn(PETSC_SUCCESS); 1083 } 1084 1085 static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc, PetscInt n_is, IS ISForDofs[]) 1086 { 1087 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 1088 PetscInt i; 1089 PetscBool isequal = PETSC_FALSE; 1090 1091 PetscFunctionBegin; 1092 if (pcbddc->n_ISForDofsLocal == n_is) { 1093 for (i = 0; i < n_is; i++) { 1094 PetscBool isequalt; 1095 PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofsLocal[i], &isequalt)); 1096 if (!isequalt) break; 1097 } 1098 if (i == n_is) isequal = PETSC_TRUE; 1099 } 1100 for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i])); 1101 /* Destroy ISes if they were already set */ 1102 for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i])); 1103 PetscCall(PetscFree(pcbddc->ISForDofsLocal)); 1104 /* last user setting takes precedence -> destroy any other customization */ 1105 for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i])); 1106 PetscCall(PetscFree(pcbddc->ISForDofs)); 1107 pcbddc->n_ISForDofs = 0; 1108 /* allocate space then set 
*/ 1109 if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofsLocal)); 1110 for (i = 0; i < n_is; i++) pcbddc->ISForDofsLocal[i] = ISForDofs[i]; 1111 pcbddc->n_ISForDofsLocal = n_is; 1112 if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE; 1113 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 1114 PetscFunctionReturn(PETSC_SUCCESS); 1115 } 1116 1117 /*@ 1118 PCBDDCSetDofsSplittingLocal - Set the `IS` defining fields of the local subdomain matrix 1119 1120 Collective 1121 1122 Input Parameters: 1123 + pc - the preconditioning context 1124 . n_is - number of index sets defining the fields, must be the same on all MPI processes 1125 - ISForDofs - array of `IS` describing the fields in local ordering 1126 1127 Level: intermediate 1128 1129 Note: 1130 Not all nodes need to be listed, unlisted nodes will belong to the complement field. 1131 1132 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()` 1133 @*/ 1134 PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc, PetscInt n_is, IS ISForDofs[]) 1135 { 1136 PetscInt i; 1137 1138 PetscFunctionBegin; 1139 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1140 PetscValidLogicalCollectiveInt(pc, n_is, 2); 1141 for (i = 0; i < n_is; i++) { 1142 PetscCheckSameComm(pc, 1, ISForDofs[i], 3); 1143 PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3); 1144 } 1145 PetscTryMethod(pc, "PCBDDCSetDofsSplittingLocal_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs)); 1146 PetscFunctionReturn(PETSC_SUCCESS); 1147 } 1148 1149 static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc, PetscInt n_is, IS ISForDofs[]) 1150 { 1151 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 1152 PetscInt i; 1153 PetscBool isequal = PETSC_FALSE; 1154 1155 PetscFunctionBegin; 1156 if (pcbddc->n_ISForDofs == n_is) { 1157 for (i = 0; i < n_is; i++) { 1158 PetscBool isequalt; 1159 PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofs[i], &isequalt)); 1160 if (!isequalt) break; 1161 } 1162 if (i == n_is) isequal = PETSC_TRUE; 1163 } 1164 for (i = 0; i < n_is; i++) 
PetscCall(PetscObjectReference((PetscObject)ISForDofs[i])); 1165 /* Destroy ISes if they were already set */ 1166 for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i])); 1167 PetscCall(PetscFree(pcbddc->ISForDofs)); 1168 /* last user setting takes precedence -> destroy any other customization */ 1169 for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i])); 1170 PetscCall(PetscFree(pcbddc->ISForDofsLocal)); 1171 pcbddc->n_ISForDofsLocal = 0; 1172 /* allocate space then set */ 1173 if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofs)); 1174 for (i = 0; i < n_is; i++) pcbddc->ISForDofs[i] = ISForDofs[i]; 1175 pcbddc->n_ISForDofs = n_is; 1176 if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE; 1177 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 1178 PetscFunctionReturn(PETSC_SUCCESS); 1179 } 1180 1181 /*@ 1182 PCBDDCSetDofsSplitting - Set the `IS` defining fields of the global matrix 1183 1184 Collective 1185 1186 Input Parameters: 1187 + pc - the preconditioning context 1188 . n_is - number of index sets defining the fields 1189 - ISForDofs - array of `IS` describing the fields in global ordering 1190 1191 Level: intermediate 1192 1193 Note: 1194 Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field. 

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplittingLocal()`
@*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n_is, 2);
  for (i = 0; i < n_is; i++) {
    PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3);
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
  }
  PetscTryMethod(pc, "PCBDDCSetDofsSplitting_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
                     guess if a transformation of basis approach has been selected.

   Input Parameter:
+  pc - the preconditioner context

   Note:
   The interface routine PCPreSolve() is not usually called directly by
   the user, but instead is called by KSPSolve().
*/
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PC_IS    *pcis   = (PC_IS *)(pc->data);
  Vec       used_vec;
  PetscBool iscg, save_rhs = PETSC_TRUE, benign_correction_computed;

  PetscFunctionBegin;
  /* if we are working with CG, one dirichlet solve can be avoided during Krylov iterations */
  if (ksp) {
    PetscCall(PetscObjectTypeCompareAny((PetscObject)ksp, &iscg, KSPCG, KSPGROPPCG, KSPPIPECG, KSPPIPELCG, KSPPIPECGRR, ""));
    if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || !iscg || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));
  }
  if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));

  /* Creates parallel work vectors used in presolve */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
if (!pcbddc->temp_solution) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->temp_solution)); 1242 1243 pcbddc->temp_solution_used = PETSC_FALSE; 1244 if (x) { 1245 PetscCall(PetscObjectReference((PetscObject)x)); 1246 used_vec = x; 1247 } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */ 1248 PetscCall(PetscObjectReference((PetscObject)pcbddc->temp_solution)); 1249 used_vec = pcbddc->temp_solution; 1250 PetscCall(VecSet(used_vec, 0.0)); 1251 pcbddc->temp_solution_used = PETSC_TRUE; 1252 PetscCall(VecCopy(rhs, pcbddc->original_rhs)); 1253 save_rhs = PETSC_FALSE; 1254 pcbddc->eliminate_dirdofs = PETSC_TRUE; 1255 } 1256 1257 /* hack into ksp data structure since PCPreSolve comes earlier than setting to zero the guess in src/ksp/ksp/interface/itfunc.c */ 1258 if (ksp) { 1259 /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */ 1260 PetscCall(KSPGetInitialGuessNonzero(ksp, &pcbddc->ksp_guess_nonzero)); 1261 if (!pcbddc->ksp_guess_nonzero) PetscCall(VecSet(used_vec, 0.0)); 1262 } 1263 1264 pcbddc->rhs_change = PETSC_FALSE; 1265 /* Take into account zeroed rows -> change rhs and store solution removed */ 1266 if (rhs && pcbddc->eliminate_dirdofs) { 1267 IS dirIS = NULL; 1268 1269 /* DirichletBoundariesLocal may not be consistent among neighbours; gets a dirichlet dofs IS from graph (may be cached) */ 1270 PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirIS)); 1271 if (dirIS) { 1272 Mat_IS *matis = (Mat_IS *)pc->pmat->data; 1273 PetscInt dirsize, i, *is_indices; 1274 PetscScalar *array_x; 1275 const PetscScalar *array_diagonal; 1276 1277 PetscCall(MatGetDiagonal(pc->pmat, pcis->vec1_global)); 1278 PetscCall(VecPointwiseDivide(pcis->vec1_global, rhs, pcis->vec1_global)); 1279 PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD)); 1280 PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD)); 1281 
PetscCall(VecScatterBegin(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD)); 1282 PetscCall(VecScatterEnd(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD)); 1283 PetscCall(ISGetLocalSize(dirIS, &dirsize)); 1284 PetscCall(VecGetArray(pcis->vec1_N, &array_x)); 1285 PetscCall(VecGetArrayRead(pcis->vec2_N, &array_diagonal)); 1286 PetscCall(ISGetIndices(dirIS, (const PetscInt **)&is_indices)); 1287 for (i = 0; i < dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]]; 1288 PetscCall(ISRestoreIndices(dirIS, (const PetscInt **)&is_indices)); 1289 PetscCall(VecRestoreArrayRead(pcis->vec2_N, &array_diagonal)); 1290 PetscCall(VecRestoreArray(pcis->vec1_N, &array_x)); 1291 PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE)); 1292 PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE)); 1293 pcbddc->rhs_change = PETSC_TRUE; 1294 PetscCall(ISDestroy(&dirIS)); 1295 } 1296 } 1297 1298 /* remove the computed solution or the initial guess from the rhs */ 1299 if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero)) { 1300 /* save the original rhs */ 1301 if (save_rhs) { 1302 PetscCall(VecSwap(rhs, pcbddc->original_rhs)); 1303 save_rhs = PETSC_FALSE; 1304 } 1305 pcbddc->rhs_change = PETSC_TRUE; 1306 PetscCall(VecScale(used_vec, -1.0)); 1307 PetscCall(MatMultAdd(pc->mat, used_vec, pcbddc->original_rhs, rhs)); 1308 PetscCall(VecScale(used_vec, -1.0)); 1309 PetscCall(VecCopy(used_vec, pcbddc->temp_solution)); 1310 pcbddc->temp_solution_used = PETSC_TRUE; 1311 if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE)); 1312 } 1313 PetscCall(VecDestroy(&used_vec)); 1314 1315 /* compute initial vector in benign space if needed 1316 and remove non-benign solution from the rhs */ 1317 benign_correction_computed = PETSC_FALSE; 1318 if (rhs && pcbddc->benign_compute_correction && (pcbddc->benign_have_null || pcbddc->benign_apply_coarse_only)) { 
1319 /* compute u^*_h using ideas similar to those in Xuemin Tu's PhD thesis (see Section 4.8.1) 1320 Recursively apply BDDC in the multilevel case */ 1321 if (!pcbddc->benign_vec) PetscCall(VecDuplicate(rhs, &pcbddc->benign_vec)); 1322 /* keep applying coarse solver unless we no longer have benign subdomains */ 1323 pcbddc->benign_apply_coarse_only = pcbddc->benign_have_null ? PETSC_TRUE : PETSC_FALSE; 1324 if (!pcbddc->benign_skip_correction) { 1325 PetscCall(PCApply_BDDC(pc, rhs, pcbddc->benign_vec)); 1326 benign_correction_computed = PETSC_TRUE; 1327 if (pcbddc->temp_solution_used) PetscCall(VecAXPY(pcbddc->temp_solution, 1.0, pcbddc->benign_vec)); 1328 PetscCall(VecScale(pcbddc->benign_vec, -1.0)); 1329 /* store the original rhs if not done earlier */ 1330 if (save_rhs) PetscCall(VecSwap(rhs, pcbddc->original_rhs)); 1331 if (pcbddc->rhs_change) { 1332 PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, rhs, rhs)); 1333 } else { 1334 PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, pcbddc->original_rhs, rhs)); 1335 } 1336 pcbddc->rhs_change = PETSC_TRUE; 1337 } 1338 pcbddc->benign_apply_coarse_only = PETSC_FALSE; 1339 } else { 1340 PetscCall(VecDestroy(&pcbddc->benign_vec)); 1341 } 1342 1343 /* dbg output */ 1344 if (pcbddc->dbg_flag && benign_correction_computed) { 1345 Vec v; 1346 1347 PetscCall(VecDuplicate(pcis->vec1_global, &v)); 1348 if (pcbddc->ChangeOfBasisMatrix) { 1349 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, v)); 1350 } else { 1351 PetscCall(VecCopy(rhs, v)); 1352 } 1353 PetscCall(PCBDDCBenignGetOrSetP0(pc, v, PETSC_TRUE)); 1354 PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "LEVEL %" PetscInt_FMT ": is the correction benign?\n", pcbddc->current_level)); 1355 PetscCall(PetscScalarView(pcbddc->benign_n, pcbddc->benign_p0, pcbddc->dbg_viewer)); 1356 PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); 1357 PetscCall(VecDestroy(&v)); 1358 } 1359 1360 /* set initial guess if using PCG */ 1361 pcbddc->exact_dirichlet_trick_app = 
PETSC_FALSE; 1362 if (x && pcbddc->use_exact_dirichlet_trick) { 1363 PetscCall(VecSet(x, 0.0)); 1364 if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) { 1365 if (benign_correction_computed) { /* we have already saved the changed rhs */ 1366 PetscCall(VecLockReadPop(pcis->vec1_global)); 1367 } else { 1368 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, pcis->vec1_global)); 1369 } 1370 PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1371 PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1372 } else { 1373 PetscCall(VecScatterBegin(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1374 PetscCall(VecScatterEnd(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1375 } 1376 PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1377 PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D)); 1378 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1379 PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D)); 1380 if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) { 1381 PetscCall(VecSet(pcis->vec1_global, 0.)); 1382 PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE)); 1383 PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE)); 1384 PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcis->vec1_global, x)); 1385 } else { 1386 PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE)); 1387 PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE)); 1388 } 1389 if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE)); 1390 pcbddc->exact_dirichlet_trick_app = PETSC_TRUE; 1391 } else if (pcbddc->ChangeOfBasisMatrix && 
pcbddc->change_interior && benign_correction_computed && pcbddc->use_exact_dirichlet_trick) {
    PetscCall(VecLockReadPop(pcis->vec1_global));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
                      approach has been selected. Also, restores rhs to its original state.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCPostSolve()

   Note:
   The interface routine PCPostSolve() is not usually called directly by
   the user, but instead is called by KSPSolve().
*/
static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  /* add solution removed in presolve */
  if (x && pcbddc->rhs_change) {
    if (pcbddc->temp_solution_used) {
      /* PCPreSolve_BDDC subtracted A*temp_solution from the rhs and stored the
         removed part in temp_solution; add it back to the computed solution */
      PetscCall(VecAXPY(x, 1.0, pcbddc->temp_solution));
    } else if (pcbddc->benign_compute_correction && pcbddc->benign_vec) {
      /* benign_vec was negated in PCPreSolve_BDDC before being folded into the
         rhs, so subtracting it here adds the (positively signed) correction back */
      PetscCall(VecAXPY(x, -1.0, pcbddc->benign_vec));
    }
    /* restore to original state (not for FETI-DP) */
    if (ksp) pcbddc->temp_solution_used = PETSC_FALSE;
  }

  /* restore rhs to its original state (not needed for FETI-DP);
     presolve swapped rhs with original_rhs, so swapping again undoes it */
  if (rhs && pcbddc->rhs_change) {
    PetscCall(VecSwap(rhs, pcbddc->original_rhs));
    pcbddc->rhs_change = PETSC_FALSE;
  }
  /* restore ksp guess state saved in PCPreSolve_BDDC */
  if (ksp) {
    PetscCall(KSPSetInitialGuessNonzero(ksp, pcbddc->ksp_guess_nonzero));
    /* reset flag for exact dirichlet trick */
    pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// PetscClangLinter pragma disable: -fdoc-sowing-chars
/*
   PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner
                  by setting data structures and options.

   Input Parameter:
. 
pc - the preconditioner context

   Application Interface Routine: PCSetUp()
*/
static PetscErrorCode PCSetUp_BDDC(PC pc)
{
  PC_BDDC        *pcbddc = (PC_BDDC *)pc->data;
  PCBDDCSubSchurs sub_schurs;
  Mat_IS         *matis;
  MatNullSpace    nearnullspace;
  Mat             lA;
  IS              lP, zerodiag = NULL;
  PetscInt        nrows, ncols;
  PetscMPIInt     size;
  PetscBool       computesubschurs;
  PetscBool       computeconstraintsmatrix;
  PetscBool       new_nearnullspace_provided, ismatis, rl;
  PetscBool       isset, issym, isspd;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &ismatis));
  PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PCBDDC preconditioner requires matrix of type MATIS");
  PetscCall(MatGetSize(pc->pmat, &nrows, &ncols));
  PetscCheck(nrows == ncols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCBDDC preconditioner requires a square preconditioning matrix");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  matis = (Mat_IS *)pc->pmat->data;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
     Also, BDDC builds its own KSP for the Dirichlet problem */
  rl = pcbddc->recompute_topography;
  if (!pc->setupcalled || pc->flag == DIFFERENT_NONZERO_PATTERN) rl = PETSC_TRUE;
  /* collective: all ranks must agree on whether topology is recomputed */
  PetscCall(MPIU_Allreduce(&rl, &pcbddc->recompute_topography, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc)));
  if (pcbddc->recompute_topography) {
    pcbddc->graphanalyzed    = PETSC_FALSE;
    computeconstraintsmatrix = PETSC_TRUE;
  } else {
    computeconstraintsmatrix = PETSC_FALSE;
  }

  /* check parameters' compatibility */
  if (!pcbddc->use_deluxe_scaling) pcbddc->deluxe_zerorows = PETSC_FALSE;
  /* adaptive selection is active when either threshold is nonzero; both deluxe
     scaling and adaptivity are meaningful only with more than one subdomain */
  pcbddc->adaptive_selection   = (PetscBool)(pcbddc->adaptive_threshold[0] != 0.0 || pcbddc->adaptive_threshold[1] != 0.0);
  pcbddc->use_deluxe_scaling   = (PetscBool)(pcbddc->use_deluxe_scaling && size > 1);
  pcbddc->adaptive_selection   = (PetscBool)(pcbddc->adaptive_selection && size > 1);
  pcbddc->adaptive_userdefined = (PetscBool)(pcbddc->adaptive_selection && pcbddc->adaptive_userdefined);
  if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE;

  computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling);

  /* activate all connected components if the netflux has been requested */
  if (pcbddc->compute_nonetflux) {
    pcbddc->use_vertices = PETSC_TRUE;
    pcbddc->use_edges    = PETSC_TRUE;
    pcbddc->use_faces    = PETSC_TRUE;
  }

  /* Get stdout for dbg */
  if (pcbddc->dbg_flag) {
    if (!pcbddc->dbg_viewer) pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc));
    PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer));
    PetscCall(PetscViewerASCIIAddTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
  }

  /* process topology information */
  PetscCall(PetscLogEventBegin(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));
  if (pcbddc->recompute_topography) {
    PetscCall(PCBDDCComputeLocalTopologyInfo(pc));
    if (pcbddc->discretegradient) PetscCall(PCBDDCNedelecSupport(pc));
  }
  if (pcbddc->corner_selected) pcbddc->use_vertices = PETSC_TRUE;

  /* change basis if requested by the user */
  if (pcbddc->user_ChangeOfBasisMatrix) {
    /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */
    pcbddc->use_change_of_basis = PETSC_FALSE;
    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->user_ChangeOfBasisMatrix));
  } else {
    /* no user basis change: local_mat is simply a reference to matis->A */
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /*
     Compute change of basis on local pressures (aka zerodiag dofs) with the benign trick
     This should come earlier then PCISSetUp for extracting the correct subdomain matrices
  */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_TRUE));
  if (pcbddc->benign_saddle_point) {
    PC_IS *pcis = (PC_IS *)pc->data;

    if (pcbddc->user_ChangeOfBasisMatrix || pcbddc->use_change_of_basis || !computesubschurs) pcbddc->benign_change_explicit = PETSC_TRUE;
    /* detect local saddle point and change the basis in pcbddc->local_mat */
    PetscCall(PCBDDCBenignDetectSaddlePoint(pc, (PetscBool)(!pcbddc->recompute_topography), &zerodiag));
    /* pop B0 mat from local mat */
    PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    /* give pcis a hint to not reuse submatrices during PCISCreate */
    if (pc->flag == SAME_NONZERO_PATTERN && pcis->reusesubmatrices == PETSC_TRUE) {
      if (pcbddc->benign_n && (pcbddc->benign_change_explicit || pcbddc->dbg_flag)) {
        pcis->reusesubmatrices = PETSC_FALSE;
      } else {
        pcis->reusesubmatrices = PETSC_TRUE;
      }
    } else {
      pcis->reusesubmatrices = PETSC_FALSE;
    }
  }

  /* propagate relevant information (symmetry/SPD flags) from the assembled local matrix */
  PetscCall(MatIsSymmetricKnown(matis->A, &isset, &issym));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SYMMETRIC, issym));
  PetscCall(MatIsSPDKnown(matis->A, &isset, &isspd));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SPD, isspd));

  /* Set up all the "iterative substructuring" common block without computing solvers */
  {
    Mat temp_mat;

    /* temporarily swap matis->A with pcbddc->local_mat so that PCISSetUp
       extracts its submatrices from the (possibly basis-changed) local matrix;
       the swap is undone right after */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_FALSE));
    pcbddc->local_mat = matis->A;
    matis->A          = temp_mat;
  }

  /* Analyze interface */
  if (!pcbddc->graphanalyzed) {
    PetscCall(PCBDDCAnalyzeInterface(pc));
    computeconstraintsmatrix = PETSC_TRUE;
    PetscCheck(!(pcbddc->adaptive_selection && !pcbddc->use_deluxe_scaling && !pcbddc->mat_graph->twodim), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot compute the adaptive primal space for a problem with 3D edges without deluxe scaling");
    if (pcbddc->compute_nonetflux) {
      MatNullSpace nnfnnsp;

      PetscCheck(pcbddc->divudotp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Missing divudotp operator");
      PetscCall(PCBDDCComputeNoNetFlux(pc->pmat, pcbddc->divudotp, pcbddc->divudotp_trans, pcbddc->divudotp_vl2l, pcbddc->mat_graph, &nnfnnsp));
      /* TODO what if a nearnullspace is already attached? */
      if (nnfnnsp) {
        PetscCall(MatSetNearNullSpace(pc->pmat, nnfnnsp));
        PetscCall(MatNullSpaceDestroy(&nnfnnsp));
      }
    }
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));

  /* check existence of a divergence free extension, i.e.
     b(v_I,p_0) = 0 for all v_I (raise error if not).
     Also, check that PCBDDCBenignGetOrSetP0 works */
  if (pcbddc->benign_saddle_point && pcbddc->dbg_flag > 1) PetscCall(PCBDDCBenignCheck(pc, zerodiag));
  PetscCall(ISDestroy(&zerodiag));

  /* Setup local dirichlet solver ksp_D and sub_schurs solvers */
  if (computesubschurs && pcbddc->recompute_topography) PetscCall(PCBDDCInitSubSchurs(pc));
  /* SetUp Scaling operator (scaling matrices could be needed in SubSchursSetUp)*/
  if (!pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* finish setup solvers and do adaptive selection of constraints;
     the relative order of the two calls depends on whether the Schur
     complements are built explicitly */
  sub_schurs = pcbddc->sub_schurs;
  if (sub_schurs && sub_schurs->schur_explicit) {
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
  } else {
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
  }
  if (pcbddc->adaptive_selection) {
    PetscCall(PCBDDCAdaptiveSelection(pc));
    computeconstraintsmatrix = PETSC_TRUE;
  }

  /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
  new_nearnullspace_provided = PETSC_FALSE;
  PetscCall(MatGetNearNullSpace(pc->pmat, &nearnullspace));
  if (pcbddc->onearnullspace) { /* already used nearnullspace */
    if (!nearnullspace) {       /* near null space attached to mat has been destroyed */
      new_nearnullspace_provided = PETSC_TRUE;
    } else {
      /* determine if the two nullspaces are different (should be lightweight) */
      if (nearnullspace != pcbddc->onearnullspace) {
        new_nearnullspace_provided = PETSC_TRUE;
      } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
        PetscInt         i;
        const Vec       *nearnullvecs;
        PetscObjectState state;
        PetscInt         nnsp_size;

        PetscCall(MatNullSpaceGetVecs(nearnullspace, NULL, &nnsp_size, &nearnullvecs));
        for (i = 0; i < nnsp_size; i++) {
          PetscCall(PetscObjectStateGet((PetscObject)nearnullvecs[i], &state));
          if (pcbddc->onearnullvecs_state[i] != state) {
            new_nearnullspace_provided = PETSC_TRUE;
            break;
          }
        }
      }
    }
  } else {
    if (!nearnullspace) { /* both nearnullspaces are null */
      new_nearnullspace_provided = PETSC_FALSE;
    } else { /* nearnullspace attached later */
      new_nearnullspace_provided = PETSC_TRUE;
    }
  }

  /* Setup constraints and related work vectors */
  /* reset primal space flags */
  PetscCall(PetscLogEventBegin(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));
  pcbddc->new_primal_space       = PETSC_FALSE;
  pcbddc->new_primal_space_local = PETSC_FALSE;
  if (computeconstraintsmatrix || new_nearnullspace_provided) {
    /* It also sets the primal space flags */
    PetscCall(PCBDDCConstraintsSetUp(pc));
  }
  /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
  PetscCall(PCBDDCSetUpLocalWorkVectors(pc));

  if (pcbddc->use_change_of_basis) {
    PC_IS *pcis = (PC_IS *)(pc->data);

    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->ChangeOfBasisMatrix));
    if (pcbddc->benign_change) {
      PetscCall(MatDestroy(&pcbddc->benign_B0));
      /* pop B0 from pcbddc->local_mat */
      PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    }
    /* get submatrices: the PCIS blocks must be re-extracted from the
       basis-changed local matrix */
    PetscCall(MatDestroy(&pcis->A_IB));
    PetscCall(MatDestroy(&pcis->A_BI));
    PetscCall(MatDestroy(&pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_I_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_IB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_I_local, MAT_INITIAL_MATRIX, &pcis->A_BI));
    /* set flag in pcis to not reuse submatrices during PCISCreate */
    pcis->reusesubmatrices = PETSC_FALSE;
  } else if (!pcbddc->user_ChangeOfBasisMatrix && !pcbddc->benign_change) {
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /* interface pressure block row for B_C */
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lP", (PetscObject *)&lP));
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lA", (PetscObject *)&lA));
  if (lA && lP) {
    PC_IS    *pcis = (PC_IS *)pc->data;
    Mat       B_BI, B_BB, Bt_BI, Bt_BB;
    PetscBool issym;

    PetscCall(MatIsSymmetric(lA, PETSC_SMALL, &issym));
    if (issym) {
      /* symmetric case: the transposed blocks are represented implicitly */
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateTranspose(B_BI, &Bt_BI));
      PetscCall(MatCreateTranspose(B_BB, &Bt_BB));
    } else {
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_I_local, lP, MAT_INITIAL_MATRIX, &Bt_BI));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_B_local, lP, MAT_INITIAL_MATRIX, &Bt_BB));
    }
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BI", (PetscObject)B_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BB", (PetscObject)B_BB));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BI", (PetscObject)Bt_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BB", (PetscObject)Bt_BB));
    PetscCall(MatDestroy(&B_BI));
    PetscCall(MatDestroy(&B_BB));
    PetscCall(MatDestroy(&Bt_BI));
    PetscCall(MatDestroy(&Bt_BB));
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));

  /* SetUp coarse and local Neumann solvers */
  PetscCall(PCBDDCSetUpSolvers(pc));
  /* SetUp Scaling operator */
  if (pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* mark topography as done */
  pcbddc->recompute_topography = PETSC_FALSE;

  /* wrap pcis->A_IB and pcis->A_BI if we did not change explicitly the variables on the pressures */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_FALSE));

  if (pcbddc->dbg_flag) {
    PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
    PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

// PetscClangLinter pragma disable: -fdoc-sowing-chars
/*
   PCApply_BDDC - Applies the BDDC operator to a vector.

   Input Parameters:
+  pc - the preconditioner context
-  r - input vector (global)

   Output Parameter:
.  z - output vector (global)

   Application Interface Routine: PCApply()
*/
static PetscErrorCode PCApply_BDDC(PC pc, Vec r, Vec z)
{
  PC_IS            *pcis   = (PC_IS *)(pc->data);
  PC_BDDC          *pcbddc = (PC_BDDC *)(pc->data);
  Mat               lA     = NULL;
  PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one   = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero  = 0.0;
  /* This code is similar to that provided in nn.c for PCNN
     NN interface preconditioner changed to BDDC
     Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? 
pc->mat : pc->pmat, &lA));

  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* swap r with the work vector: from here on r points at the
       basis-transformed rhs, work_change at the original input */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
    swap                = pcbddc->work_change;
    pcbddc->work_change = r;
    r                   = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    if (pcbddc->benign_apply_coarse_only && pcbddc->use_exact_dirichlet_trick && pcbddc->change_interior) {
      PetscCall(VecCopy(r, pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET && !pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r, z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    /*
      Assembling right hand side for BDDC operator
      - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
      - pcis->vec1_B the interface part of the global vector z
    */
    if (n_D) {
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D, m_one));
      if (pcbddc->switch_static) {
        PetscCall(VecSet(pcis->vec1_N, 0.));
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        /* when a static switch basis change is present, conjugate the local
           operator application with it: C^T * lA * C */
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      } else {
        PetscCall(MatMult(pcis->A_BI, pcis->vec2_D, pcis->vec1_B));
      }
    } else {
      PetscCall(VecSet(pcis->vec1_B, zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
  } else {
    if (!pcbddc->benign_apply_coarse_only) PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    PetscCheck(pcbddc->switch_static, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You forgot to pass -pc_bddc_switch_static");
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_FALSE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    /* lumped extension: scatter the interior part back and we are done */
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMult(lA, pcis->vec1_D, pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));

  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D, m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, 
pc->mat : pc->pmat, &lA)); 1930 if (pcbddc->ChangeOfBasisMatrix) { 1931 Vec swap; 1932 1933 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change)); 1934 swap = pcbddc->work_change; 1935 pcbddc->work_change = r; 1936 r = swap; 1937 /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */ 1938 if (pcbddc->benign_apply_coarse_only && pcbddc->exact_dirichlet_trick_app && pcbddc->change_interior) { 1939 PetscCall(VecCopy(r, pcis->vec1_global)); 1940 PetscCall(VecLockReadPush(pcis->vec1_global)); 1941 } 1942 } 1943 if (pcbddc->benign_have_null) { /* get p0 from r */ 1944 PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE)); 1945 } 1946 if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) { 1947 PetscCall(VecCopy(r, z)); 1948 /* First Dirichlet solve */ 1949 PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1950 PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1951 /* 1952 Assembling right hand side for BDDC operator 1953 - pcis->vec1_D for the Dirichlet part (if needed, i.e. 
pcbddc->switch_static == PETSC_TRUE) 1954 - pcis->vec1_B the interface part of the global vector z 1955 */ 1956 if (n_D) { 1957 PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1958 PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D)); 1959 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1960 PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D)); 1961 PetscCall(VecScale(pcis->vec2_D, m_one)); 1962 if (pcbddc->switch_static) { 1963 PetscCall(VecSet(pcis->vec1_N, 0.)); 1964 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1965 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1966 if (!pcbddc->switch_static_change) { 1967 PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N)); 1968 } else { 1969 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1970 PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N)); 1971 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1972 } 1973 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD)); 1974 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD)); 1975 PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1976 PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1977 } else { 1978 PetscCall(MatMultTranspose(pcis->A_IB, pcis->vec2_D, pcis->vec1_B)); 1979 } 1980 } else { 1981 PetscCall(VecSet(pcis->vec1_B, zero)); 1982 } 1983 PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE)); 1984 PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE)); 1985 PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B)); 1986 } else { 
1987 PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B)); 1988 } 1989 1990 /* Apply interface preconditioner 1991 input/output vecs: pcis->vec1_B and pcis->vec1_D */ 1992 PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_TRUE)); 1993 1994 /* Apply transpose of partition of unity operator */ 1995 PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z)); 1996 1997 /* Second Dirichlet solve and assembling of output */ 1998 PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1999 PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 2000 if (n_B) { 2001 if (pcbddc->switch_static) { 2002 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 2003 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 2004 PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 2005 PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 2006 if (!pcbddc->switch_static_change) { 2007 PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N)); 2008 } else { 2009 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 2010 PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N)); 2011 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 2012 } 2013 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD)); 2014 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD)); 2015 } else { 2016 PetscCall(MatMultTranspose(pcis->A_BI, pcis->vec1_B, pcis->vec3_D)); 2017 } 2018 } else if (pcbddc->switch_static) { /* n_B is zero */ 2019 if (!pcbddc->switch_static_change) { 2020 PetscCall(MatMultTranspose(lA, pcis->vec1_D, pcis->vec3_D)); 2021 } else { 2022 
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  /* second transposed Dirichlet solve on the interior residual */
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));
  /* combine the interior corrections and scatter them back into the global vector z */
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D, m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
  }
  if (pcbddc->ChangeOfBasisMatrix) {
    /* map z back to the original basis.
       NOTE(review): the input vector r is reused here as scratch storage for
       work_change, so r's contents are clobbered by this branch — confirm callers
       never rely on r after the (transpose) application */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z, pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCReset_BDDC - Free everything attached to the preconditioner and restore the
   default field values, returning the PC to a pre-setup state.

   Note: pcbddc and pcis alias the same pc->data pointer (two views of the same
   context). The three inner KSPs are saved across the PetscMemzero() and restored
   afterwards so they survive a reset and can be reused by a subsequent setup.
*/
static PetscErrorCode PCReset_BDDC(PC pc)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PC_IS   *pcis   = (PC_IS *)pc->data;
  KSP      kspD, kspR, kspC;

  PetscFunctionBegin;
  /* free BDDC custom data */
  PetscCall(PCBDDCResetCustomization(pc));
  /* destroy objects related to topography */
  PetscCall(PCBDDCResetTopography(pc));
  /* destroy objects for scaling operator */
  PetscCall(PCBDDCScalingDestroy(pc));
  /* free solvers stuff */
  PetscCall(PCBDDCResetSolvers(pc));
  /* free global vectors needed in presolve */
  PetscCall(VecDestroy(&pcbddc->temp_solution));
  PetscCall(VecDestroy(&pcbddc->original_rhs));
  /* free data created by PCIS */
  PetscCall(PCISReset(pc));

  /* restore defaults: zero the whole context, then re-seed the non-zero defaults.
     The inner KSPs are preserved across the memzero (destroyed only in PCDestroy_BDDC) */
  kspD = pcbddc->ksp_D;
  kspR = pcbddc->ksp_R;
  kspC = pcbddc->coarse_ksp;
  PetscCall(PetscMemzero(pc->data, sizeof(*pcbddc)));
  pcis->n_neigh                    = -1;
  pcis->scaling_factor             = 1.0;
  pcis->reusesubmatrices           = PETSC_TRUE;
  pcbddc->use_local_adj            = PETSC_TRUE;
  pcbddc->use_vertices             = PETSC_TRUE;
  pcbddc->use_edges                = PETSC_TRUE;
  pcbddc->symmetric_primal         = PETSC_TRUE;
  pcbddc->vertex_size              = 1;
  pcbddc->recompute_topography     = PETSC_TRUE;
  pcbddc->coarse_size              = -1;
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio         = 8;
  pcbddc->coarse_eqs_per_proc      = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield                 = -1;
  pcbddc->nedglobal                = PETSC_TRUE;
  pcbddc->graphmaxcount            = PETSC_MAX_INT;
  pcbddc->sub_schurs_layers        = -1;
  pcbddc->ksp_D                    = kspD;
  pcbddc->ksp_R                    = kspR;
  pcbddc->coarse_ksp               = kspC;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCDestroy_BDDC - Reset the preconditioner, destroy the inner solvers, detach
   every composed method, and free the context.
*/
static PetscErrorCode PCDestroy_BDDC(PC pc)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  PetscCall(PCReset_BDDC(pc));
  PetscCall(KSPDestroy(&pcbddc->ksp_D));
  PetscCall(KSPDestroy(&pcbddc->ksp_R));
  PetscCall(KSPDestroy(&pcbddc->coarse_ksp));
  /* remove all methods composed in PCCreate_BDDC */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCSetCoordinates_BDDC - Copy the user-provided dof coordinates (nloc points of
   dimension dim) into the connectivity graph and flag the topography (and corner
   selection) for recomputation at the next setup.
*/
static PetscErrorCode PCSetCoordinates_BDDC(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
{
  PC_BDDC    *pcbddc    = (PC_BDDC *)pc->data;
  PCBDDCGraph mat_graph = pcbddc->mat_graph;

  PetscFunctionBegin;
  /* replace any previously set coordinates; the graph owns its copy */
  PetscCall(PetscFree(mat_graph->coords));
  PetscCall(PetscMalloc1(nloc * dim, &mat_graph->coords));
  PetscCall(PetscArraycpy(mat_graph->coords, coords, nloc * dim));
  mat_graph->cnloc = nloc;
  mat_graph->cdim  = dim;
  mat_graph->cloc  = PETSC_FALSE;
  /* flg setup */
  pcbddc->recompute_topography = PETSC_TRUE;
  pcbddc->corner_selected      = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCPreSolveChangeRHS_BDDC - Report that this PC modifies the right-hand side
   during presolve (see PCPreSolve_BDDC usage below).
*/
static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool *change)
{
  PetscFunctionBegin;
  *change = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCBDDCMatFETIDPGetRHS_BDDC - Build the FETI-DP flux right-hand side from the
   physical right-hand side (implementation behind PCBDDCMatFETIDPGetRHS()).
*/
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx mat_ctx;
  Vec           work;
  PC_IS        *pcis;
  PC_BDDC      *pcbddc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  /* pcis and pcbddc alias the same pc->data of the underlying BDDC PC */
  pcis   = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  PetscCall(VecSet(fetidp_flux_rhs, 0.0));
  /*
     copy rhs since we may change it during PCPreSolve_BDDC */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (mat_ctx->rhs_flip) {
    /* flip signs of selected entries (saddle-point formulations) */
    PetscCall(VecPointwiseMult(pcbddc->original_rhs, standard_rhs, mat_ctx->rhs_flip));
  } else {
    PetscCall(VecCopy(standard_rhs, pcbddc->original_rhs));
  }
  if (mat_ctx->g2g_p) {
    /* interface pressure rhs */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    if (!mat_ctx->rhs_flip) PetscCall(VecScale(fetidp_flux_rhs, -1.));
  }
  /*
     change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries
  */
  PetscCall(PCPreSolve_BDDC(mat_ctx->pc, NULL, pcbddc->original_rhs, NULL));
  if (pcbddc->ChangeOfBasisMatrix) {
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, pcbddc->original_rhs, pcbddc->work_change));
    work = pcbddc->work_change;
  } else {
    work = pcbddc->original_rhs;
  }
  /* store vectors for computation of fetidp final solution */
  PetscCall(VecScatterBegin(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  /* Apply partition of unity */
  PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
  if (!pcbddc->switch_static) {
    /* compute partially subassembled Schur complement right-hand side */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, mat_ctx->temp_solution_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
    PetscCall(MatMult(pcis->A_BI, pcis->vec1_D, pcis->vec1_B));
    PetscCall(VecAXPY(mat_ctx->temp_solution_B, -1.0, pcis->vec1_B));
    /* NOTE: work (original_rhs or work_change) is zeroed and reused as scratch here */
    PetscCall(VecSet(work, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
    PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  }
  /* BDDC rhs */
  PetscCall(VecCopy(mat_ctx->temp_solution_B, pcis->vec1_B));
  if (pcbddc->switch_static) PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
  /* apply BDDC */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));

  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  PetscCall(MatMult(mat_ctx->B_delta, pcis->vec1_B, mat_ctx->lambda_local));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  /* Add contribution to interface pressures */
  if (mat_ctx->l2g_p) {
    PetscCall(MatMult(mat_ctx->B_BB, pcis->vec1_B, mat_ctx->vP));
    if (pcbddc->switch_static) PetscCall(MatMultAdd(mat_ctx->B_BI, pcis->vec1_D, mat_ctx->vP, mat_ctx->vP));
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCMatFETIDPGetRHS - Compute the right-hand side for a FETI-DP linear system using the physical right-hand side

  Collective

  Input Parameters:
+ fetidp_mat   - the FETI-DP matrix object obtained by a call to `PCBDDCCreateFETIDPOperators()`
- standard_rhs - the right-hand side of the original linear system

  Output Parameter:
.
fetidp_flux_rhs - the right-hand side for the FETI-DP linear system

  Level: developer

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetSolution()`
@*/
PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx mat_ctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
  PetscValidHeaderSpecific(standard_rhs, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(fetidp_flux_rhs, VEC_CLASSID, 3);
  /* dispatch to the implementation composed on the BDDC PC stored in the shell context */
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetRHS_C", (Mat, Vec, Vec), (fetidp_mat, standard_rhs, fetidp_flux_rhs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCBDDCMatFETIDPGetSolution_BDDC - Recover the physical solution from the
   FETI-DP flux solution (implementation behind PCBDDCMatFETIDPGetSolution()).
*/
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx mat_ctx;
  PC_IS        *pcis;
  PC_BDDC      *pcbddc;
  Vec           work;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  /* pcis and pcbddc alias the same pc->data of the underlying BDDC PC */
  pcis   = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  /* apply B_delta^T */
  PetscCall(VecSet(pcis->vec1_B, 0.));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMultTranspose(mat_ctx->B_delta, mat_ctx->lambda_local, pcis->vec1_B));
  if (mat_ctx->l2g_p) {
    /* add pressure contribution to the interface rhs */
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(MatMultAdd(mat_ctx->Bt_BB, mat_ctx->vP, pcis->vec1_B, pcis->vec1_B));
  }

  /* compute rhs for BDDC application */
  PetscCall(VecAYPX(pcis->vec1_B, -1.0, mat_ctx->temp_solution_B));
  if (pcbddc->switch_static) {
    PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
    if (mat_ctx->l2g_p) {
      PetscCall(VecScale(mat_ctx->vP, -1.));
      PetscCall(MatMultAdd(mat_ctx->Bt_BI, mat_ctx->vP, pcis->vec1_D, pcis->vec1_D));
    }
  }

  /* apply BDDC */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));

  /* put values into global vector */
  if (pcbddc->ChangeOfBasisMatrix) work = pcbddc->work_change;
  else work = standard_sol;
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  if (!pcbddc->switch_static) {
    /* compute values into the interior if solved for the partially subassembled Schur complement */
    PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec1_D));
    PetscCall(VecAYPX(pcis->vec1_D, -1.0, mat_ctx->temp_solution_D));
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
  }

  PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  /* add p0 solution to final solution */
  PetscCall(PCBDDCBenignGetOrSetP0(mat_ctx->pc, work, PETSC_FALSE));
  if (pcbddc->ChangeOfBasisMatrix) PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, work, standard_sol));
  PetscCall(PCPostSolve_BDDC(mat_ctx->pc, NULL, NULL, standard_sol));
  if (mat_ctx->g2g_p) {
    /* restore pressure values in the physical solution */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCView_BDDCIPC - View the inner BDDC preconditioner of the BDDC interface
   preconditioner (BDDCIPC) shell.
*/
static PetscErrorCode PCView_BDDCIPC(PC pc, PetscViewer viewer)
{
  BDDCIPC_ctx bddcipc_ctx;
  PetscBool   isascii;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "BDDC interface preconditioner\n"));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PCView(bddcipc_ctx->bddc, viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCSetUp_BDDCIPC - Set up the BDDCIPC shell: set up the inner BDDC PC and build
   the scatter (g2l) from the outer operator's global layout to the inner BDDC
   interface layout.
*/
static PetscErrorCode PCSetUp_BDDCIPC(PC pc)
{
  BDDCIPC_ctx bddcipc_ctx;
  PetscBool   isbddc;
  Vec         vv;
  IS          is;
  PC_IS      *pcis;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  PetscCall(PetscObjectTypeCompare((PetscObject)bddcipc_ctx->bddc, PCBDDC, &isbddc));
  PetscCheck(isbddc, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid type %s. Must be of type bddc", ((PetscObject)bddcipc_ctx->bddc)->type_name);
  PetscCall(PCSetUp(bddcipc_ctx->bddc));

  /* create interface scatter */
  pcis = (PC_IS *)(bddcipc_ctx->bddc->data);
  PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
  PetscCall(MatCreateVecs(pc->pmat, &vv, NULL));
  PetscCall(ISRenumber(pcis->is_B_global, NULL, NULL, &is));
  PetscCall(VecScatterCreate(vv, is, pcis->vec1_B, NULL, &bddcipc_ctx->g2l));
  PetscCall(ISDestroy(&is));
  PetscCall(VecDestroy(&vv));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCApply_BDDCIPC - Apply the BDDC interface preconditioner: restriction,
   interface correction, extension. The inner BDDC's global_to_B scatter is
   temporarily swapped with the shell's g2l so the scaling routines act on this
   operator's global layout, and restored before returning.
*/
static PetscErrorCode PCApply_BDDCIPC(PC pc, Vec r, Vec x)
{
  BDDCIPC_ctx bddcipc_ctx;
  PC_IS      *pcis;
  VecScatter  tmps;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  pcis              = (PC_IS *)(bddcipc_ctx->bddc->data);
  tmps              = pcis->global_to_B;
  pcis->global_to_B = bddcipc_ctx->g2l;
  PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
  PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_FALSE));
  PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
  pcis->global_to_B = tmps;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCApplyTranspose_BDDCIPC - Transpose application of the BDDC interface
   preconditioner; identical to PCApply_BDDCIPC except the interface
   preconditioner is applied in transpose mode.
*/
static PetscErrorCode PCApplyTranspose_BDDCIPC(PC pc, Vec r, Vec x)
{
  BDDCIPC_ctx bddcipc_ctx;
  PC_IS      *pcis;
  VecScatter  tmps;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  pcis              = (PC_IS *)(bddcipc_ctx->bddc->data);
  tmps              = pcis->global_to_B;
  pcis->global_to_B = bddcipc_ctx->g2l;
  PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
  PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_TRUE));
  PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
  pcis->global_to_B = tmps;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*
   PCDestroy_BDDCIPC - Destroy the BDDCIPC shell context (inner PC, scatter, and
   the context itself).
*/
static PetscErrorCode PCDestroy_BDDCIPC(PC pc)
{
  BDDCIPC_ctx
bddcipc_ctx; 2434 2435 PetscFunctionBegin; 2436 PetscCall(PCShellGetContext(pc, &bddcipc_ctx)); 2437 PetscCall(PCDestroy(&bddcipc_ctx->bddc)); 2438 PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l)); 2439 PetscCall(PetscFree(bddcipc_ctx)); 2440 PetscFunctionReturn(PETSC_SUCCESS); 2441 } 2442 2443 /*@ 2444 PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system 2445 2446 Collective 2447 2448 Input Parameters: 2449 + fetidp_mat - the FETI-DP matrix obtained by a call to `PCBDDCCreateFETIDPOperators()` 2450 - fetidp_flux_sol - the solution of the FETI-DP linear system` 2451 2452 Output Parameter: 2453 . standard_sol - the solution defined on the physical domain 2454 2455 Level: developer 2456 2457 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetRHS()` 2458 @*/ 2459 PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol) 2460 { 2461 FETIDPMat_ctx mat_ctx; 2462 2463 PetscFunctionBegin; 2464 PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1); 2465 PetscValidHeaderSpecific(fetidp_flux_sol, VEC_CLASSID, 2); 2466 PetscValidHeaderSpecific(standard_sol, VEC_CLASSID, 3); 2467 PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx)); 2468 PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetSolution_C", (Mat, Vec, Vec), (fetidp_mat, fetidp_flux_sol, standard_sol)); 2469 PetscFunctionReturn(PETSC_SUCCESS); 2470 } 2471 2472 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc) 2473 { 2474 FETIDPMat_ctx fetidpmat_ctx; 2475 Mat newmat; 2476 FETIDPPC_ctx fetidppc_ctx; 2477 PC newpc; 2478 MPI_Comm comm; 2479 2480 PetscFunctionBegin; 2481 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm)); 2482 /* FETI-DP matrix */ 2483 PetscCall(PCBDDCCreateFETIDPMatContext(pc, &fetidpmat_ctx)); 2484 fetidpmat_ctx->fully_redundant = fully_redundant; 2485 
PetscCall(PCBDDCSetupFETIDPMatContext(fetidpmat_ctx)); 2486 PetscCall(MatCreateShell(comm, fetidpmat_ctx->n, fetidpmat_ctx->n, fetidpmat_ctx->N, fetidpmat_ctx->N, fetidpmat_ctx, &newmat)); 2487 PetscCall(PetscObjectSetName((PetscObject)newmat, !fetidpmat_ctx->l2g_lambda_only ? "F" : "G")); 2488 PetscCall(MatShellSetOperation(newmat, MATOP_MULT, (void (*)(void))FETIDPMatMult)); 2489 PetscCall(MatShellSetOperation(newmat, MATOP_MULT_TRANSPOSE, (void (*)(void))FETIDPMatMultTranspose)); 2490 PetscCall(MatShellSetOperation(newmat, MATOP_DESTROY, (void (*)(void))PCBDDCDestroyFETIDPMat)); 2491 /* propagate MatOptions */ 2492 { 2493 PC_BDDC *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data; 2494 PetscBool isset, issym; 2495 2496 PetscCall(MatIsSymmetricKnown(pc->mat, &isset, &issym)); 2497 if ((isset && issym) || pcbddc->symmetric_primal) PetscCall(MatSetOption(newmat, MAT_SYMMETRIC, PETSC_TRUE)); 2498 } 2499 PetscCall(MatSetOptionsPrefix(newmat, prefix)); 2500 PetscCall(MatAppendOptionsPrefix(newmat, "fetidp_")); 2501 PetscCall(MatSetUp(newmat)); 2502 /* FETI-DP preconditioner */ 2503 PetscCall(PCBDDCCreateFETIDPPCContext(pc, &fetidppc_ctx)); 2504 PetscCall(PCBDDCSetupFETIDPPCContext(newmat, fetidppc_ctx)); 2505 PetscCall(PCCreate(comm, &newpc)); 2506 PetscCall(PCSetOperators(newpc, newmat, newmat)); 2507 PetscCall(PCSetOptionsPrefix(newpc, prefix)); 2508 PetscCall(PCAppendOptionsPrefix(newpc, "fetidp_")); 2509 PetscCall(PCSetErrorIfFailure(newpc, pc->erroriffailure)); 2510 if (!fetidpmat_ctx->l2g_lambda_only) { /* standard FETI-DP */ 2511 PetscCall(PCSetType(newpc, PCSHELL)); 2512 PetscCall(PCShellSetName(newpc, "FETI-DP multipliers")); 2513 PetscCall(PCShellSetContext(newpc, fetidppc_ctx)); 2514 PetscCall(PCShellSetApply(newpc, FETIDPPCApply)); 2515 PetscCall(PCShellSetApplyTranspose(newpc, FETIDPPCApplyTranspose)); 2516 PetscCall(PCShellSetView(newpc, FETIDPPCView)); 2517 PetscCall(PCShellSetDestroy(newpc, PCBDDCDestroyFETIDPPC)); 2518 } else { /* saddle-point FETI-DP */ 
2519 Mat M; 2520 PetscInt psize; 2521 PetscBool fake = PETSC_FALSE, isfieldsplit; 2522 2523 PetscCall(ISViewFromOptions(fetidpmat_ctx->lagrange, NULL, "-lag_view")); 2524 PetscCall(ISViewFromOptions(fetidpmat_ctx->pressure, NULL, "-press_view")); 2525 PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_PPmat", (PetscObject *)&M)); 2526 PetscCall(PCSetType(newpc, PCFIELDSPLIT)); 2527 PetscCall(PCFieldSplitSetIS(newpc, "lag", fetidpmat_ctx->lagrange)); 2528 PetscCall(PCFieldSplitSetIS(newpc, "p", fetidpmat_ctx->pressure)); 2529 PetscCall(PCFieldSplitSetType(newpc, PC_COMPOSITE_SCHUR)); 2530 PetscCall(PCFieldSplitSetSchurFactType(newpc, PC_FIELDSPLIT_SCHUR_FACT_DIAG)); 2531 PetscCall(ISGetSize(fetidpmat_ctx->pressure, &psize)); 2532 if (psize != M->rmap->N) { 2533 Mat M2; 2534 PetscInt lpsize; 2535 2536 fake = PETSC_TRUE; 2537 PetscCall(ISGetLocalSize(fetidpmat_ctx->pressure, &lpsize)); 2538 PetscCall(MatCreate(comm, &M2)); 2539 PetscCall(MatSetType(M2, MATAIJ)); 2540 PetscCall(MatSetSizes(M2, lpsize, lpsize, psize, psize)); 2541 PetscCall(MatSetUp(M2)); 2542 PetscCall(MatAssemblyBegin(M2, MAT_FINAL_ASSEMBLY)); 2543 PetscCall(MatAssemblyEnd(M2, MAT_FINAL_ASSEMBLY)); 2544 PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M2)); 2545 PetscCall(MatDestroy(&M2)); 2546 } else { 2547 PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M)); 2548 } 2549 PetscCall(PCFieldSplitSetSchurScale(newpc, 1.0)); 2550 2551 /* we need to setfromoptions and setup here to access the blocks */ 2552 PetscCall(PCSetFromOptions(newpc)); 2553 PetscCall(PCSetUp(newpc)); 2554 2555 /* user may have changed the type (e.g. 
-fetidp_pc_type none) */ 2556 PetscCall(PetscObjectTypeCompare((PetscObject)newpc, PCFIELDSPLIT, &isfieldsplit)); 2557 if (isfieldsplit) { 2558 KSP *ksps; 2559 PC ppc, lagpc; 2560 PetscInt nn; 2561 PetscBool ismatis, matisok = PETSC_FALSE, check = PETSC_FALSE; 2562 2563 /* set the solver for the (0,0) block */ 2564 PetscCall(PCFieldSplitSchurGetSubKSP(newpc, &nn, &ksps)); 2565 if (!nn) { /* not of type PC_COMPOSITE_SCHUR */ 2566 PetscCall(PCFieldSplitGetSubKSP(newpc, &nn, &ksps)); 2567 if (!fake) { /* pass pmat to the pressure solver */ 2568 Mat F; 2569 2570 PetscCall(KSPGetOperators(ksps[1], &F, NULL)); 2571 PetscCall(KSPSetOperators(ksps[1], F, M)); 2572 } 2573 } else { 2574 PetscBool issym, isset; 2575 Mat S; 2576 2577 PetscCall(PCFieldSplitSchurGetS(newpc, &S)); 2578 PetscCall(MatIsSymmetricKnown(newmat, &isset, &issym)); 2579 if (isset) PetscCall(MatSetOption(S, MAT_SYMMETRIC, issym)); 2580 } 2581 PetscCall(KSPGetPC(ksps[0], &lagpc)); 2582 PetscCall(PCSetType(lagpc, PCSHELL)); 2583 PetscCall(PCShellSetName(lagpc, "FETI-DP multipliers")); 2584 PetscCall(PCShellSetContext(lagpc, fetidppc_ctx)); 2585 PetscCall(PCShellSetApply(lagpc, FETIDPPCApply)); 2586 PetscCall(PCShellSetApplyTranspose(lagpc, FETIDPPCApplyTranspose)); 2587 PetscCall(PCShellSetView(lagpc, FETIDPPCView)); 2588 PetscCall(PCShellSetDestroy(lagpc, PCBDDCDestroyFETIDPPC)); 2589 2590 /* Olof's idea: interface Schur complement preconditioner for the mass matrix */ 2591 PetscCall(KSPGetPC(ksps[1], &ppc)); 2592 if (fake) { 2593 BDDCIPC_ctx bddcipc_ctx; 2594 PetscContainer c; 2595 2596 matisok = PETSC_TRUE; 2597 2598 /* create inner BDDC solver */ 2599 PetscCall(PetscNew(&bddcipc_ctx)); 2600 PetscCall(PCCreate(comm, &bddcipc_ctx->bddc)); 2601 PetscCall(PCSetType(bddcipc_ctx->bddc, PCBDDC)); 2602 PetscCall(PCSetOperators(bddcipc_ctx->bddc, M, M)); 2603 PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_pCSR", (PetscObject *)&c)); 2604 PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, 
&ismatis)); 2605 if (c && ismatis) { 2606 Mat lM; 2607 PetscInt *csr, n; 2608 2609 PetscCall(MatISGetLocalMat(M, &lM)); 2610 PetscCall(MatGetSize(lM, &n, NULL)); 2611 PetscCall(PetscContainerGetPointer(c, (void **)&csr)); 2612 PetscCall(PCBDDCSetLocalAdjacencyGraph(bddcipc_ctx->bddc, n, csr, csr + (n + 1), PETSC_COPY_VALUES)); 2613 PetscCall(MatISRestoreLocalMat(M, &lM)); 2614 } 2615 PetscCall(PCSetOptionsPrefix(bddcipc_ctx->bddc, ((PetscObject)ksps[1])->prefix)); 2616 PetscCall(PCSetErrorIfFailure(bddcipc_ctx->bddc, pc->erroriffailure)); 2617 PetscCall(PCSetFromOptions(bddcipc_ctx->bddc)); 2618 2619 /* wrap the interface application */ 2620 PetscCall(PCSetType(ppc, PCSHELL)); 2621 PetscCall(PCShellSetName(ppc, "FETI-DP pressure")); 2622 PetscCall(PCShellSetContext(ppc, bddcipc_ctx)); 2623 PetscCall(PCShellSetSetUp(ppc, PCSetUp_BDDCIPC)); 2624 PetscCall(PCShellSetApply(ppc, PCApply_BDDCIPC)); 2625 PetscCall(PCShellSetApplyTranspose(ppc, PCApplyTranspose_BDDCIPC)); 2626 PetscCall(PCShellSetView(ppc, PCView_BDDCIPC)); 2627 PetscCall(PCShellSetDestroy(ppc, PCDestroy_BDDCIPC)); 2628 } 2629 2630 /* determine if we need to assemble M to construct a preconditioner */ 2631 if (!matisok) { 2632 PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis)); 2633 PetscCall(PetscObjectTypeCompareAny((PetscObject)ppc, &matisok, PCBDDC, PCJACOBI, PCNONE, PCMG, "")); 2634 if (ismatis && !matisok) PetscCall(MatConvert(M, MATAIJ, MAT_INPLACE_MATRIX, &M)); 2635 } 2636 2637 /* run the subproblems to check convergence */ 2638 PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)newmat)->prefix, "-check_saddlepoint", &check, NULL)); 2639 if (check) { 2640 PetscInt i; 2641 2642 for (i = 0; i < nn; i++) { 2643 KSP kspC; 2644 PC npc; 2645 Mat F, pF; 2646 Vec x, y; 2647 PetscBool isschur, prec = PETSC_TRUE; 2648 2649 PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksps[i]), &kspC)); 2650 PetscCall(KSPSetNestLevel(kspC, pc->kspnestlevel)); 2651 PetscCall(KSPSetOptionsPrefix(kspC, 
((PetscObject)ksps[i])->prefix)); 2652 PetscCall(KSPAppendOptionsPrefix(kspC, "check_")); 2653 PetscCall(KSPGetOperators(ksps[i], &F, &pF)); 2654 PetscCall(PetscObjectTypeCompare((PetscObject)F, MATSCHURCOMPLEMENT, &isschur)); 2655 if (isschur) { 2656 KSP kspS, kspS2; 2657 Mat A00, pA00, A10, A01, A11; 2658 char prefix[256]; 2659 2660 PetscCall(MatSchurComplementGetKSP(F, &kspS)); 2661 PetscCall(MatSchurComplementGetSubMatrices(F, &A00, &pA00, &A01, &A10, &A11)); 2662 PetscCall(MatCreateSchurComplement(A00, pA00, A01, A10, A11, &F)); 2663 PetscCall(MatSchurComplementGetKSP(F, &kspS2)); 2664 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sschur_", ((PetscObject)kspC)->prefix)); 2665 PetscCall(KSPSetOptionsPrefix(kspS2, prefix)); 2666 PetscCall(KSPGetPC(kspS2, &npc)); 2667 PetscCall(PCSetType(npc, PCKSP)); 2668 PetscCall(PCKSPSetKSP(npc, kspS)); 2669 PetscCall(KSPSetFromOptions(kspS2)); 2670 PetscCall(KSPGetPC(kspS2, &npc)); 2671 PetscCall(PCSetUseAmat(npc, PETSC_TRUE)); 2672 } else { 2673 PetscCall(PetscObjectReference((PetscObject)F)); 2674 } 2675 PetscCall(KSPSetFromOptions(kspC)); 2676 PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)kspC)->prefix, "-preconditioned", &prec, NULL)); 2677 if (prec) { 2678 PetscCall(KSPGetPC(ksps[i], &npc)); 2679 PetscCall(KSPSetPC(kspC, npc)); 2680 } 2681 PetscCall(KSPSetOperators(kspC, F, pF)); 2682 PetscCall(MatCreateVecs(F, &x, &y)); 2683 PetscCall(VecSetRandom(x, NULL)); 2684 PetscCall(MatMult(F, x, y)); 2685 PetscCall(KSPSolve(kspC, y, x)); 2686 PetscCall(KSPCheckSolve(kspC, npc, x)); 2687 PetscCall(KSPDestroy(&kspC)); 2688 PetscCall(MatDestroy(&F)); 2689 PetscCall(VecDestroy(&x)); 2690 PetscCall(VecDestroy(&y)); 2691 } 2692 } 2693 PetscCall(PetscFree(ksps)); 2694 } 2695 } 2696 /* return pointers for objects created */ 2697 *fetidp_mat = newmat; 2698 *fetidp_pc = newpc; 2699 PetscFunctionReturn(PETSC_SUCCESS); 2700 } 2701 2702 /*@C 2703 PCBDDCCreateFETIDPOperators - Create FETI-DP operators 2704 2705 Collective 2706 2707 
  Input Parameters:
+ pc - the `PCBDDC` preconditioning context (setup should have been called before)
. fully_redundant - true for a fully redundant set of Lagrange multipliers
- prefix - optional options database prefix for the objects to be created (can be `NULL`)

  Output Parameters:
+ fetidp_mat - shell FETI-DP matrix object
- fetidp_pc - shell Dirichlet preconditioner for FETI-DP matrix

  Level: developer

  Note:
  Currently the only operations provided for FETI-DP matrix are `MatMult()` and `MatMultTranspose()`

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()`
@*/
PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  /* the FETI-DP operators are extracted from data computed during setup, so the PC must be fully set up before this call */
  PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You must call PCSetup_BDDC() first");
  /* dispatch to the implementation composed on the PC (provided by PCBDDC only) */
  PetscUseMethod(pc, "PCBDDCCreateFETIDPOperators_C", (PC, PetscBool, const char *, Mat *, PC *), (pc, fully_redundant, prefix, fetidp_mat, fetidp_pc));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*MC
   PCBDDC - Balancing Domain Decomposition by Constraints preconditioners

   Requires `MATIS` matrices (Pmat) with local matrices (inside the `MATIS`) of type `MATSEQAIJ`, `MATSEQBAIJ` or `MATSEQSBAIJ`

   It also works with unsymmetric and indefinite problems.

   Unlike 'conventional' interface preconditioners, `PCBDDC` iterates over all degrees of freedom, not just those on the interface. This allows the use
   of approximate solvers on the subdomains.

   Approximate local solvers are automatically adapted (see [1]) if the user has attached a nullspace object to the subdomain matrices, and informed
   `PCBDDC` of using approximate solvers (via the command line).
2744 2745 Boundary nodes are split in vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes. 2746 The latter can be customized by using `PCBDDCSetLocalAdjacencyGraph()` 2747 2748 Additional information on dofs can be provided by using `PCBDDCSetDofsSplitting()`, `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, and 2749 `PCBDDCSetPrimalVerticesIS()` and their local counterparts. 2750 2751 Constraints can be customized by attaching a `MatNullSpace` object to the `MATIS` matrix via `MatSetNearNullSpace()`. Non-singular modes are retained via SVD. 2752 2753 Change of basis is performed similarly to [2] when requested. When more than one constraint is present on a single connected component 2754 (i.e. an edge or a face), a robust method based on local QR factorizations is used. 2755 User defined change of basis can be passed to `PCBDDC` with `PCBDDCSetChangeOfBasisMat()` 2756 2757 The PETSc implementation also supports multilevel `PCBDDC` [3]. Coarse grids are partitioned using a `MatPartitioning` object. 2758 2759 Adaptive selection of primal constraints [4] is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present. 2760 Future versions of the code will also consider using PASTIX. 2761 2762 An experimental interface to the FETI-DP method is available. FETI-DP operators could be created using `PCBDDCCreateFETIDPOperators()`. 2763 A stand-alone class for the FETI-DP method will be provided in the next releases. 2764 2765 Options Database Keys: 2766 + -pc_bddc_use_vertices <true> - use or not vertices in primal space 2767 . -pc_bddc_use_edges <true> - use or not edges in primal space 2768 . -pc_bddc_use_faces <false> - use or not faces in primal space 2769 . -pc_bddc_symmetric <true> - symmetric computation of primal basis functions. Specify false for unsymmetric problems 2770 . 
-pc_bddc_use_change_of_basis <false> - use change of basis approach (on edges only)
. -pc_bddc_use_change_on_faces <false> - use change of basis approach on faces if change of basis has been requested
. -pc_bddc_switch_static <false> - switches from M_2 (default) to M_3 operator (see reference article [1])
. -pc_bddc_levels <0> - maximum number of levels for multilevel
. -pc_bddc_coarsening_ratio <8> - number of subdomains which will be aggregated together at the coarser level (e.g. H/h ratio at the coarser level, significant only in the multilevel case)
. -pc_bddc_coarse_redistribute <0> - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
. -pc_bddc_use_deluxe_scaling <false> - use deluxe scaling
. -pc_bddc_schur_layers <\-1> - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
. -pc_bddc_adaptive_threshold <0.0> - when a value different from zero is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS or MKL_PARDISO installed)
- -pc_bddc_check_level <0> - set verbosity level of debugging output

   Options for Dirichlet, Neumann or coarse solver can be set using the appropriate options prefix
.vb
      -pc_bddc_dirichlet_
      -pc_bddc_neumann_
      -pc_bddc_coarse_
.ve
   e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. `PCBDDC` uses by default `KSPPREONLY` and `PCLU`.

   When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified using the options prefix
.vb
      -pc_bddc_dirichlet_lN_
      -pc_bddc_neumann_lN_
      -pc_bddc_coarse_lN_
.ve
   Note that level number ranges from the finest (0) to the coarsest (N).
2796 In order to specify options for the `PCBDDC` operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_l 2797 to the option, e.g. 2798 .vb 2799 -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3 2800 .ve 2801 will use a threshold of 5 for constraints' selection at the first coarse level and will redistribute the coarse problem of the first coarse level on 3 processors 2802 2803 References: 2804 + * - C. R. Dohrmann. "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149--168, March 2007 2805 . * - A. Klawonn and O. B. Widlund. "Dual-Primal FETI Methods for Linear Elasticity", Communications on Pure and Applied Mathematics Volume 59, Issue 11, pages 1523--1572, November 2006 2806 . * - J. Mandel, B. Sousedik, C. R. Dohrmann. "Multispace and Multilevel BDDC", Computing Volume 83, Issue 2--3, pages 55--85, November 2008 2807 - * - C. Pechstein and C. R. Dohrmann. 
   "Modern domain decomposition methods BDDC, deluxe scaling, and an algebraic approach", Seminar talk, Linz, December 2013, http://people.ricam.oeaw.ac.at/c.pechstein/pechstein-bddc2013.pdf

   Level: intermediate

   Contributed by Stefano Zampini

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS`, `PCLU`, `PCGAMG`, `PC`, `PCBDDCSetLocalAdjacencyGraph()`, `PCBDDCSetDofsSplitting()`,
          `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetPrimalVerticesIS()`, `MatNullSpace`, `MatSetNearNullSpace()`,
          `PCBDDCSetChangeOfBasisMat()`, `PCBDDCCreateFETIDPOperators()`, `PCNN`
M*/

/* Constructor for PCBDDC: allocates the PC_BDDC context, sets the nonzero defaults,
   fills the PC method table, and composes the PCBDDC-specific API functions on the PC */
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  PetscCall(PetscNew(&pcbddc));
  pc->data = pcbddc;

  /* PCBDDC builds on top of the PCIS (iterative substructuring) support layer */
  PetscCall(PCISInitialize(pc));

  /* create local graph structure */
  PetscCall(PCBDDCGraphCreate(&pcbddc->mat_graph));

  /* BDDC nonzero defaults (PetscNew() has already zeroed every other field) */
  pcbddc->use_nnsp                  = PETSC_TRUE;
  pcbddc->use_local_adj             = PETSC_TRUE;
  pcbddc->use_vertices              = PETSC_TRUE;
  pcbddc->use_edges                 = PETSC_TRUE;
  pcbddc->symmetric_primal          = PETSC_TRUE;
  pcbddc->vertex_size               = 1;
  pcbddc->recompute_topography      = PETSC_TRUE;
  pcbddc->coarse_size               = -1; /* not yet computed */
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio          = 8;
  pcbddc->coarse_eqs_per_proc       = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield                  = -1; /* no Nedelec field selected by default */
  pcbddc->nedglobal                 = PETSC_TRUE;
  pcbddc->graphmaxcount             = PETSC_MAX_INT;
  pcbddc->sub_schurs_layers         = -1; /* -1: full (non-economic) deluxe scaling */
  pcbddc->adaptive_threshold[0]     = 0.0; /* zero disables adaptive constraint selection */
  pcbddc->adaptive_threshold[1]     = 0.0;

  /* function pointers */
  pc->ops->apply               = PCApply_BDDC;
  pc->ops->applytranspose      = PCApplyTranspose_BDDC;
  pc->ops->setup               = PCSetUp_BDDC;
  pc->ops->destroy             = PCDestroy_BDDC;
  pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
  pc->ops->view                = PCView_BDDC;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->ops->presolve            = PCPreSolve_BDDC;
  pc->ops->postsolve           = PCPostSolve_BDDC;
  pc->ops->reset               = PCReset_BDDC;

  /* composing functions: these string keys back the public PCBDDCXXX() API via PetscUseMethod()/PetscTryMethod() */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", PCBDDCSetDiscreteGradient_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", PCBDDCSetDivergenceMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", PCBDDCSetChangeOfBasisMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", PCBDDCSetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", PCBDDCSetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", PCBDDCGetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", PCBDDCGetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", PCBDDCSetCoarseningRatio_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", PCBDDCSetLevel_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", PCBDDCSetUseExactDirichlet_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", PCBDDCSetLevels_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", PCBDDCSetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", PCBDDCSetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", PCBDDCSetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", PCBDDCSetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", PCBDDCGetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", PCBDDCGetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", PCBDDCGetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", PCBDDCGetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", PCBDDCSetDofsSplitting_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", PCBDDCSetDofsSplittingLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", PCBDDCSetLocalAdjacencyGraph_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", PCBDDCCreateFETIDPOperators_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", PCBDDCMatFETIDPGetRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", PCBDDCMatFETIDPGetSolution_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", PCPreSolveChangeRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_BDDC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCBDDCInitializePackage - This function initializes everything in the `PCBDDC` package. It is called
  from `PCInitializePackage()`.

  Level: developer

.seealso: [](ch_ksp), `PetscInitialize()`, `PCBDDCFinalizePackage()`
@*/
PetscErrorCode PCBDDCInitializePackage(void)
{
  int i; /* plain int: formatted below with %02d */

  PetscFunctionBegin;
  /* register only once; a finalizer restores the flag at PetscFinalize() time */
  if (PCBDDCPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
  PCBDDCPackageInitialized = PETSC_TRUE;
  PetscCall(PetscRegisterFinalize(PCBDDCFinalizePackage));

  /* general events: the finest level (0) uses the bare event names */
  PetscCall(PetscLogEventRegister("PCBDDCTopo", PC_CLASSID, &PC_BDDC_Topology[0]));
  PetscCall(PetscLogEventRegister("PCBDDCLKSP", PC_CLASSID, &PC_BDDC_LocalSolvers[0]));
  PetscCall(PetscLogEventRegister("PCBDDCLWor", PC_CLASSID, &PC_BDDC_LocalWork[0]));
  PetscCall(PetscLogEventRegister("PCBDDCCorr", PC_CLASSID, &PC_BDDC_CorrectionSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCASet", PC_CLASSID, &PC_BDDC_ApproxSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCAApp", PC_CLASSID, &PC_BDDC_ApproxApply[0]));
  PetscCall(PetscLogEventRegister("PCBDDCCSet", PC_CLASSID, &PC_BDDC_CoarseSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCCKSP", PC_CLASSID, &PC_BDDC_CoarseSolver[0]));
  PetscCall(PetscLogEventRegister("PCBDDCAdap", PC_CLASSID, &PC_BDDC_AdaptiveSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCScal", PC_CLASSID, &PC_BDDC_Scaling[0]));
  PetscCall(PetscLogEventRegister("PCBDDCSchr", PC_CLASSID, &PC_BDDC_Schurs[0]));
  PetscCall(PetscLogEventRegister("PCBDDCDirS", PC_CLASSID, &PC_BDDC_Solves[0][0]));
  PetscCall(PetscLogEventRegister("PCBDDCNeuS", PC_CLASSID, &PC_BDDC_Solves[0][1]));
  PetscCall(PetscLogEventRegister("PCBDDCCoaS", PC_CLASSID, &PC_BDDC_Solves[0][2]));
  /* coarser levels: same events, with the level number appended as " lNN" */
  for (i = 1; i < PETSC_PCBDDC_MAXLEVELS; i++) {
    char ename[32];

    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCTopo l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Topology[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLKSP l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalSolvers[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLWor l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalWork[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCorr l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CorrectionSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCASet l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAApp l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxApply[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCSet l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCKSP l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSolver[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAdap l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_AdaptiveSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCScal l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Scaling[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCSchr l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Schurs[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCDirS l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][0]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCNeuS l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][1]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCoaS l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][2]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCBDDCFinalizePackage - This function frees everything from the `PCBDDC` package. It is
  called from `PetscFinalize()` automatically.

  Level: developer

.seealso: [](ch_ksp), `PetscFinalize()`, `PCBDDCInitializePackage()`
@*/
PetscErrorCode PCBDDCFinalizePackage(void)
{
  PetscFunctionBegin;
  /* clear the flag so the package can be initialized again after PetscFinalize() */
  PCBDDCPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}