1 /* TODOLIST 2 3 Solvers 4 - Add support for cholesky for coarse solver (similar to local solvers) 5 - Propagate ksp prefixes for solvers to mat objects? 6 7 User interface 8 - ** DM attached to pc? 9 10 Debugging output 11 - * Better management of verbosity levels of debugging output 12 13 Extra 14 - *** Is it possible to work with PCBDDCGraph on boundary indices only (less memory consumed)? 15 - BDDC with MG framework? 16 17 MATIS related operations contained in BDDC code 18 - Provide general case for subassembling 19 20 */ 21 22 #include <petsc/private/pcbddcimpl.h> /*I "petscpc.h" I*/ /* includes for fortran wrappers */ 23 #include <petsc/private/pcbddcprivateimpl.h> 24 #include <petscblaslapack.h> 25 26 static PetscBool PCBDDCPackageInitialized = PETSC_FALSE; 27 28 static PetscBool cited = PETSC_FALSE; 29 static const char citation[] = "@article{ZampiniPCBDDC,\n" 30 "author = {Stefano Zampini},\n" 31 "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n" 32 "journal = {SIAM Journal on Scientific Computing},\n" 33 "volume = {38},\n" 34 "number = {5},\n" 35 "pages = {S282-S306},\n" 36 "year = {2016},\n" 37 "doi = {10.1137/15M1025785},\n" 38 "URL = {http://dx.doi.org/10.1137/15M1025785},\n" 39 "eprint = {http://dx.doi.org/10.1137/15M1025785}\n" 40 "}\n"; 41 42 PetscLogEvent PC_BDDC_Topology[PETSC_PCBDDC_MAXLEVELS]; 43 PetscLogEvent PC_BDDC_LocalSolvers[PETSC_PCBDDC_MAXLEVELS]; 44 PetscLogEvent PC_BDDC_LocalWork[PETSC_PCBDDC_MAXLEVELS]; 45 PetscLogEvent PC_BDDC_CorrectionSetUp[PETSC_PCBDDC_MAXLEVELS]; 46 PetscLogEvent PC_BDDC_ApproxSetUp[PETSC_PCBDDC_MAXLEVELS]; 47 PetscLogEvent PC_BDDC_ApproxApply[PETSC_PCBDDC_MAXLEVELS]; 48 PetscLogEvent PC_BDDC_CoarseSetUp[PETSC_PCBDDC_MAXLEVELS]; 49 PetscLogEvent PC_BDDC_CoarseSolver[PETSC_PCBDDC_MAXLEVELS]; 50 PetscLogEvent PC_BDDC_AdaptiveSetUp[PETSC_PCBDDC_MAXLEVELS]; 51 PetscLogEvent PC_BDDC_Scaling[PETSC_PCBDDC_MAXLEVELS]; 52 PetscLogEvent PC_BDDC_Schurs[PETSC_PCBDDC_MAXLEVELS]; 53 PetscLogEvent 
PC_BDDC_Solves[PETSC_PCBDDC_MAXLEVELS][3]; 54 55 const char *const PCBDDCInterfaceExtTypes[] = {"DIRICHLET", "LUMP", "PCBDDCInterfaceExtType", "PC_BDDC_INTERFACE_EXT_", NULL}; 56 57 PetscErrorCode PCApply_BDDC(PC, Vec, Vec); 58 59 PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems *PetscOptionsObject) { 60 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 61 PetscInt nt, i; 62 63 PetscFunctionBegin; 64 PetscOptionsHeadBegin(PetscOptionsObject, "BDDC options"); 65 /* Verbose debugging */ 66 PetscCall(PetscOptionsInt("-pc_bddc_check_level", "Verbose output for PCBDDC (intended for debug)", "none", pcbddc->dbg_flag, &pcbddc->dbg_flag, NULL)); 67 /* Approximate solvers */ 68 PetscCall(PetscOptionsEnum("-pc_bddc_interface_ext_type", "Use DIRICHLET or LUMP to extend interface corrections to interior", "PCBDDCSetInterfaceExtType", PCBDDCInterfaceExtTypes, (PetscEnum)pcbddc->interface_extension, (PetscEnum *)&pcbddc->interface_extension, NULL)); 69 if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET) { 70 PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate", "Inform PCBDDC that we are using approximate Dirichlet solvers", "none", pcbddc->NullSpace_corr[0], &pcbddc->NullSpace_corr[0], NULL)); 71 PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate_scale", "Inform PCBDDC that we need to scale the Dirichlet solve", "none", pcbddc->NullSpace_corr[1], &pcbddc->NullSpace_corr[1], NULL)); 72 } else { 73 /* This flag is needed/implied by lumping */ 74 pcbddc->switch_static = PETSC_TRUE; 75 } 76 PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate", "Inform PCBDDC that we are using approximate Neumann solvers", "none", pcbddc->NullSpace_corr[2], &pcbddc->NullSpace_corr[2], NULL)); 77 PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale", "Inform PCBDDC that we need to scale the Neumann solve", "none", pcbddc->NullSpace_corr[3], &pcbddc->NullSpace_corr[3], NULL)); 78 /* Primal space customization */ 79 
PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph", "Use or not adjacency graph of local mat for interface analysis", "none", pcbddc->use_local_adj, &pcbddc->use_local_adj, NULL)); 80 PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount", "Maximum number of shared subdomains for a connected component", "none", pcbddc->graphmaxcount, &pcbddc->graphmaxcount, NULL)); 81 PetscCall(PetscOptionsBool("-pc_bddc_corner_selection", "Activates face-based corner selection", "none", pcbddc->corner_selection, &pcbddc->corner_selection, NULL)); 82 PetscCall(PetscOptionsBool("-pc_bddc_use_vertices", "Use or not corner dofs in coarse space", "none", pcbddc->use_vertices, &pcbddc->use_vertices, NULL)); 83 PetscCall(PetscOptionsBool("-pc_bddc_use_edges", "Use or not edge constraints in coarse space", "none", pcbddc->use_edges, &pcbddc->use_edges, NULL)); 84 PetscCall(PetscOptionsBool("-pc_bddc_use_faces", "Use or not face constraints in coarse space", "none", pcbddc->use_faces, &pcbddc->use_faces, NULL)); 85 PetscCall(PetscOptionsInt("-pc_bddc_vertex_size", "Connected components smaller or equal to vertex size will be considered as primal vertices", "none", pcbddc->vertex_size, &pcbddc->vertex_size, NULL)); 86 PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp", "Use near null space attached to the matrix to compute constraints", "none", pcbddc->use_nnsp, &pcbddc->use_nnsp, NULL)); 87 PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp_true", "Use near null space attached to the matrix to compute constraints as is", "none", pcbddc->use_nnsp_true, &pcbddc->use_nnsp_true, NULL)); 88 PetscCall(PetscOptionsBool("-pc_bddc_use_qr_single", "Use QR factorization for single constraints on cc (QR is always used when multiple constraints are present)", "none", pcbddc->use_qr_single, &pcbddc->use_qr_single, NULL)); 89 /* Change of basis */ 90 PetscCall(PetscOptionsBool("-pc_bddc_use_change_of_basis", "Use or not internal change of basis on local edge nodes", "none", pcbddc->use_change_of_basis, 
&pcbddc->use_change_of_basis, NULL)); 91 PetscCall(PetscOptionsBool("-pc_bddc_use_change_on_faces", "Use or not internal change of basis on local face nodes", "none", pcbddc->use_change_on_faces, &pcbddc->use_change_on_faces, NULL)); 92 if (!pcbddc->use_change_of_basis) pcbddc->use_change_on_faces = PETSC_FALSE; 93 /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */ 94 PetscCall(PetscOptionsBool("-pc_bddc_switch_static", "Switch on static condensation ops around the interface preconditioner", "none", pcbddc->switch_static, &pcbddc->switch_static, NULL)); 95 PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_per_proc", "Target number of equations per process for coarse problem redistribution (significant only at the coarsest level)", "none", pcbddc->coarse_eqs_per_proc, &pcbddc->coarse_eqs_per_proc, NULL)); 96 i = pcbddc->coarsening_ratio; 97 PetscCall(PetscOptionsInt("-pc_bddc_coarsening_ratio", "Set coarsening ratio used in multilevel coarsening", "PCBDDCSetCoarseningRatio", i, &i, NULL)); 98 PetscCall(PCBDDCSetCoarseningRatio(pc, i)); 99 i = pcbddc->max_levels; 100 PetscCall(PetscOptionsInt("-pc_bddc_levels", "Set maximum number of levels for multilevel", "PCBDDCSetLevels", i, &i, NULL)); 101 PetscCall(PCBDDCSetLevels(pc, i)); 102 PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_limit", "Set maximum number of equations on coarsest grid to aim for", "none", pcbddc->coarse_eqs_limit, &pcbddc->coarse_eqs_limit, NULL)); 103 PetscCall(PetscOptionsBool("-pc_bddc_use_coarse_estimates", "Use estimated eigenvalues for coarse problem", "none", pcbddc->use_coarse_estimates, &pcbddc->use_coarse_estimates, NULL)); 104 PetscCall(PetscOptionsBool("-pc_bddc_use_deluxe_scaling", "Use deluxe scaling for BDDC", "none", pcbddc->use_deluxe_scaling, &pcbddc->use_deluxe_scaling, NULL)); 105 PetscCall(PetscOptionsBool("-pc_bddc_schur_rebuild", "Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. 
define the interface without any adjacency)", "none", pcbddc->sub_schurs_rebuild, &pcbddc->sub_schurs_rebuild, NULL)); 106 PetscCall(PetscOptionsInt("-pc_bddc_schur_layers", "Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)", "none", pcbddc->sub_schurs_layers, &pcbddc->sub_schurs_layers, NULL)); 107 PetscCall(PetscOptionsBool("-pc_bddc_schur_use_useradj", "Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)", "none", pcbddc->sub_schurs_use_useradj, &pcbddc->sub_schurs_use_useradj, NULL)); 108 PetscCall(PetscOptionsBool("-pc_bddc_schur_exact", "Whether or not to use the exact Schur complement instead of the reduced one (which excludes size 1 cc)", "none", pcbddc->sub_schurs_exact_schur, &pcbddc->sub_schurs_exact_schur, NULL)); 109 PetscCall(PetscOptionsBool("-pc_bddc_deluxe_zerorows", "Zero rows and columns of deluxe operators associated with primal dofs", "none", pcbddc->deluxe_zerorows, &pcbddc->deluxe_zerorows, NULL)); 110 PetscCall(PetscOptionsBool("-pc_bddc_deluxe_singlemat", "Collapse deluxe operators", "none", pcbddc->deluxe_singlemat, &pcbddc->deluxe_singlemat, NULL)); 111 PetscCall(PetscOptionsBool("-pc_bddc_adaptive_userdefined", "Use user-defined constraints (should be attached via MatSetNearNullSpace to pmat) in addition to those adaptively generated", "none", pcbddc->adaptive_userdefined, &pcbddc->adaptive_userdefined, NULL)); 112 nt = 2; 113 PetscCall(PetscOptionsRealArray("-pc_bddc_adaptive_threshold", "Thresholds to be used for adaptive selection of constraints", "none", pcbddc->adaptive_threshold, &nt, NULL)); 114 if (nt == 1) pcbddc->adaptive_threshold[1] = pcbddc->adaptive_threshold[0]; 115 PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmin", "Minimum number of constraints per connected components", "none", pcbddc->adaptive_nmin, &pcbddc->adaptive_nmin, NULL)); 116 PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmax", "Maximum 
number of constraints per connected components", "none", pcbddc->adaptive_nmax, &pcbddc->adaptive_nmax, NULL)); 117 PetscCall(PetscOptionsBool("-pc_bddc_symmetric", "Symmetric computation of primal basis functions", "none", pcbddc->symmetric_primal, &pcbddc->symmetric_primal, NULL)); 118 PetscCall(PetscOptionsInt("-pc_bddc_coarse_adj", "Number of processors where to map the coarse adjacency list", "none", pcbddc->coarse_adj_red, &pcbddc->coarse_adj_red, NULL)); 119 PetscCall(PetscOptionsBool("-pc_bddc_benign_trick", "Apply the benign subspace trick to saddle point problems with discontinuous pressures", "none", pcbddc->benign_saddle_point, &pcbddc->benign_saddle_point, NULL)); 120 PetscCall(PetscOptionsBool("-pc_bddc_benign_change", "Compute the pressure change of basis explicitly", "none", pcbddc->benign_change_explicit, &pcbddc->benign_change_explicit, NULL)); 121 PetscCall(PetscOptionsBool("-pc_bddc_benign_compute_correction", "Compute the benign correction during PreSolve", "none", pcbddc->benign_compute_correction, &pcbddc->benign_compute_correction, NULL)); 122 PetscCall(PetscOptionsBool("-pc_bddc_nonetflux", "Automatic computation of no-net-flux quadrature weights", "none", pcbddc->compute_nonetflux, &pcbddc->compute_nonetflux, NULL)); 123 PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected", "Detects disconnected subdomains", "none", pcbddc->detect_disconnected, &pcbddc->detect_disconnected, NULL)); 124 PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected_filter", "Filters out small entries in the local matrix when detecting disconnected subdomains", "none", pcbddc->detect_disconnected_filter, &pcbddc->detect_disconnected_filter, NULL)); 125 PetscCall(PetscOptionsBool("-pc_bddc_eliminate_dirichlet", "Whether or not we want to eliminate dirichlet dofs during presolve", "none", pcbddc->eliminate_dirdofs, &pcbddc->eliminate_dirdofs, NULL)); 126 PetscOptionsHeadEnd(); 127 PetscFunctionReturn(0); 128 } 129 130 static PetscErrorCode PCView_BDDC(PC pc, 
PetscViewer viewer) {
  /* Print configuration flags, and (after setup) global statistics on the
     decomposition and the nested local/coarse solvers. ASCII viewers only. */
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PC_IS *pcis = (PC_IS *)pc->data;
  PetscBool isascii;
  PetscSubcomm subcomm;
  PetscViewer subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  /* ASCII viewer */
  if (isascii) {
    PetscMPIInt color, rank, size;
    PetscInt64 loc[7], gsum[6], gmax[6], gmin[6], totbenign;
    PetscScalar interface_size;
    PetscReal ratio1 = 0., ratio2 = 0.;
    Vec counter;

    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, " Partial information available: preconditioner has not been setup yet\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use verbose output: %" PetscInt_FMT "\n", pcbddc->dbg_flag));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use user-defined CSR: %d\n", !!pcbddc->mat_graph->nvtxs_csr));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use local mat graph: %d\n", pcbddc->use_local_adj && !pcbddc->mat_graph->nvtxs_csr));
    if (pcbddc->mat_graph->twodim) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Connectivity graph topological dimension: 2\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Connectivity graph topological dimension: 3\n"));
    }
    if (pcbddc->graphmaxcount != PETSC_MAX_INT) PetscCall(PetscViewerASCIIPrintf(viewer, " Graph max count: %" PetscInt_FMT "\n", pcbddc->graphmaxcount));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Corner selection: %d (selected %d)\n", pcbddc->corner_selection, pcbddc->corner_selected));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use vertices: %d (vertex size %" PetscInt_FMT ")\n", pcbddc->use_vertices, pcbddc->vertex_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use edges: %d\n", pcbddc->use_edges));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use faces: %d\n", pcbddc->use_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use true near null space: %d\n", pcbddc->use_nnsp_true));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use QR for single constraints on cc: %d\n", pcbddc->use_qr_single));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use change of basis on local edge nodes: %d\n", pcbddc->use_change_of_basis));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use change of basis on local face nodes: %d\n", pcbddc->use_change_on_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, " User defined change of basis matrix: %d\n", !!pcbddc->user_ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Has change of basis matrix: %d\n", !!pcbddc->ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Eliminate dirichlet boundary dofs: %d\n", pcbddc->eliminate_dirdofs));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Switch on static condensation ops around the interface preconditioner: %d\n", pcbddc->switch_static));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use exact dirichlet trick: %d\n", pcbddc->use_exact_dirichlet_trick));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interface extension: %s\n", PCBDDCInterfaceExtTypes[pcbddc->interface_extension]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Multilevel max levels: %" PetscInt_FMT "\n", pcbddc->max_levels));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Multilevel coarsening ratio: %" PetscInt_FMT "\n", pcbddc->coarsening_ratio));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use estimated eigs for coarse problem: %d\n", pcbddc->use_coarse_estimates));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe scaling: %d\n", pcbddc->use_deluxe_scaling));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe zerorows: %d\n", pcbddc->deluxe_zerorows));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe singlemat: %d\n", pcbddc->deluxe_singlemat));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Rebuild interface graph for Schur principal minors: %d\n", pcbddc->sub_schurs_rebuild));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number of dofs' layers for the computation of principal minors: %" PetscInt_FMT "\n", pcbddc->sub_schurs_layers));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use user CSR graph to compute successive layers: %d\n", pcbddc->sub_schurs_use_useradj));
    if (pcbddc->adaptive_threshold[1] != pcbddc->adaptive_threshold[0]) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Adaptive constraint selection thresholds (active %d, userdefined %d): %g,%g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0], (double)pcbddc->adaptive_threshold[1]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Adaptive constraint selection threshold (active %d, userdefined %d): %g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0]));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Min constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmin));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Max constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmax));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Invert exact Schur complement for adaptive selection: %d\n", pcbddc->sub_schurs_exact_schur));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Symmetric computation of primal basis functions: %d\n", pcbddc->symmetric_primal));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Num. Procs. to map coarse adjacency list: %" PetscInt_FMT "\n", pcbddc->coarse_adj_red));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarse eqs per proc (significant at the coarsest level): %" PetscInt_FMT "\n", pcbddc->coarse_eqs_per_proc));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Detect disconnected: %d (filter %d)\n", pcbddc->detect_disconnected, pcbddc->detect_disconnected_filter));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subspace trick: %d (change explicit %d)\n", pcbddc->benign_saddle_point, pcbddc->benign_change_explicit));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subspace trick is active: %d\n", pcbddc->benign_have_null));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Algebraic computation of no-net-flux: %d\n", pcbddc->compute_nonetflux));
    /* statistics below require a completed setup */
    if (!pc->setupcalled) PetscFunctionReturn(0);

    /* compute interface size: scatter ones from the local interface back to a
       global vector and sum (each interface dof counted once globally) */
    PetscCall(VecSet(pcis->vec1_B, 1.0));
    PetscCall(MatCreateVecs(pc->pmat, &counter, NULL));
    PetscCall(VecSet(counter, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecSum(counter, &interface_size));
    PetscCall(VecDestroy(&counter));

    /* compute some statistics on the domain decomposition */
    /* loc[0] = active process flag; [1] interior dofs; [2] interface dofs;
       [3] primal dofs; [4] local dofs; [5] local subs; [6] benign subs.
       Inactive ranks are excluded from MAX/MIN via the sentinel resets. */
    gsum[0] = 1;
    gsum[1] = gsum[2] = gsum[3] = gsum[4] = gsum[5] = 0;
    loc[0] = !!pcis->n;
    loc[1] = pcis->n - pcis->n_B;
    loc[2] = pcis->n_B;
    loc[3] = pcbddc->local_primal_size;
    loc[4] = pcis->n;
    loc[5] = pcbddc->n_local_subs > 0 ? pcbddc->n_local_subs : (pcis->n ? 1 : 0);
    loc[6] = pcbddc->benign_n;
    PetscCallMPI(MPI_Reduce(loc, gsum, 6, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = -1;
    PetscCallMPI(MPI_Reduce(loc, gmax, 6, MPIU_INT64, MPI_MAX, 0, PetscObjectComm((PetscObject)pc)));
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = PETSC_MAX_INT;
    PetscCallMPI(MPI_Reduce(loc, gmin, 6, MPIU_INT64, MPI_MIN, 0, PetscObjectComm((PetscObject)pc)));
    PetscCallMPI(MPI_Reduce(&loc[6], &totbenign, 1, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    /* NOTE(review): gsum/gmax/gmin are reduced to rank 0 only; the printf
       calls below evaluate them on every rank but ASCII viewers print from
       rank 0 — presumably intentional, verify non-root values are never used */
    if (pcbddc->coarse_size) {
      ratio1 = pc->pmat->rmap->N / (1. * pcbddc->coarse_size);
      ratio2 = PetscRealPart(interface_size) / pcbddc->coarse_size;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "********************************** STATISTICS AT LEVEL %" PetscInt_FMT " **********************************\n", pcbddc->current_level));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Global dofs sizes: all %" PetscInt_FMT " interface %" PetscInt_FMT " coarse %" PetscInt_FMT "\n", pc->pmat->rmap->N, (PetscInt)PetscRealPart(interface_size), pcbddc->coarse_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening ratios: all/coarse %" PetscInt_FMT " interface/coarse %" PetscInt_FMT "\n", (PetscInt)ratio1, (PetscInt)ratio2));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Active processes : %" PetscInt_FMT "\n", (PetscInt)gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Total subdomains : %" PetscInt_FMT "\n", (PetscInt)gsum[5]));
    if (pcbddc->benign_have_null) PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subs : %" PetscInt_FMT "\n", (PetscInt)totbenign));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Dofs type :\tMIN\tMAX\tMEAN\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interior dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[1], (PetscInt)gmax[1], (PetscInt)(gsum[1] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interface dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[2], (PetscInt)gmax[2], (PetscInt)(gsum[2] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Primal dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[3], (PetscInt)gmax[3], (PetscInt)(gsum[3] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Local dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[4], (PetscInt)gmax[4], (PetscInt)(gsum[4] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Local subs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[5], (PetscInt)gmax[5]));
    PetscCall(PetscViewerFlush(viewer));

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

    /* local solvers */
    PetscCall(PetscViewerGetSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    if (rank == 0) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Interior solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_D, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Correction solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_R, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    PetscCall(PetscViewerFlush(viewer));

    /* the coarse problem can be handled by a different communicator:
       split the ranks owning a coarse KSP (color 1) from the rest */
    if (pcbddc->coarse_ksp) color = 1;
    else color = 0;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
    PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm));
    PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2)));
    PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank));
    PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    if (color == 1) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Coarse solver\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->coarse_ksp, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    PetscCall(PetscSubcommDestroy(&subcomm));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscFunctionReturn(0);
}

/* Store a (referenced) discrete gradient matrix and the Nedelec space
   parameters; implementation behind PCBDDCSetDiscreteGradient() */
static PetscErrorCode PCBDDCSetDiscreteGradient_BDDC(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  /* reference before destroy so passing the currently-set matrix is safe */
  PetscCall(PetscObjectReference((PetscObject)G));
  PetscCall(MatDestroy(&pcbddc->discretegradient));
  pcbddc->discretegradient = G;
  pcbddc->nedorder = order > 0 ? order : -order; /* stored as |order| */
  pcbddc->nedfield = field;
  pcbddc->nedglobal = global;
  pcbddc->conforming = conforming;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDiscreteGradient - Sets the discrete gradient

 Collective on PC

 Input Parameters:
+ pc - the preconditioning context
. G - the discrete gradient matrix (should be in AIJ format)
. order - the order of the Nedelec space (1 for the lowest order)
. field - the field id of the Nedelec dofs (not used if the fields have not been specified)
. global - the type of global ordering for the rows of G
- conforming - whether the mesh is conforming or not

 Level: advanced

 Notes:
 The discrete gradient matrix G is used to analyze the subdomain edges, and it should not contain any zero entry.
 For variable order spaces, the order should be set to zero.
312 If global is true, the rows of G should be given in global ordering for the whole dofs; 313 if false, the ordering should be global for the Nedelec field. 314 In the latter case, it should hold gid[i] < gid[j] iff geid[i] < geid[j], with gid the global orderding for all the dofs 315 and geid the one for the Nedelec field. 316 317 .seealso: `PCBDDC`, `PCBDDCSetDofsSplitting()`, `PCBDDCSetDofsSplittingLocal()` 318 @*/ 319 PetscErrorCode PCBDDCSetDiscreteGradient(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming) { 320 PetscFunctionBegin; 321 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 322 PetscValidHeaderSpecific(G, MAT_CLASSID, 2); 323 PetscValidLogicalCollectiveInt(pc, order, 3); 324 PetscValidLogicalCollectiveInt(pc, field, 4); 325 PetscValidLogicalCollectiveBool(pc, global, 5); 326 PetscValidLogicalCollectiveBool(pc, conforming, 6); 327 PetscCheckSameComm(pc, 1, G, 2); 328 PetscTryMethod(pc, "PCBDDCSetDiscreteGradient_C", (PC, Mat, PetscInt, PetscInt, PetscBool, PetscBool), (pc, G, order, field, global, conforming)); 329 PetscFunctionReturn(0); 330 } 331 332 static PetscErrorCode PCBDDCSetDivergenceMat_BDDC(PC pc, Mat divudotp, PetscBool trans, IS vl2l) { 333 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 334 335 PetscFunctionBegin; 336 PetscCall(PetscObjectReference((PetscObject)divudotp)); 337 PetscCall(MatDestroy(&pcbddc->divudotp)); 338 pcbddc->divudotp = divudotp; 339 pcbddc->divudotp_trans = trans; 340 pcbddc->compute_nonetflux = PETSC_TRUE; 341 if (vl2l) { 342 PetscCall(PetscObjectReference((PetscObject)vl2l)); 343 PetscCall(ISDestroy(&pcbddc->divudotp_vl2l)); 344 pcbddc->divudotp_vl2l = vl2l; 345 } 346 PetscFunctionReturn(0); 347 } 348 349 /*@ 350 PCBDDCSetDivergenceMat - Sets the linear operator representing \int_\Omega \div {\bf u} \cdot p dx 351 352 Collective on PC 353 354 Input Parameters: 355 + pc - the preconditioning context 356 . divudotp - the matrix (must be of type MATIS) 357 . trans - if trans if false (resp. 
true), then pressures are in the test (trial) space and velocities are in the trial (test) space. 358 - vl2l - optional index set describing the local (wrt the local matrix in divudotp) to local (wrt the local matrix in the preconditioning matrix) map for the velocities 359 360 Level: advanced 361 362 Notes: 363 This auxiliary matrix is used to compute quadrature weights representing the net-flux across subdomain boundaries 364 If vl2l is NULL, the local ordering for velocities in divudotp should match that of the preconditioning matrix 365 366 .seealso: `PCBDDC` 367 @*/ 368 PetscErrorCode PCBDDCSetDivergenceMat(PC pc, Mat divudotp, PetscBool trans, IS vl2l) { 369 PetscBool ismatis; 370 371 PetscFunctionBegin; 372 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 373 PetscValidHeaderSpecific(divudotp, MAT_CLASSID, 2); 374 PetscCheckSameComm(pc, 1, divudotp, 2); 375 PetscValidLogicalCollectiveBool(pc, trans, 3); 376 if (vl2l) PetscValidHeaderSpecific(vl2l, IS_CLASSID, 4); 377 PetscCall(PetscObjectTypeCompare((PetscObject)divudotp, MATIS, &ismatis)); 378 PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Divergence matrix needs to be of type MATIS"); 379 PetscTryMethod(pc, "PCBDDCSetDivergenceMat_C", (PC, Mat, PetscBool, IS), (pc, divudotp, trans, vl2l)); 380 PetscFunctionReturn(0); 381 } 382 383 static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change, PetscBool interior) { 384 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 385 386 PetscFunctionBegin; 387 PetscCall(PetscObjectReference((PetscObject)change)); 388 PetscCall(MatDestroy(&pcbddc->user_ChangeOfBasisMatrix)); 389 pcbddc->user_ChangeOfBasisMatrix = change; 390 pcbddc->change_interior = interior; 391 PetscFunctionReturn(0); 392 } 393 394 /*@ 395 PCBDDCSetChangeOfBasisMat - Set user defined change of basis for dofs 396 397 Collective on PC 398 399 Input Parameters: 400 + pc - the preconditioning context 401 . 
change - the change of basis matrix 402 - interior - whether or not the change of basis modifies interior dofs 403 404 Level: intermediate 405 406 Notes: 407 408 .seealso: `PCBDDC` 409 @*/ 410 PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change, PetscBool interior) { 411 PetscFunctionBegin; 412 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 413 PetscValidHeaderSpecific(change, MAT_CLASSID, 2); 414 PetscCheckSameComm(pc, 1, change, 2); 415 if (pc->mat) { 416 PetscInt rows_c, cols_c, rows, cols; 417 PetscCall(MatGetSize(pc->mat, &rows, &cols)); 418 PetscCall(MatGetSize(change, &rows_c, &cols_c)); 419 PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows); 420 PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols); 421 PetscCall(MatGetLocalSize(pc->mat, &rows, &cols)); 422 PetscCall(MatGetLocalSize(change, &rows_c, &cols_c)); 423 PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows); 424 PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local columns for change of basis matrix! 
%" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols); 425 } 426 PetscTryMethod(pc, "PCBDDCSetChangeOfBasisMat_C", (PC, Mat, PetscBool), (pc, change, interior)); 427 PetscFunctionReturn(0); 428 } 429 430 static PetscErrorCode PCBDDCSetPrimalVerticesIS_BDDC(PC pc, IS PrimalVertices) { 431 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 432 PetscBool isequal = PETSC_FALSE; 433 434 PetscFunctionBegin; 435 PetscCall(PetscObjectReference((PetscObject)PrimalVertices)); 436 if (pcbddc->user_primal_vertices) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices, &isequal)); 437 PetscCall(ISDestroy(&pcbddc->user_primal_vertices)); 438 PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local)); 439 pcbddc->user_primal_vertices = PrimalVertices; 440 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 441 PetscFunctionReturn(0); 442 } 443 444 /*@ 445 PCBDDCSetPrimalVerticesIS - Set additional user defined primal vertices in PCBDDC 446 447 Collective 448 449 Input Parameters: 450 + pc - the preconditioning context 451 - PrimalVertices - index set of primal vertices in global numbering (can be empty) 452 453 Level: intermediate 454 455 Notes: 456 Any process can list any global node 457 458 .seealso: `PCBDDC`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()` 459 @*/ 460 PetscErrorCode PCBDDCSetPrimalVerticesIS(PC pc, IS PrimalVertices) { 461 PetscFunctionBegin; 462 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 463 PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2); 464 PetscCheckSameComm(pc, 1, PrimalVertices, 2); 465 PetscTryMethod(pc, "PCBDDCSetPrimalVerticesIS_C", (PC, IS), (pc, PrimalVertices)); 466 PetscFunctionReturn(0); 467 } 468 469 static PetscErrorCode PCBDDCGetPrimalVerticesIS_BDDC(PC pc, IS *is) { 470 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 471 472 PetscFunctionBegin; 473 *is = pcbddc->user_primal_vertices; 474 PetscFunctionReturn(0); 475 } 476 477 /*@ 478 PCBDDCGetPrimalVerticesIS - Get user defined primal 
vertices set with PCBDDCSetPrimalVerticesIS()

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  is - index set of primal vertices in global numbering (NULL if not set)

   Level: intermediate

   Notes:
     The returned IS is owned by the preconditioner; the caller must not destroy it

.seealso: `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesIS(PC pc, IS *is) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(is, 2);
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetPrimalVerticesLocalIS(): stores a reference to the user-provided
   local-numbering IS; topography is recomputed only if the set actually changed */
static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices_local) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices_local, &isequal));
  /* last user setting takes precedence -> destroy any previous (global or local) customization */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices_local = PrimalVertices;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetPrimalVerticesLocalIS - Set additional user defined primal vertices in PCBDDC

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  PrimalVertices - index set of primal vertices in local numbering (can be empty)

   Level: intermediate

   Notes:

.seealso: `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesLocalIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCGetPrimalVerticesLocalIS(): returns a borrowed reference (no extra reference taken) */
static PetscErrorCode PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc, IS *is) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *is = pcbddc->user_primal_vertices_local;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetPrimalVerticesLocalIS - Get user defined primal vertices set with PCBDDCSetPrimalVerticesLocalIS()

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  is - index set of primal vertices in local numbering (NULL if not set)

   Level: intermediate

   Notes:
     The returned IS is owned by the preconditioner; the caller must not destroy it

.seealso: `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesLocalIS(PC pc, IS *is) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidPointer(is, 2);
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesLocalIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetCoarseningRatio(): plain setter on the BDDC context */
static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc, PetscInt k) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->coarsening_ratio = k;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  k - coarsening ratio (H/h at the coarser level)

   Options Database Keys:
.
-pc_bddc_coarsening_ratio <int> - Set coarsening ratio used in multilevel coarsening

   Level: intermediate

   Notes:
     Approximately k subdomains at the finer level will be aggregated into a single subdomain at the coarser level

.seealso: `PCBDDC`, `PCBDDCSetLevels()`
@*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc, PetscInt k) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, k, 2);
  PetscTryMethod(pc, "PCBDDCSetCoarseningRatio_C", (PC, PetscInt), (pc, k));
  PetscFunctionReturn(0);
}

/* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */

/* Enables/disables the exact Dirichlet trick (skip one interior solve per Krylov iteration) */
static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc, PetscBool flg) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->use_exact_dirichlet_trick = flg;
  PetscFunctionReturn(0);
}

PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc, PetscBool flg) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  PetscTryMethod(pc, "PCBDDCSetUseExactDirichlet_C", (PC, PetscBool), (pc, flg));
  PetscFunctionReturn(0);
}

/* Records the level index of this PC inside a multilevel BDDC hierarchy */
static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc, PetscInt level) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->current_level = level;
  PetscFunctionReturn(0);
}

PetscErrorCode PCBDDCSetLevel(PC pc, PetscInt level) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, level, 2);
  PetscTryMethod(pc, "PCBDDCSetLevel_C", (PC, PetscInt), (pc, level));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetLevels(): bounds-checks against the compile-time maximum */
static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc, PetscInt levels) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  PetscCheck(levels < PETSC_PCBDDC_MAXLEVELS, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Maximum number of additional levels for BDDC is %d", PETSC_PCBDDC_MAXLEVELS - 1);
  pcbddc->max_levels = levels;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetLevels - Sets the maximum number of additional levels allowed for multilevel BDDC

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  levels - the maximum number of levels

   Options Database Keys:
.  -pc_bddc_levels <int> - Set maximum number of levels for multilevel

   Level: intermediate

   Notes:
     The default value is 0, that gives the classical two-level BDDC

.seealso: `PCBDDC`, `PCBDDCSetCoarseningRatio()`
@*/
PetscErrorCode PCBDDCSetLevels(PC pc, PetscInt levels) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  PetscTryMethod(pc, "PCBDDCSetLevels_C", (PC, PetscInt), (pc, levels));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetDirichletBoundaries(): stores a reference to the user-provided
   global-numbering IS; topography is recomputed only if the set actually changed */
static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc, IS DirichletBoundaries) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundaries) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundaries, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundaries = DirichletBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDirichletBoundaries - Set IS defining Dirichlet boundaries for the global problem.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  DirichletBoundaries - parallel IS defining the Dirichlet boundaries

   Level: intermediate

   Notes:
     Provide the information if you used MatZeroRows/Columns routines. Any process can list any global node

.seealso: `PCBDDC`, `PCBDDCSetDirichletBoundariesLocal()`, `MatZeroRows()`, `MatZeroRowsColumns()`
@*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc, IS DirichletBoundaries) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundaries_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetDirichletBoundariesLocal(): stores a reference to the user-provided
   local-numbering IS; topography is recomputed only if the set actually changed */
static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc, IS DirichletBoundaries) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundariesLocal) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundariesLocal, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundariesLocal = DirichletBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDirichletBoundariesLocal - Set IS defining Dirichlet boundaries for the global problem in local ordering.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  DirichletBoundaries - parallel IS defining the Dirichlet boundaries (in local ordering)

   Level: intermediate

   Notes:

.seealso: `PCBDDC`, `PCBDDCSetDirichletBoundaries()`, `MatZeroRows()`, `MatZeroRowsColumns()`
@*/
PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc, IS DirichletBoundaries) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundariesLocal_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetNeumannBoundaries(): stores a reference to the user-provided
   global-numbering IS; topography is recomputed only if the set actually changed */
static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc, IS NeumannBoundaries) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundaries) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundaries, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundaries = NeumannBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetNeumannBoundaries - Set IS defining Neumann boundaries for the global problem.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  NeumannBoundaries - parallel IS defining the Neumann boundaries

   Level: intermediate

   Notes:
     Any process can list any global node

.seealso: `PCBDDC`, `PCBDDCSetNeumannBoundariesLocal()`
@*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc, IS NeumannBoundaries) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundaries_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetNeumannBoundariesLocal(): stores a reference to the user-provided
   local-numbering IS; topography is recomputed only if the set actually changed */
static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc, IS NeumannBoundaries) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundariesLocal) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundariesLocal, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundariesLocal = NeumannBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetNeumannBoundariesLocal - Set IS defining Neumann boundaries for the global problem in local ordering.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  NeumannBoundaries - parallel IS defining the subdomain part of Neumann boundaries (in local ordering)

   Level: intermediate

   Notes:

.seealso: `PCBDDC`, `PCBDDCSetNeumannBoundaries()`
@*/
PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc, IS NeumannBoundaries) {
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundariesLocal_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCGetDirichletBoundaries(): returns a borrowed reference (no extra reference taken) */
static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc, IS *DirichletBoundaries) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *DirichletBoundaries = pcbddc->DirichletBoundaries;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetDirichletBoundaries - Get parallel IS for Dirichlet boundaries

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.
DirichletBoundaries - index set defining the Dirichlet boundaries 853 854 Level: intermediate 855 856 Notes: 857 The IS returned (if any) is the same passed in earlier by the user with PCBDDCSetDirichletBoundaries 858 859 .seealso: `PCBDDC` 860 @*/ 861 PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc, IS *DirichletBoundaries) { 862 PetscFunctionBegin; 863 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 864 PetscUseMethod(pc, "PCBDDCGetDirichletBoundaries_C", (PC, IS *), (pc, DirichletBoundaries)); 865 PetscFunctionReturn(0); 866 } 867 868 static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc, IS *DirichletBoundaries) { 869 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 870 871 PetscFunctionBegin; 872 *DirichletBoundaries = pcbddc->DirichletBoundariesLocal; 873 PetscFunctionReturn(0); 874 } 875 876 /*@ 877 PCBDDCGetDirichletBoundariesLocal - Get parallel IS for Dirichlet boundaries (in local ordering) 878 879 Collective 880 881 Input Parameters: 882 . pc - the preconditioning context 883 884 Output Parameters: 885 . DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries 886 887 Level: intermediate 888 889 Notes: 890 The IS returned could be the same passed in earlier by the user (if provided with PCBDDCSetDirichletBoundariesLocal) or a global-to-local map of the global IS (if provided with PCBDDCSetDirichletBoundaries). 891 In the latter case, the IS will be available after PCSetUp. 
892 893 .seealso: `PCBDDC` 894 @*/ 895 PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc, IS *DirichletBoundaries) { 896 PetscFunctionBegin; 897 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 898 PetscUseMethod(pc, "PCBDDCGetDirichletBoundariesLocal_C", (PC, IS *), (pc, DirichletBoundaries)); 899 PetscFunctionReturn(0); 900 } 901 902 static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc, IS *NeumannBoundaries) { 903 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 904 905 PetscFunctionBegin; 906 *NeumannBoundaries = pcbddc->NeumannBoundaries; 907 PetscFunctionReturn(0); 908 } 909 910 /*@ 911 PCBDDCGetNeumannBoundaries - Get parallel IS for Neumann boundaries 912 913 Collective 914 915 Input Parameters: 916 . pc - the preconditioning context 917 918 Output Parameters: 919 . NeumannBoundaries - index set defining the Neumann boundaries 920 921 Level: intermediate 922 923 Notes: 924 The IS returned (if any) is the same passed in earlier by the user with PCBDDCSetNeumannBoundaries 925 926 .seealso: `PCBDDC` 927 @*/ 928 PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc, IS *NeumannBoundaries) { 929 PetscFunctionBegin; 930 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 931 PetscUseMethod(pc, "PCBDDCGetNeumannBoundaries_C", (PC, IS *), (pc, NeumannBoundaries)); 932 PetscFunctionReturn(0); 933 } 934 935 static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc, IS *NeumannBoundaries) { 936 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 937 938 PetscFunctionBegin; 939 *NeumannBoundaries = pcbddc->NeumannBoundariesLocal; 940 PetscFunctionReturn(0); 941 } 942 943 /*@ 944 PCBDDCGetNeumannBoundariesLocal - Get parallel IS for Neumann boundaries (in local ordering) 945 946 Collective 947 948 Input Parameters: 949 . pc - the preconditioning context 950 951 Output Parameters: 952 . 
NeumannBoundaries - index set defining the subdomain part of Neumann boundaries 953 954 Level: intermediate 955 956 Notes: 957 The IS returned could be the same passed in earlier by the user (if provided with PCBDDCSetNeumannBoundariesLocal) or a global-to-local map of the global IS (if provided with PCBDDCSetNeumannBoundaries). 958 In the latter case, the IS will be available after PCSetUp. 959 960 .seealso: `PCBDDC` 961 @*/ 962 PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc, IS *NeumannBoundaries) { 963 PetscFunctionBegin; 964 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 965 PetscUseMethod(pc, "PCBDDCGetNeumannBoundariesLocal_C", (PC, IS *), (pc, NeumannBoundaries)); 966 PetscFunctionReturn(0); 967 } 968 969 static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode) { 970 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 971 PCBDDCGraph mat_graph = pcbddc->mat_graph; 972 PetscBool same_data = PETSC_FALSE; 973 974 PetscFunctionBegin; 975 if (!nvtxs) { 976 if (copymode == PETSC_OWN_POINTER) { 977 PetscCall(PetscFree(xadj)); 978 PetscCall(PetscFree(adjncy)); 979 } 980 PetscCall(PCBDDCGraphResetCSR(mat_graph)); 981 PetscFunctionReturn(0); 982 } 983 if (mat_graph->nvtxs == nvtxs && mat_graph->freecsr) { /* we own the data */ 984 if (mat_graph->xadj == xadj && mat_graph->adjncy == adjncy) same_data = PETSC_TRUE; 985 if (!same_data && mat_graph->xadj[nvtxs] == xadj[nvtxs]) { 986 PetscCall(PetscArraycmp(xadj, mat_graph->xadj, nvtxs + 1, &same_data)); 987 if (same_data) PetscCall(PetscArraycmp(adjncy, mat_graph->adjncy, xadj[nvtxs], &same_data)); 988 } 989 } 990 if (!same_data) { 991 /* free old CSR */ 992 PetscCall(PCBDDCGraphResetCSR(mat_graph)); 993 /* get CSR into graph structure */ 994 if (copymode == PETSC_COPY_VALUES) { 995 PetscCall(PetscMalloc1(nvtxs + 1, &mat_graph->xadj)); 996 PetscCall(PetscMalloc1(xadj[nvtxs], &mat_graph->adjncy)); 997 
PetscCall(PetscArraycpy(mat_graph->xadj, xadj, nvtxs + 1)); 998 PetscCall(PetscArraycpy(mat_graph->adjncy, adjncy, xadj[nvtxs])); 999 mat_graph->freecsr = PETSC_TRUE; 1000 } else if (copymode == PETSC_OWN_POINTER) { 1001 mat_graph->xadj = (PetscInt *)xadj; 1002 mat_graph->adjncy = (PetscInt *)adjncy; 1003 mat_graph->freecsr = PETSC_TRUE; 1004 } else if (copymode == PETSC_USE_POINTER) { 1005 mat_graph->xadj = (PetscInt *)xadj; 1006 mat_graph->adjncy = (PetscInt *)adjncy; 1007 mat_graph->freecsr = PETSC_FALSE; 1008 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported copy mode %d", copymode); 1009 mat_graph->nvtxs_csr = nvtxs; 1010 pcbddc->recompute_topography = PETSC_TRUE; 1011 } 1012 PetscFunctionReturn(0); 1013 } 1014 1015 /*@ 1016 PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local degrees of freedom. 1017 1018 Not collective 1019 1020 Input Parameters: 1021 + pc - the preconditioning context. 1022 . nvtxs - number of local vertices of the graph (i.e., the number of local dofs). 1023 . xadj, adjncy - the connectivity of the dofs in CSR format. 1024 - copymode - supported modes are PETSC_COPY_VALUES, PETSC_USE_POINTER or PETSC_OWN_POINTER. 1025 1026 Level: intermediate 1027 1028 Notes: 1029 A dof is considered connected with all local dofs if xadj[dof+1]-xadj[dof] == 1 and adjncy[xadj[dof]] is negative. 

.seealso: `PCBDDC`, `PetscCopyMode`
@*/
PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode) {
  void (*f)(void) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (nvtxs) {
    PetscValidIntPointer(xadj, 3);
    if (xadj[nvtxs]) PetscValidIntPointer(adjncy, 4);
  }
  PetscTryMethod(pc, "PCBDDCSetLocalAdjacencyGraph_C", (PC, PetscInt, const PetscInt[], const PetscInt[], PetscCopyMode), (pc, nvtxs, xadj, adjncy, copymode));
  /* free arrays if PCBDDC is not the PC type */
  PetscCall(PetscObjectQueryFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", &f));
  if (!f && copymode == PETSC_OWN_POINTER) {
    PetscCall(PetscFree(xadj));
    PetscCall(PetscFree(adjncy));
  }
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetDofsSplittingLocal(): stores references to the user-provided
   local-numbering ISes; topography is recomputed only if the splitting actually changed */
static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc, PetscInt n_is, IS ISForDofs[]) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscInt  i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* detect whether the new splitting coincides with the stored one */
  if (pcbddc->n_ISForDofsLocal == n_is) {
    for (i = 0; i < n_is; i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofsLocal[i], &isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference the new ISes before destroying the old ones (safe if they overlap) */
  for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  /* Destroy ISes if they were already set */
  for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  /* last user setting takes precedence -> destroy any other customization */
  for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  PetscCall(PetscFree(pcbddc->ISForDofs));
  pcbddc->n_ISForDofs = 0;
  /* allocate space then set */
  if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofsLocal));
  for (i = 0; i < n_is; i++) pcbddc->ISForDofsLocal[i] = ISForDofs[i];
  pcbddc->n_ISForDofsLocal = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDofsSplittingLocal - Set index sets defining fields of the local subdomain matrix

   Collective

   Input Parameters:
+  pc - the preconditioning context
.  n_is - number of index sets defining the fields
-  ISForDofs - array of IS describing the fields in local ordering

   Level: intermediate

   Notes:
     n_is should be the same among processes. Not all nodes need to be listed: unlisted nodes will belong to the complement field.

.seealso: `PCBDDC`
@*/
PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc, PetscInt n_is, IS ISForDofs[]) {
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n_is, 2);
  for (i = 0; i < n_is; i++) {
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
    PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3);
  }
  PetscTryMethod(pc, "PCBDDCSetDofsSplittingLocal_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(0);
}

/* Implementation of PCBDDCSetDofsSplitting(): stores references to the user-provided
   global-numbering ISes; topography is recomputed only if the splitting actually changed */
static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc, PetscInt n_is, IS ISForDofs[]) {
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscInt  i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* detect whether the new splitting coincides with the stored one */
  if (pcbddc->n_ISForDofs == n_is) {
    for (i = 0; i < n_is; i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofs[i], &isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference the new ISes before destroying the old ones (safe if they overlap) */
  for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  /* Destroy ISes if they were already set */
  for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  PetscCall(PetscFree(pcbddc->ISForDofs));
  /* last user setting takes precedence -> destroy any other customization */
  for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  pcbddc->n_ISForDofsLocal = 0;
  /* allocate space then set */
  if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofs));
  for (i = 0; i < n_is; i++) pcbddc->ISForDofs[i] = ISForDofs[i];
  pcbddc->n_ISForDofs = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDofsSplitting - Set index sets defining fields of the global matrix

   Collective

   Input Parameters:
+  pc - the preconditioning context
.  n_is - number of index sets defining the fields
-  ISForDofs - array of IS describing the fields in global ordering

   Level: intermediate

   Notes:
     Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field.

.seealso: `PCBDDC`
@*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc, PetscInt n_is, IS ISForDofs[]) {
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n_is, 2);
  for (i = 0; i < n_is; i++) {
    PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3);
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
  }
  PetscTryMethod(pc, "PCBDDCSetDofsSplitting_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(0);
}

/*
   PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial
                     guess if a transformation of basis approach has been selected.

   Input Parameter:
+  pc - the preconditioner context

   Application Interface Routine: PCPreSolve()

   Notes:
     The interface routine PCPreSolve() is not usually called directly by
     the user, but instead is called by KSPSolve().
*/
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x) {
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PC_IS    *pcis   = (PC_IS *)(pc->data);
  Vec       used_vec;
  PetscBool iscg, save_rhs = PETSC_TRUE, benign_correction_computed;

  PetscFunctionBegin;
  /* if we are working with CG, one dirichlet solve can be avoided during Krylov iterations */
  if (ksp) {
    PetscCall(PetscObjectTypeCompareAny((PetscObject)ksp, &iscg, KSPCG, KSPGROPPCG, KSPPIPECG, KSPPIPELCG, KSPPIPECGRR, ""));
    if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || !iscg || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));
  }
  /* NOTE(review): the check below repeats the previous one minus the CG condition; it also
     covers the ksp == NULL (FETI-DP) path -- confirm whether both are intended */
  if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));

  /* Creates parallel work vectors used in presolve */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (!pcbddc->temp_solution) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->temp_solution));

  pcbddc->temp_solution_used = PETSC_FALSE;
  if (x) {
    PetscCall(PetscObjectReference((PetscObject)x));
    used_vec = x;
  } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */
    PetscCall(PetscObjectReference((PetscObject)pcbddc->temp_solution));
    used_vec = pcbddc->temp_solution;
    PetscCall(VecSet(used_vec, 0.0));
    pcbddc->temp_solution_used = PETSC_TRUE;
    PetscCall(VecCopy(rhs, pcbddc->original_rhs));
    save_rhs                  = PETSC_FALSE;
    pcbddc->eliminate_dirdofs = PETSC_TRUE;
  }

  /* hack into ksp data structure since PCPreSolve comes earlier than setting to zero the guess in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */
    PetscCall(KSPGetInitialGuessNonzero(ksp, &pcbddc->ksp_guess_nonzero));
    if (!pcbddc->ksp_guess_nonzero) PetscCall(VecSet(used_vec, 0.0));
  }

  pcbddc->rhs_change = PETSC_FALSE;
  /* Take into account zeroed rows -> change rhs and store solution removed */
  if (rhs && pcbddc->eliminate_dirdofs) {
    IS dirIS = NULL;

    /* DirichletBoundariesLocal may not be consistent among neighbours; gets a dirichlet dofs IS from graph (may be cached) */
    PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirIS));
    if (dirIS) {
      Mat_IS            *matis = (Mat_IS *)pc->pmat->data;
      PetscInt           dirsize, i, *is_indices;
      PetscScalar       *array_x;
      const PetscScalar *array_diagonal;

      /* vec1_global holds rhs/diag, i.e. the solution values at the zeroed-out rows */
      PetscCall(MatGetDiagonal(pc->pmat, pcis->vec1_global));
      PetscCall(VecPointwiseDivide(pcis->vec1_global, rhs, pcis->vec1_global));
      PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterBegin(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(ISGetLocalSize(dirIS, &dirsize));
      PetscCall(VecGetArray(pcis->vec1_N, &array_x));
      PetscCall(VecGetArrayRead(pcis->vec2_N, &array_diagonal));
      PetscCall(ISGetIndices(dirIS, (const PetscInt **)&is_indices));
      /* overwrite the guess at Dirichlet dofs with the known solution values */
      for (i = 0; i < dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
      PetscCall(ISRestoreIndices(dirIS, (const PetscInt **)&is_indices));
      PetscCall(VecRestoreArrayRead(pcis->vec2_N, &array_diagonal));
      PetscCall(VecRestoreArray(pcis->vec1_N, &array_x));
      PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
      pcbddc->rhs_change = PETSC_TRUE;
      PetscCall(ISDestroy(&dirIS));
    }
  }

  /* remove the computed solution or the initial guess from the rhs */
  if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero)) {
    /* save the original rhs */
    if (save_rhs) {
      PetscCall(VecSwap(rhs, pcbddc->original_rhs));
      save_rhs = PETSC_FALSE;
    }
    pcbddc->rhs_change = PETSC_TRUE;
    /* rhs <- original_rhs - A*used_vec (the two VecScale calls leave used_vec unchanged) */
    PetscCall(VecScale(used_vec, -1.0));
    PetscCall(MatMultAdd(pc->mat, used_vec, pcbddc->original_rhs, rhs));
    PetscCall(VecScale(used_vec, -1.0));
    PetscCall(VecCopy(used_vec, pcbddc->temp_solution));
    pcbddc->temp_solution_used = PETSC_TRUE;
    if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE));
  }
  PetscCall(VecDestroy(&used_vec));

  /* compute initial vector in benign space if needed
     and remove non-benign solution from the rhs */
  benign_correction_computed = PETSC_FALSE;
  if (rhs && pcbddc->benign_compute_correction && (pcbddc->benign_have_null || pcbddc->benign_apply_coarse_only)) {
    /* compute u^*_h using ideas similar to those in Xuemin Tu's PhD thesis (see Section 4.8.1)
       Recursively apply BDDC in the multilevel case */
    if (!pcbddc->benign_vec) PetscCall(VecDuplicate(rhs, &pcbddc->benign_vec));
    /* keep applying coarse solver unless we no longer have benign subdomains */
    pcbddc->benign_apply_coarse_only = pcbddc->benign_have_null ? PETSC_TRUE : PETSC_FALSE;
    if (!pcbddc->benign_skip_correction) {
      PetscCall(PCApply_BDDC(pc, rhs, pcbddc->benign_vec));
      benign_correction_computed = PETSC_TRUE;
      if (pcbddc->temp_solution_used) PetscCall(VecAXPY(pcbddc->temp_solution, 1.0, pcbddc->benign_vec));
      PetscCall(VecScale(pcbddc->benign_vec, -1.0));
      /* store the original rhs if not done earlier */
      if (save_rhs) PetscCall(VecSwap(rhs, pcbddc->original_rhs));
      if (pcbddc->rhs_change) {
        PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, rhs, rhs));
      } else {
        PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, pcbddc->original_rhs, rhs));
      }
      pcbddc->rhs_change = PETSC_TRUE;
    }
    pcbddc->benign_apply_coarse_only = PETSC_FALSE;
  } else {
    PetscCall(VecDestroy(&pcbddc->benign_vec));
  }

  /* dbg output */
  if (pcbddc->dbg_flag && benign_correction_computed) {
    Vec v;

    PetscCall(VecDuplicate(pcis->vec1_global, &v));
    if (pcbddc->ChangeOfBasisMatrix) {
      PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, v));
    } else {
      PetscCall(VecCopy(rhs, v));
    }
    PetscCall(PCBDDCBenignGetOrSetP0(pc, v, PETSC_TRUE));
    PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "LEVEL %" PetscInt_FMT ": is the correction benign?\n", pcbddc->current_level));
    PetscCall(PetscScalarView(pcbddc->benign_n, pcbddc->benign_p0, pcbddc->dbg_viewer));
    PetscCall(PetscViewerFlush(pcbddc->dbg_viewer));
    PetscCall(VecDestroy(&v));
  }

  /* set initial guess if using PCG */
  pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
  if (x && pcbddc->use_exact_dirichlet_trick) {
    PetscCall(VecSet(x, 0.0));
    if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
      if (benign_correction_computed) { /* we have already saved the changed rhs */
        PetscCall(VecLockReadPop(pcis->vec1_global));
      } else {
        PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, pcis->vec1_global));
      }
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(VecScatterBegin(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    }
    /* interior (Dirichlet) solve provides the initial guess on interior dofs */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
    if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
      PetscCall(VecSet(pcis->vec1_global, 0.));
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcis->vec1_global, x));
    } else {
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
    }
    if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
    pcbddc->exact_dirichlet_trick_app = PETSC_TRUE;
  } else if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior && benign_correction_computed && pcbddc->use_exact_dirichlet_trick) {
    /* balance the VecLockReadPush done when the benign correction was computed */
    PetscCall(VecLockReadPop(pcis->vec1_global));
  }
  PetscFunctionReturn(0);
}

/*
   PCPostSolve_BDDC - Changes the computed solution if a transformation of basis
                      approach has been selected. Also, restores rhs to its original state.
1364 1365 Input Parameter: 1366 + pc - the preconditioner context 1367 1368 Application Interface Routine: PCPostSolve() 1369 1370 Notes: 1371 The interface routine PCPostSolve() is not usually called directly by 1372 the user, but instead is called by KSPSolve(). 1373 */ 1374 static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x) { 1375 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 1376 1377 PetscFunctionBegin; 1378 /* add solution removed in presolve */ 1379 if (x && pcbddc->rhs_change) { 1380 if (pcbddc->temp_solution_used) { 1381 PetscCall(VecAXPY(x, 1.0, pcbddc->temp_solution)); 1382 } else if (pcbddc->benign_compute_correction && pcbddc->benign_vec) { 1383 PetscCall(VecAXPY(x, -1.0, pcbddc->benign_vec)); 1384 } 1385 /* restore to original state (not for FETI-DP) */ 1386 if (ksp) pcbddc->temp_solution_used = PETSC_FALSE; 1387 } 1388 1389 /* restore rhs to its original state (not needed for FETI-DP) */ 1390 if (rhs && pcbddc->rhs_change) { 1391 PetscCall(VecSwap(rhs, pcbddc->original_rhs)); 1392 pcbddc->rhs_change = PETSC_FALSE; 1393 } 1394 /* restore ksp guess state */ 1395 if (ksp) { 1396 PetscCall(KSPSetInitialGuessNonzero(ksp, pcbddc->ksp_guess_nonzero)); 1397 /* reset flag for exact dirichlet trick */ 1398 pcbddc->exact_dirichlet_trick_app = PETSC_FALSE; 1399 } 1400 PetscFunctionReturn(0); 1401 } 1402 1403 /* 1404 PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner 1405 by setting data structures and options. 1406 1407 Input Parameter: 1408 + pc - the preconditioner context 1409 1410 Application Interface Routine: PCSetUp() 1411 1412 Notes: 1413 The interface routine PCSetUp() is not usually called directly by 1414 the user, but instead is called by PCApply() if necessary. 
*/
PetscErrorCode PCSetUp_BDDC(PC pc) {
  PC_BDDC        *pcbddc = (PC_BDDC *)pc->data;
  PCBDDCSubSchurs sub_schurs;
  Mat_IS         *matis;
  MatNullSpace    nearnullspace;
  Mat             lA;
  IS              lP, zerodiag = NULL;
  PetscInt        nrows, ncols;
  PetscMPIInt     size;
  PetscBool       computesubschurs;
  PetscBool       computeconstraintsmatrix;
  PetscBool       new_nearnullspace_provided, ismatis, rl;
  PetscBool       isset, issym, isspd;

  PetscFunctionBegin;
  /* BDDC only works on MATIS (unassembled, subdomain-local) matrices, and only square ones */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &ismatis));
  PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PCBDDC preconditioner requires matrix of type MATIS");
  PetscCall(MatGetSize(pc->pmat, &nrows, &ncols));
  PetscCheck(nrows == ncols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCBDDC preconditioner requires a square preconditioning matrix");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  matis = (Mat_IS *)pc->pmat->data;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
     Also, BDDC builds its own KSP for the Dirichlet problem */
  /* collective decision: if any rank needs to recompute topology, all ranks do (MPI_LOR reduction) */
  rl = pcbddc->recompute_topography;
  if (!pc->setupcalled || pc->flag == DIFFERENT_NONZERO_PATTERN) rl = PETSC_TRUE;
  PetscCall(MPIU_Allreduce(&rl, &pcbddc->recompute_topography, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc)));
  if (pcbddc->recompute_topography) {
    pcbddc->graphanalyzed    = PETSC_FALSE;
    computeconstraintsmatrix = PETSC_TRUE;
  } else {
    computeconstraintsmatrix = PETSC_FALSE;
  }

  /* check parameters' compatibility: deluxe scaling and adaptive selection are meaningless on a single process */
  if (!pcbddc->use_deluxe_scaling) pcbddc->deluxe_zerorows = PETSC_FALSE;
  pcbddc->adaptive_selection   = (PetscBool)(pcbddc->adaptive_threshold[0] != 0.0 || pcbddc->adaptive_threshold[1] != 0.0);
  pcbddc->use_deluxe_scaling   = (PetscBool)(pcbddc->use_deluxe_scaling && size > 1);
  pcbddc->adaptive_selection   = (PetscBool)(pcbddc->adaptive_selection && size > 1);
  pcbddc->adaptive_userdefined = (PetscBool)(pcbddc->adaptive_selection && pcbddc->adaptive_userdefined);
  if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE;

  /* Schur complements of subdomain problems are needed by both adaptive selection and deluxe scaling */
  computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling);

  /* activate all connected components if the netflux has been requested */
  if (pcbddc->compute_nonetflux) {
    pcbddc->use_vertices = PETSC_TRUE;
    pcbddc->use_edges    = PETSC_TRUE;
    pcbddc->use_faces    = PETSC_TRUE;
  }

  /* Get stdout for dbg; indentation tracks the multilevel recursion depth */
  if (pcbddc->dbg_flag) {
    if (!pcbddc->dbg_viewer) pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc));
    PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer));
    PetscCall(PetscViewerASCIIAddTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
  }

  /* process topology information */
  PetscCall(PetscLogEventBegin(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));
  if (pcbddc->recompute_topography) {
    PetscCall(PCBDDCComputeLocalTopologyInfo(pc));
    if (pcbddc->discretegradient) PetscCall(PCBDDCNedelecSupport(pc));
  }
  if (pcbddc->corner_selected) pcbddc->use_vertices = PETSC_TRUE;

  /* change basis if requested by the user */
  if (pcbddc->user_ChangeOfBasisMatrix) {
    /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */
    pcbddc->use_change_of_basis = PETSC_FALSE;
    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->user_ChangeOfBasisMatrix));
  } else {
    /* no user basis change: local_mat aliases matis->A (reference counted) */
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /*
     Compute change of basis on local pressures (aka zerodiag dofs) with the benign trick
     This should come earlier then PCISSetUp for extracting the correct subdomain matrices
  */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_TRUE));
  if (pcbddc->benign_saddle_point) {
    PC_IS *pcis = (PC_IS *)pc->data;

    if (pcbddc->user_ChangeOfBasisMatrix || pcbddc->use_change_of_basis || !computesubschurs) pcbddc->benign_change_explicit = PETSC_TRUE;
    /* detect local saddle point and change the basis in pcbddc->local_mat */
    PetscCall(PCBDDCBenignDetectSaddlePoint(pc, (PetscBool)(!pcbddc->recompute_topography), &zerodiag));
    /* pop B0 mat from local mat */
    PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    /* give pcis a hint to not reuse submatrices during PCISCreate */
    if (pc->flag == SAME_NONZERO_PATTERN && pcis->reusesubmatrices == PETSC_TRUE) {
      if (pcbddc->benign_n && (pcbddc->benign_change_explicit || pcbddc->dbg_flag)) {
        pcis->reusesubmatrices = PETSC_FALSE;
      } else {
        pcis->reusesubmatrices = PETSC_TRUE;
      }
    } else {
      pcis->reusesubmatrices = PETSC_FALSE;
    }
  }

  /* propagate relevant information (symmetry/SPD flags) from the user's matrix to the (possibly changed-basis) local one */
  PetscCall(MatIsSymmetricKnown(matis->A, &isset, &issym));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SYMMETRIC, issym));
  PetscCall(MatIsSPDKnown(matis->A, &isset, &isspd));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SPD, isspd));

  /* Set up all the "iterative substructuring" common block without computing solvers */
  {
    Mat temp_mat;

    /* temporarily swap local_mat into matis->A so PCISSetUp operates on the changed-basis matrix; restored right after */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_FALSE));
    pcbddc->local_mat = matis->A;
    matis->A          = temp_mat;
  }

  /* Analyze interface */
  if (!pcbddc->graphanalyzed) {
    PetscCall(PCBDDCAnalyzeInterface(pc));
    computeconstraintsmatrix = PETSC_TRUE;
    if (pcbddc->adaptive_selection && !pcbddc->use_deluxe_scaling && !pcbddc->mat_graph->twodim) {
      SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot compute the adaptive primal space for a problem with 3D edges without deluxe scaling");
    }
    if (pcbddc->compute_nonetflux) {
      MatNullSpace nnfnnsp;

      PetscCheck(pcbddc->divudotp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Missing divudotp operator");
      PetscCall(PCBDDCComputeNoNetFlux(pc->pmat, pcbddc->divudotp, pcbddc->divudotp_trans, pcbddc->divudotp_vl2l, pcbddc->mat_graph, &nnfnnsp));
      /* TODO what if a nearnullspace is already attached? */
      if (nnfnnsp) {
        PetscCall(MatSetNearNullSpace(pc->pmat, nnfnnsp));
        PetscCall(MatNullSpaceDestroy(&nnfnnsp));
      }
    }
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));

  /* check existence of a divergence free extension, i.e.
     b(v_I,p_0) = 0 for all v_I (raise error if not).
     Also, check that PCBDDCBenignGetOrSetP0 works */
  if (pcbddc->benign_saddle_point && pcbddc->dbg_flag > 1) PetscCall(PCBDDCBenignCheck(pc, zerodiag));
  PetscCall(ISDestroy(&zerodiag));

  /* Setup local dirichlet solver ksp_D and sub_schurs solvers */
  if (computesubschurs && pcbddc->recompute_topography) PetscCall(PCBDDCInitSubSchurs(pc));
  /* SetUp Scaling operator (scaling matrices could be needed in SubSchursSetUp)*/
  if (!pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* finish setup solvers and do adaptive selection of constraints;
     ordering between local solvers and sub-Schurs setup depends on whether Schur complements are formed explicitly */
  sub_schurs = pcbddc->sub_schurs;
  if (sub_schurs && sub_schurs->schur_explicit) {
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
  } else {
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
  }
  if (pcbddc->adaptive_selection) {
    PetscCall(PCBDDCAdaptiveSelection(pc));
    computeconstraintsmatrix = PETSC_TRUE;
  }

  /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
  new_nearnullspace_provided = PETSC_FALSE;
  PetscCall(MatGetNearNullSpace(pc->pmat, &nearnullspace));
  if (pcbddc->onearnullspace) { /* already used nearnullspace */
    if (!nearnullspace) {       /* near null space attached to mat has been destroyed */
      new_nearnullspace_provided = PETSC_TRUE;
    } else {
      /* determine if the two nullspaces are different (should be lightweight) */
      if (nearnullspace != pcbddc->onearnullspace) {
        new_nearnullspace_provided = PETSC_TRUE;
      } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
        PetscInt         i;
        const Vec       *nearnullvecs;
        PetscObjectState state;
        PetscInt         nnsp_size;
        PetscCall(MatNullSpaceGetVecs(nearnullspace, NULL, &nnsp_size, &nearnullvecs));
        for (i = 0; i < nnsp_size; i++) {
          PetscCall(PetscObjectStateGet((PetscObject)nearnullvecs[i], &state));
          if (pcbddc->onearnullvecs_state[i] != state) {
            new_nearnullspace_provided = PETSC_TRUE;
            break;
          }
        }
      }
    }
  } else {
    if (!nearnullspace) { /* both nearnullspaces are null */
      new_nearnullspace_provided = PETSC_FALSE;
    } else { /* nearnullspace attached later */
      new_nearnullspace_provided = PETSC_TRUE;
    }
  }

  /* Setup constraints and related work vectors */
  /* reset primal space flags */
  PetscCall(PetscLogEventBegin(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));
  pcbddc->new_primal_space       = PETSC_FALSE;
  pcbddc->new_primal_space_local = PETSC_FALSE;
  if (computeconstraintsmatrix || new_nearnullspace_provided) {
    /* It also sets the primal space flags */
    PetscCall(PCBDDCConstraintsSetUp(pc));
  }
  /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
  PetscCall(PCBDDCSetUpLocalWorkVectors(pc));

  if (pcbddc->use_change_of_basis) {
    PC_IS *pcis = (PC_IS *)(pc->data);

    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->ChangeOfBasisMatrix));
    if (pcbddc->benign_change) {
      PetscCall(MatDestroy(&pcbddc->benign_B0));
      /* pop B0 from pcbddc->local_mat */
      PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    }
    /* get submatrices: PCIS blocks must be re-extracted from the changed-basis local matrix */
    PetscCall(MatDestroy(&pcis->A_IB));
    PetscCall(MatDestroy(&pcis->A_BI));
    PetscCall(MatDestroy(&pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_I_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_IB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_I_local, MAT_INITIAL_MATRIX, &pcis->A_BI));
    /* set flag in pcis to not reuse submatrices during PCISCreate */
    pcis->reusesubmatrices = PETSC_FALSE;
  } else if (!pcbddc->user_ChangeOfBasisMatrix && !pcbddc->benign_change) {
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /* interface pressure block row for B_C (composed objects are consumed by the FETI-DP setup) */
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lP", (PetscObject *)&lP));
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lA", (PetscObject *)&lA));
  if (lA && lP) {
    PC_IS    *pcis = (PC_IS *)pc->data;
    Mat       B_BI, B_BB, Bt_BI, Bt_BB;
    PetscBool issym; /* note: shadows the function-scope issym */

    PetscCall(MatIsSymmetric(lA, PETSC_SMALL, &issym));
    if (issym) {
      /* symmetric case: transposed blocks obtained implicitly via MatCreateTranspose */
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateTranspose(B_BI, &Bt_BI));
      PetscCall(MatCreateTranspose(B_BB, &Bt_BB));
    } else {
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_I_local, lP, MAT_INITIAL_MATRIX, &Bt_BI));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_B_local, lP, MAT_INITIAL_MATRIX, &Bt_BB));
    }
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BI", (PetscObject)B_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BB", (PetscObject)B_BB));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BI", (PetscObject)Bt_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BB", (PetscObject)Bt_BB));
    /* compose keeps its own references; drop ours */
    PetscCall(MatDestroy(&B_BI));
    PetscCall(MatDestroy(&B_BB));
    PetscCall(MatDestroy(&Bt_BI));
    PetscCall(MatDestroy(&Bt_BB));
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));

  /* SetUp coarse and local Neumann solvers */
  PetscCall(PCBDDCSetUpSolvers(pc));
  /* SetUp Scaling operator */
  if (pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* mark topography as done */
  pcbddc->recompute_topography = PETSC_FALSE;

  /* wrap pcis->A_IB and pcis->A_BI if we did not change explicitly the variables on the pressures */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_FALSE));

  if (pcbddc->dbg_flag) {
    PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
    PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer));
  }
  PetscFunctionReturn(0);
}

/*
   PCApply_BDDC - Applies the BDDC operator to a vector.

   Input Parameters:
+  pc - the preconditioner context
-  r - input vector (global)

   Output Parameter:
.
 z - output vector (global)

   Application Interface Routine: PCApply()
*/
PetscErrorCode PCApply_BDDC(PC pc, Vec r, Vec z) {
  PC_IS            *pcis   = (PC_IS *)(pc->data);
  PC_BDDC          *pcbddc = (PC_BDDC *)(pc->data);
  Mat               lA     = NULL;
  PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one   = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero  = 0.0;
  /* This code is similar to that provided in nn.c for PCNN
     NN interface preconditioner changed to BDDC
     Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  /* lA is only needed by the switch_static (M_3) variant, which multiplies by the full local matrix */
  if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? pc->mat : pc->pmat, &lA));

  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* move r into the new basis; r is swapped with the work vector, and swapped back before returning */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
    swap                = pcbddc->work_change;
    pcbddc->work_change = r;
    r                   = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    if (pcbddc->benign_apply_coarse_only && pcbddc->use_exact_dirichlet_trick && pcbddc->change_interior) {
      PetscCall(VecCopy(r, pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global)); /* lock popped by PCPreSolve once the saved rhs is consumed */
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
  }
  /* static condensation of the interior: skipped if the exact dirichlet trick already zeroed the interior residual,
     or when only the coarse correction is requested (benign setup) */
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET && !pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r, z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    /*
       Assembling right hand side for BDDC operator
       - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
       - pcis->vec1_B the interface part of the global vector z
    */
    if (n_D) {
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D, m_one));
      if (pcbddc->switch_static) {
        PetscCall(VecSet(pcis->vec1_N, 0.));
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        /* apply the full local operator, conjugated by switch_static_change when present */
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      } else {
        PetscCall(MatMult(pcis->A_BI, pcis->vec2_D, pcis->vec1_B));
      }
    } else {
      PetscCall(VecSet(pcis->vec1_B, zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
  } else {
    if (!pcbddc->benign_apply_coarse_only) PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    PetscCheck(pcbddc->switch_static, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You forgot to pass -pc_bddc_switch_static");
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_FALSE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    /* lumped extension: no second Dirichlet solve, interior values come straight from the interface preconditioner */
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscFunctionReturn(0);
  }
  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMult(lA, pcis->vec1_D, pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));

  /* combine the interior corrections and scatter them into the global output z */
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D, m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    if (pcbddc->benign_apply_coarse_only) PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
    PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
  }

  if (pcbddc->ChangeOfBasisMatrix) {
    /* swap the work vector back and map z to the original basis */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z, pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
  }
  PetscFunctionReturn(0);
}

/*
   PCApplyTranspose_BDDC - Applies the transpose of the BDDC operator to a vector.

   Input Parameters:
+  pc - the preconditioner context
-  r - input vector (global)

   Output Parameter:
.  z - output vector (global)

   Application Interface Routine: PCApplyTranspose()
*/
PetscErrorCode PCApplyTranspose_BDDC(PC pc, Vec r, Vec z) {
  PC_IS            *pcis   = (PC_IS *)(pc->data);
  PC_BDDC          *pcbddc = (PC_BDDC *)(pc->data);
  Mat               lA     = NULL;
  PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one   = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero  = 0.0;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  /* lA is only needed by the switch_static variant */
  if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ?
pc->mat : pc->pmat, &lA));
  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* move r into the new basis; r is swapped with the work vector, and swapped back before returning */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
    swap                = pcbddc->work_change;
    pcbddc->work_change = r;
    r                   = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    /* NOTE(review): this condition tests exact_dirichlet_trick_app while the analogous branch in
       PCApply_BDDC tests use_exact_dirichlet_trick — confirm the asymmetry is intentional */
    if (pcbddc->benign_apply_coarse_only && pcbddc->exact_dirichlet_trick_app && pcbddc->change_interior) {
      PetscCall(VecCopy(r, pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
  }
  /* static condensation of the interior (transposed): skipped if the exact dirichlet trick already
     zeroed the interior residual, or when only the coarse correction is requested */
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r, z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    /*
       Assembling right hand side for BDDC operator
       - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
       - pcis->vec1_B the interface part of the global vector z
    */
    if (n_D) {
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D, m_one));
      if (pcbddc->switch_static) {
        PetscCall(VecSet(pcis->vec1_N, 0.));
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        /* apply the transpose of the full local operator, conjugated by switch_static_change when present */
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      } else {
        /* transpose: uses A_IB^T where PCApply_BDDC uses A_BI */
        PetscCall(MatMultTranspose(pcis->A_IB, pcis->vec2_D, pcis->vec1_B));
      }
    } else {
      PetscCall(VecSet(pcis->vec1_B, zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
  } else {
    PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
  }

  /* Apply interface preconditioner (transposed)
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_TRUE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));

  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      /* transpose: uses A_BI^T where PCApply_BDDC uses A_IB */
      PetscCall(MatMultTranspose(pcis->A_BI, pcis->vec1_B, pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMultTranspose(lA, pcis->vec1_D, pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));
  /* combine the interior corrections and scatter them into the global output z */
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D, m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
  }
  if (pcbddc->ChangeOfBasisMatrix) {
    /* swap the work vector back and map z to the original basis */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z, pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
  }
  PetscFunctionReturn(0);
}

PetscErrorCode PCReset_BDDC(PC pc) {
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PC_IS   *pcis   = (PC_IS *)pc->data;
  KSP      kspD, kspR,
kspC; 2026 2027 PetscFunctionBegin; 2028 /* free BDDC custom data */ 2029 PetscCall(PCBDDCResetCustomization(pc)); 2030 /* destroy objects related to topography */ 2031 PetscCall(PCBDDCResetTopography(pc)); 2032 /* destroy objects for scaling operator */ 2033 PetscCall(PCBDDCScalingDestroy(pc)); 2034 /* free solvers stuff */ 2035 PetscCall(PCBDDCResetSolvers(pc)); 2036 /* free global vectors needed in presolve */ 2037 PetscCall(VecDestroy(&pcbddc->temp_solution)); 2038 PetscCall(VecDestroy(&pcbddc->original_rhs)); 2039 /* free data created by PCIS */ 2040 PetscCall(PCISDestroy(pc)); 2041 2042 /* restore defaults */ 2043 kspD = pcbddc->ksp_D; 2044 kspR = pcbddc->ksp_R; 2045 kspC = pcbddc->coarse_ksp; 2046 PetscCall(PetscMemzero(pc->data, sizeof(*pcbddc))); 2047 pcis->n_neigh = -1; 2048 pcis->scaling_factor = 1.0; 2049 pcis->reusesubmatrices = PETSC_TRUE; 2050 pcbddc->use_local_adj = PETSC_TRUE; 2051 pcbddc->use_vertices = PETSC_TRUE; 2052 pcbddc->use_edges = PETSC_TRUE; 2053 pcbddc->symmetric_primal = PETSC_TRUE; 2054 pcbddc->vertex_size = 1; 2055 pcbddc->recompute_topography = PETSC_TRUE; 2056 pcbddc->coarse_size = -1; 2057 pcbddc->use_exact_dirichlet_trick = PETSC_TRUE; 2058 pcbddc->coarsening_ratio = 8; 2059 pcbddc->coarse_eqs_per_proc = 1; 2060 pcbddc->benign_compute_correction = PETSC_TRUE; 2061 pcbddc->nedfield = -1; 2062 pcbddc->nedglobal = PETSC_TRUE; 2063 pcbddc->graphmaxcount = PETSC_MAX_INT; 2064 pcbddc->sub_schurs_layers = -1; 2065 pcbddc->ksp_D = kspD; 2066 pcbddc->ksp_R = kspR; 2067 pcbddc->coarse_ksp = kspC; 2068 PetscFunctionReturn(0); 2069 } 2070 2071 PetscErrorCode PCDestroy_BDDC(PC pc) { 2072 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 2073 2074 PetscFunctionBegin; 2075 PetscCall(PCReset_BDDC(pc)); 2076 PetscCall(KSPDestroy(&pcbddc->ksp_D)); 2077 PetscCall(KSPDestroy(&pcbddc->ksp_R)); 2078 PetscCall(KSPDestroy(&pcbddc->coarse_ksp)); 2079 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", NULL)); 2080 
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
  PetscCall(PetscFree(pc->data));
  PetscFunctionReturn(0);
}

/* Store a copy of the user-provided dof coordinates in the connectivity
   graph; used for vertex/corner selection. Marks the topography as stale so
   it is rebuilt at the next setup. */
static PetscErrorCode PCSetCoordinates_BDDC(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords) {
  PC_BDDC    *pcbddc    = (PC_BDDC *)pc->data;
  PCBDDCGraph mat_graph = pcbddc->mat_graph;

  PetscFunctionBegin;
  /* replace any previously stored coordinates */
  PetscCall(PetscFree(mat_graph->coords));
  PetscCall(PetscMalloc1(nloc * dim, &mat_graph->coords));
  PetscCall(PetscArraycpy(mat_graph->coords, coords, nloc * dim));
  mat_graph->cnloc = nloc;
  mat_graph->cdim  = dim;
  mat_graph->cloc  = PETSC_FALSE;
  /* flg setup */
  pcbddc->recompute_topography = PETSC_TRUE;
  pcbddc->corner_selected      = PETSC_FALSE;
  PetscFunctionReturn(0);
}

/* BDDC always modifies the right-hand side during PCPreSolve */
static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool *change) {
  PetscFunctionBegin;
  *change = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* Build the FETI-DP flux right-hand side from the physical right-hand side
   (implementation behind PCBDDCMatFETIDPGetRHS) */
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs) {
  FETIDPMat_ctx mat_ctx;
  Vec           work;
  PC_IS        *pcis;
  PC_BDDC      *pcbddc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  /* both views alias the data of the PC wrapped in the shell context */
  pcis   = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  PetscCall(VecSet(fetidp_flux_rhs, 0.0));
  /* copy rhs since we may change it during PCPreSolve_BDDC */
  if (!pcbddc->original_rhs)
    PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (mat_ctx->rhs_flip) {
    /* flip signs of selected rhs entries (see rhs_flip setup) */
    PetscCall(VecPointwiseMult(pcbddc->original_rhs, standard_rhs, mat_ctx->rhs_flip));
  } else {
    PetscCall(VecCopy(standard_rhs, pcbddc->original_rhs));
  }
  if (mat_ctx->g2g_p) {
    /* interface pressure rhs */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    if (!mat_ctx->rhs_flip) PetscCall(VecScale(fetidp_flux_rhs, -1.));
  }
  /*
     change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries
  */
  PetscCall(PCPreSolve_BDDC(mat_ctx->pc, NULL, pcbddc->original_rhs, NULL));
  if (pcbddc->ChangeOfBasisMatrix) {
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, pcbddc->original_rhs, pcbddc->work_change));
    work = pcbddc->work_change;
  } else {
    work = pcbddc->original_rhs;
  }
  /* store vectors for computation of fetidp final solution */
  PetscCall(VecScatterBegin(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  /* Apply partition of unity */
  PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
  if (!pcbddc->switch_static) {
    /* compute partially subassembled Schur complement right-hand side */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, mat_ctx->temp_solution_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
    PetscCall(MatMult(pcis->A_BI, pcis->vec1_D, pcis->vec1_B));
    PetscCall(VecAXPY(mat_ctx->temp_solution_B, -1.0, pcis->vec1_B));
    PetscCall(VecSet(work, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
    PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  }
  /* BDDC rhs */
  PetscCall(VecCopy(mat_ctx->temp_solution_B, pcis->vec1_B));
  if (pcbddc->switch_static) PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
  /* apply BDDC */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));
  /* zero p0 again so the preconditioner application leaves no residue */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));

  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  PetscCall(MatMult(mat_ctx->B_delta, pcis->vec1_B, mat_ctx->lambda_local));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  /* Add contribution to interface pressures */
  if (mat_ctx->l2g_p) {
    PetscCall(MatMult(mat_ctx->B_BB, pcis->vec1_B, mat_ctx->vP));
    if (pcbddc->switch_static) PetscCall(MatMultAdd(mat_ctx->B_BI, pcis->vec1_D, mat_ctx->vP, mat_ctx->vP));
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(0);
}

/*@
  PCBDDCMatFETIDPGetRHS - Compute the right-hand side for FETI-DP linear system using the physical right-hand side

  Collective

  Input Parameters:
+ fetidp_mat - the FETI-DP matrix object obtained by a call to PCBDDCCreateFETIDPOperators
- standard_rhs - the right-hand side of the original linear system

  Output Parameters:
.
fetidp_flux_rhs - the right-hand side for the FETI-DP linear system

  Level: developer

  Notes:

.seealso: `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetSolution()`
@*/
PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs) {
  FETIDPMat_ctx mat_ctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
  PetscValidHeaderSpecific(standard_rhs, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(fetidp_flux_rhs, VEC_CLASSID, 3);
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  /* dispatch to the implementation composed on the underlying BDDC PC */
  PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetRHS_C", (Mat, Vec, Vec), (fetidp_mat, standard_rhs, fetidp_flux_rhs));
  PetscFunctionReturn(0);
}

/* Recover the physical solution from the FETI-DP flux solution
   (implementation behind PCBDDCMatFETIDPGetSolution) */
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol) {
  FETIDPMat_ctx mat_ctx;
  PC_IS        *pcis;
  PC_BDDC      *pcbddc;
  Vec           work;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  /* both views alias the data of the PC wrapped in the shell context */
  pcis   = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  /* apply B_delta^T */
  PetscCall(VecSet(pcis->vec1_B, 0.));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMultTranspose(mat_ctx->B_delta, mat_ctx->lambda_local, pcis->vec1_B));
  if (mat_ctx->l2g_p) {
    /* add pressure contribution */
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(MatMultAdd(mat_ctx->Bt_BB, mat_ctx->vP, pcis->vec1_B, pcis->vec1_B));
  }

  /* compute rhs for BDDC application */
  PetscCall(VecAYPX(pcis->vec1_B, -1.0, mat_ctx->temp_solution_B));
  if (pcbddc->switch_static) {
    PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
    if (mat_ctx->l2g_p) {
      PetscCall(VecScale(mat_ctx->vP, -1.));
      PetscCall(MatMultAdd(mat_ctx->Bt_BI, mat_ctx->vP, pcis->vec1_D, pcis->vec1_D));
    }
  }

  /* apply BDDC */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));

  /* put values into global vector */
  if (pcbddc->ChangeOfBasisMatrix) work = pcbddc->work_change;
  else work = standard_sol;
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  if (!pcbddc->switch_static) {
    /* compute values into the interior if solved for the partially subassembled Schur complement */
    PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec1_D));
    PetscCall(VecAYPX(pcis->vec1_D, -1.0, mat_ctx->temp_solution_D));
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
  }

  PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  /* add p0 solution to final solution */
  PetscCall(PCBDDCBenignGetOrSetP0(mat_ctx->pc, work, PETSC_FALSE));
  if (pcbddc->ChangeOfBasisMatrix) PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, work, standard_sol));
  PetscCall(PCPostSolve_BDDC(mat_ctx->pc, NULL, NULL, standard_sol));
  if (mat_ctx->g2g_p) {
    /* overwrite interface pressures with the FETI-DP solution values */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(0);
}

/* ASCII viewer for the BDDC interface preconditioner shell (BDDCIPC):
   prints a banner and delegates to the inner BDDC PC view */
static PetscErrorCode PCView_BDDCIPC(PC pc, PetscViewer viewer) {
  BDDCIPC_ctx bddcipc_ctx;
  PetscBool   isascii;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "BDDC interface preconditioner\n"));
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PCView(bddcipc_ctx->bddc, viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(0);
}

/* Setup for BDDCIPC: sets up the inner BDDC PC and (re)creates the
   global-to-interface scatter used by the apply routines */
static PetscErrorCode PCSetUp_BDDCIPC(PC pc) {
  BDDCIPC_ctx bddcipc_ctx;
  PetscBool   isbddc;
  Vec         vv;
  IS          is;
  PC_IS      *pcis;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  PetscCall(PetscObjectTypeCompare((PetscObject)bddcipc_ctx->bddc, PCBDDC, &isbddc));
  PetscCheck(isbddc, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid type %s. Must be of type bddc", ((PetscObject)bddcipc_ctx->bddc)->type_name);
  PetscCall(PCSetUp(bddcipc_ctx->bddc));

  /* create interface scatter */
  pcis = (PC_IS *)(bddcipc_ctx->bddc->data);
  PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
  PetscCall(MatCreateVecs(pc->pmat, &vv, NULL));
  PetscCall(ISRenumber(pcis->is_B_global, NULL, NULL, &is));
  PetscCall(VecScatterCreate(vv, is, pcis->vec1_B, NULL, &bddcipc_ctx->g2l));
  PetscCall(ISDestroy(&is));
  PetscCall(VecDestroy(&vv));
  PetscFunctionReturn(0);
}

/* Apply the BDDC interface preconditioner: temporarily swap in the
   interface scatter, then restriction -> interface apply -> extension */
static PetscErrorCode PCApply_BDDCIPC(PC pc, Vec r, Vec x) {
  BDDCIPC_ctx bddcipc_ctx;
  PC_IS      *pcis;
  VecScatter  tmps;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  pcis = (PC_IS *)(bddcipc_ctx->bddc->data);
  tmps = pcis->global_to_B;
  pcis->global_to_B = bddcipc_ctx->g2l;
  PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
  PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_FALSE));
  PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
  pcis->global_to_B = tmps;
  PetscFunctionReturn(0);
}

/* Transpose apply for BDDCIPC; same swap trick as PCApply_BDDCIPC with the
   transposed interface preconditioner */
static PetscErrorCode PCApplyTranspose_BDDCIPC(PC pc, Vec r, Vec x) {
  BDDCIPC_ctx bddcipc_ctx;
  PC_IS      *pcis;
  VecScatter  tmps;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  pcis = (PC_IS *)(bddcipc_ctx->bddc->data);
  tmps = pcis->global_to_B;
  pcis->global_to_B = bddcipc_ctx->g2l;
  PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
  PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_TRUE));
  PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
  pcis->global_to_B = tmps;
  PetscFunctionReturn(0);
}

/* Destroy the BDDCIPC shell context and its owned objects */
static PetscErrorCode PCDestroy_BDDCIPC(PC pc) {
  BDDCIPC_ctx bddcipc_ctx;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
  PetscCall(PCDestroy(&bddcipc_ctx->bddc));
  PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
  PetscCall(PetscFree(bddcipc_ctx));
  PetscFunctionReturn(0);
}

/*@
  PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system

  Collective

  Input Parameters:
+ fetidp_mat - the FETI-DP matrix obtained by a call to PCBDDCCreateFETIDPOperators
- fetidp_flux_sol - the solution of the FETI-DP linear system

  Output Parameters:
. standard_sol - the solution defined on the physical domain

  Level: developer

  Notes:

.seealso: `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetRHS()`
@*/
PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol) {
  FETIDPMat_ctx mat_ctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
  PetscValidHeaderSpecific(fetidp_flux_sol, VEC_CLASSID, 2);
  PetscValidHeaderSpecific(standard_sol, VEC_CLASSID, 3);
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  /* dispatch to the implementation composed on the underlying BDDC PC */
  PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetSolution_C", (Mat, Vec, Vec), (fetidp_mat, fetidp_flux_sol, standard_sol));
  PetscFunctionReturn(0);
}

/* Build the FETI-DP shell matrix and its preconditioner
   (implementation behind PCBDDCCreateFETIDPOperators) */
static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc) {
  FETIDPMat_ctx fetidpmat_ctx;
  Mat           newmat;
  FETIDPPC_ctx  fetidppc_ctx;
  PC            newpc;
  MPI_Comm      comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
  /* FETI-DP matrix */
  PetscCall(PCBDDCCreateFETIDPMatContext(pc, &fetidpmat_ctx));
  fetidpmat_ctx->fully_redundant = fully_redundant;
  PetscCall(PCBDDCSetupFETIDPMatContext(fetidpmat_ctx));
  PetscCall(MatCreateShell(comm, fetidpmat_ctx->n,
                            fetidpmat_ctx->n, fetidpmat_ctx->N, fetidpmat_ctx->N, fetidpmat_ctx, &newmat));
  /* "F" for the standard FETI-DP flux operator, "G" for the saddle-point variant */
  PetscCall(PetscObjectSetName((PetscObject)newmat, !fetidpmat_ctx->l2g_lambda_only ? "F" : "G"));
  PetscCall(MatShellSetOperation(newmat, MATOP_MULT, (void (*)(void))FETIDPMatMult));
  PetscCall(MatShellSetOperation(newmat, MATOP_MULT_TRANSPOSE, (void (*)(void))FETIDPMatMultTranspose));
  PetscCall(MatShellSetOperation(newmat, MATOP_DESTROY, (void (*)(void))PCBDDCDestroyFETIDPMat));
  /* propagate MatOptions */
  {
    PC_BDDC  *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data;
    PetscBool isset, issym;

    PetscCall(MatIsSymmetricKnown(pc->mat, &isset, &issym));
    if ((isset && issym) || pcbddc->symmetric_primal) PetscCall(MatSetOption(newmat, MAT_SYMMETRIC, PETSC_TRUE));
  }
  PetscCall(MatSetOptionsPrefix(newmat, prefix));
  PetscCall(MatAppendOptionsPrefix(newmat, "fetidp_"));
  PetscCall(MatSetUp(newmat));
  /* FETI-DP preconditioner */
  PetscCall(PCBDDCCreateFETIDPPCContext(pc, &fetidppc_ctx));
  PetscCall(PCBDDCSetupFETIDPPCContext(newmat, fetidppc_ctx));
  PetscCall(PCCreate(comm, &newpc));
  PetscCall(PCSetOperators(newpc, newmat, newmat));
  PetscCall(PCSetOptionsPrefix(newpc, prefix));
  PetscCall(PCAppendOptionsPrefix(newpc, "fetidp_"));
  PetscCall(PCSetErrorIfFailure(newpc, pc->erroriffailure));
  if (!fetidpmat_ctx->l2g_lambda_only) { /* standard FETI-DP */
    PetscCall(PCSetType(newpc, PCSHELL));
    PetscCall(PCShellSetName(newpc, "FETI-DP multipliers"));
    PetscCall(PCShellSetContext(newpc, fetidppc_ctx));
    PetscCall(PCShellSetApply(newpc, FETIDPPCApply));
    PetscCall(PCShellSetApplyTranspose(newpc, FETIDPPCApplyTranspose));
    PetscCall(PCShellSetView(newpc, FETIDPPCView));
    PetscCall(PCShellSetDestroy(newpc, PCBDDCDestroyFETIDPPC));
  } else { /* saddle-point FETI-DP */
    Mat       M;
    PetscInt  psize;
    PetscBool fake = PETSC_FALSE, isfieldsplit;

    PetscCall(ISViewFromOptions(fetidpmat_ctx->lagrange, NULL, "-lag_view"));
    PetscCall(ISViewFromOptions(fetidpmat_ctx->pressure, NULL, "-press_view"));
    PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_PPmat", (PetscObject *)&M));
    PetscCall(PCSetType(newpc, PCFIELDSPLIT));
    PetscCall(PCFieldSplitSetIS(newpc, "lag", fetidpmat_ctx->lagrange));
    PetscCall(PCFieldSplitSetIS(newpc, "p", fetidpmat_ctx->pressure));
    PetscCall(PCFieldSplitSetType(newpc, PC_COMPOSITE_SCHUR));
    PetscCall(PCFieldSplitSetSchurFactType(newpc, PC_FIELDSPLIT_SCHUR_FACT_DIAG));
    PetscCall(ISGetSize(fetidpmat_ctx->pressure, &psize));
    if (psize != M->rmap->N) {
      /* the provided pressure preconditioning matrix does not match the
         pressure space size: use an empty placeholder ("fake") matrix and
         wire the real M through the BDDCIPC shell below */
      Mat      M2;
      PetscInt lpsize;

      fake = PETSC_TRUE;
      PetscCall(ISGetLocalSize(fetidpmat_ctx->pressure, &lpsize));
      PetscCall(MatCreate(comm, &M2));
      PetscCall(MatSetType(M2, MATAIJ));
      PetscCall(MatSetSizes(M2, lpsize, lpsize, psize, psize));
      PetscCall(MatSetUp(M2));
      PetscCall(MatAssemblyBegin(M2, MAT_FINAL_ASSEMBLY));
      PetscCall(MatAssemblyEnd(M2, MAT_FINAL_ASSEMBLY));
      PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M2));
      PetscCall(MatDestroy(&M2));
    } else {
      PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M));
    }
    PetscCall(PCFieldSplitSetSchurScale(newpc, 1.0));

    /* we need to setfromoptions and setup here to access the blocks */
    PetscCall(PCSetFromOptions(newpc));
    PetscCall(PCSetUp(newpc));

    /* user may have changed the type (e.g. -fetidp_pc_type none) */
    PetscCall(PetscObjectTypeCompare((PetscObject)newpc, PCFIELDSPLIT, &isfieldsplit));
    if (isfieldsplit) {
      KSP      *ksps;
      PC        ppc, lagpc;
      PetscInt  nn;
      PetscBool ismatis, matisok = PETSC_FALSE, check = PETSC_FALSE;

      /* set the solver for the (0,0) block */
      PetscCall(PCFieldSplitSchurGetSubKSP(newpc, &nn, &ksps));
      if (!nn) { /* not of type PC_COMPOSITE_SCHUR */
        PetscCall(PCFieldSplitGetSubKSP(newpc, &nn, &ksps));
        if (!fake) { /* pass pmat to the pressure solver */
          Mat F;

          PetscCall(KSPGetOperators(ksps[1], &F, NULL));
          PetscCall(KSPSetOperators(ksps[1], F, M));
        }
      } else {
        PetscBool issym, isset;
        Mat       S;

        PetscCall(PCFieldSplitSchurGetS(newpc, &S));
        PetscCall(MatIsSymmetricKnown(newmat, &isset, &issym));
        if (isset) PetscCall(MatSetOption(S, MAT_SYMMETRIC, issym));
      }
      PetscCall(KSPGetPC(ksps[0], &lagpc));
      PetscCall(PCSetType(lagpc, PCSHELL));
      PetscCall(PCShellSetName(lagpc, "FETI-DP multipliers"));
      PetscCall(PCShellSetContext(lagpc, fetidppc_ctx));
      PetscCall(PCShellSetApply(lagpc, FETIDPPCApply));
      PetscCall(PCShellSetApplyTranspose(lagpc, FETIDPPCApplyTranspose));
      PetscCall(PCShellSetView(lagpc, FETIDPPCView));
      PetscCall(PCShellSetDestroy(lagpc, PCBDDCDestroyFETIDPPC));

      /* Olof's idea: interface Schur complement preconditioner for the mass matrix */
      PetscCall(KSPGetPC(ksps[1], &ppc));
      if (fake) {
        BDDCIPC_ctx    bddcipc_ctx;
        PetscContainer c;

        matisok = PETSC_TRUE;

        /* create inner BDDC solver */
        PetscCall(PetscNew(&bddcipc_ctx));
        PetscCall(PCCreate(comm, &bddcipc_ctx->bddc));
        PetscCall(PCSetType(bddcipc_ctx->bddc, PCBDDC));
        PetscCall(PCSetOperators(bddcipc_ctx->bddc, M, M));
        PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_pCSR", (PetscObject *)&c));
        PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
        if (c && ismatis) {
          /* reuse the CSR adjacency attached by KSPFETIDP for the local graph */
          Mat       lM;
          PetscInt *csr, n;

          PetscCall(MatISGetLocalMat(M, &lM));
          PetscCall(MatGetSize(lM, &n, NULL));
          PetscCall(PetscContainerGetPointer(c, (void **)&csr));
          PetscCall(PCBDDCSetLocalAdjacencyGraph(bddcipc_ctx->bddc, n, csr, csr + (n + 1), PETSC_COPY_VALUES));
          PetscCall(MatISRestoreLocalMat(M, &lM));
        }
        PetscCall(PCSetOptionsPrefix(bddcipc_ctx->bddc, ((PetscObject)ksps[1])->prefix));
        PetscCall(PCSetErrorIfFailure(bddcipc_ctx->bddc, pc->erroriffailure));
        PetscCall(PCSetFromOptions(bddcipc_ctx->bddc));

        /* wrap the interface application */
        PetscCall(PCSetType(ppc, PCSHELL));
        PetscCall(PCShellSetName(ppc, "FETI-DP pressure"));
        PetscCall(PCShellSetContext(ppc, bddcipc_ctx));
        PetscCall(PCShellSetSetUp(ppc, PCSetUp_BDDCIPC));
        PetscCall(PCShellSetApply(ppc, PCApply_BDDCIPC));
        PetscCall(PCShellSetApplyTranspose(ppc, PCApplyTranspose_BDDCIPC));
        PetscCall(PCShellSetView(ppc, PCView_BDDCIPC));
        PetscCall(PCShellSetDestroy(ppc, PCDestroy_BDDCIPC));
      }

      /* determine if we need to assemble M to construct a preconditioner */
      if (!matisok) {
        PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
        PetscCall(PetscObjectTypeCompareAny((PetscObject)ppc, &matisok, PCBDDC, PCJACOBI, PCNONE, PCMG, ""));
        if (ismatis && !matisok) PetscCall(MatConvert(M, MATAIJ, MAT_INPLACE_MATRIX, &M));
      }

      /* run the subproblems to check convergence */
      PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)newmat)->prefix, "-check_saddlepoint", &check, NULL));
      if (check) {
        PetscInt i;

        for (i = 0; i < nn; i++) {
          KSP       kspC;
          PC        pc;
          Mat       F, pF;
          Vec       x, y;
          PetscBool isschur, prec = PETSC_TRUE;

          PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksps[i]), &kspC));
          PetscCall(KSPSetOptionsPrefix(kspC, ((PetscObject)ksps[i])->prefix));
          PetscCall(KSPAppendOptionsPrefix(kspC, "check_"));
          PetscCall(KSPGetOperators(ksps[i], &F, &pF));
          PetscCall(PetscObjectTypeCompare((PetscObject)F, MATSCHURCOMPLEMENT, &isschur));
          if (isschur) {
            /* rebuild the Schur complement so the check has its own inner KSP */
            KSP  kspS, kspS2;
            Mat  A00, pA00, A10, A01, A11;
            char prefix[256];

            PetscCall(MatSchurComplementGetKSP(F, &kspS));
            PetscCall(MatSchurComplementGetSubMatrices(F, &A00, &pA00, &A01, &A10, &A11));
            PetscCall(MatCreateSchurComplement(A00, pA00, A01, A10, A11, &F));
            PetscCall(MatSchurComplementGetKSP(F, &kspS2));
            PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sschur_", ((PetscObject)kspC)->prefix));
            PetscCall(KSPSetOptionsPrefix(kspS2, prefix));
            PetscCall(KSPGetPC(kspS2, &pc));
            PetscCall(PCSetType(pc, PCKSP));
            PetscCall(PCKSPSetKSP(pc, kspS));
            PetscCall(KSPSetFromOptions(kspS2));
            PetscCall(KSPGetPC(kspS2, &pc));
            PetscCall(PCSetUseAmat(pc, PETSC_TRUE));
          } else {
            PetscCall(PetscObjectReference((PetscObject)F));
          }
          PetscCall(KSPSetFromOptions(kspC));
          PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)kspC)->prefix, "-preconditioned", &prec, NULL));
          if (prec) {
            PetscCall(KSPGetPC(ksps[i], &pc));
            PetscCall(KSPSetPC(kspC, pc));
          }
          PetscCall(KSPSetOperators(kspC, F, pF));
          /* solve F x = F x0 for a random x0 and check the recovered solution */
          PetscCall(MatCreateVecs(F, &x, &y));
          PetscCall(VecSetRandom(x, NULL));
          PetscCall(MatMult(F, x, y));
          PetscCall(KSPSolve(kspC, y, x));
          PetscCall(KSPCheckSolve(kspC, pc, x));
          PetscCall(KSPDestroy(&kspC));
          PetscCall(MatDestroy(&F));
          PetscCall(VecDestroy(&x));
          PetscCall(VecDestroy(&y));
        }
      }
      PetscCall(PetscFree(ksps));
    }
  }
  /* return pointers for objects created */
  *fetidp_mat = newmat;
  *fetidp_pc  = newpc;
  PetscFunctionReturn(0);
}

/*@C
  PCBDDCCreateFETIDPOperators - Create FETI-DP operators

  Collective

  Input Parameters:
+ pc - the BDDC preconditioning
context (setup should have been called before) 2661 . fully_redundant - true for a fully redundant set of Lagrange multipliers 2662 - prefix - optional options database prefix for the objects to be created (can be NULL) 2663 2664 Output Parameters: 2665 + fetidp_mat - shell FETI-DP matrix object 2666 - fetidp_pc - shell Dirichlet preconditioner for FETI-DP matrix 2667 2668 Level: developer 2669 2670 Notes: 2671 Currently the only operations provided for FETI-DP matrix are MatMult and MatMultTranspose 2672 2673 .seealso: `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()` 2674 @*/ 2675 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc) { 2676 PetscFunctionBegin; 2677 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2678 if (pc->setupcalled) { 2679 PetscUseMethod(pc, "PCBDDCCreateFETIDPOperators_C", (PC, PetscBool, const char *, Mat *, PC *), (pc, fully_redundant, prefix, fetidp_mat, fetidp_pc)); 2680 } else SETERRQ(PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You must call PCSetup_BDDC() first"); 2681 PetscFunctionReturn(0); 2682 } 2683 /* -------------------------------------------------------------------------- */ 2684 /*MC 2685 PCBDDC - Balancing Domain Decomposition by Constraints. 2686 2687 An implementation of the BDDC preconditioner based on the bibliography found below. 2688 2689 The matrix to be preconditioned (Pmat) must be of type MATIS. 2690 2691 Currently works with MATIS matrices with local matrices of type MATSEQAIJ, MATSEQBAIJ or MATSEQSBAIJ, either with real or complex numbers. 2692 2693 It also works with unsymmetric and indefinite problems. 2694 2695 Unlike 'conventional' interface preconditioners, PCBDDC iterates over all degrees of freedom, not just those on the interface. This allows the use of approximate solvers on the subdomains. 
2696 2697 Approximate local solvers are automatically adapted (see [1]) if the user has attached a nullspace object to the subdomain matrices, and informed BDDC of using approximate solvers (via the command line). 2698 2699 Boundary nodes are split in vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes. The latter can be customized by using PCBDDCSetLocalAdjacencyGraph() 2700 Additional information on dofs can be provided by using PCBDDCSetDofsSplitting(), PCBDDCSetDirichletBoundaries(), PCBDDCSetNeumannBoundaries(), and PCBDDCSetPrimalVerticesIS() and their local counterparts. 2701 2702 Constraints can be customized by attaching a MatNullSpace object to the MATIS matrix via MatSetNearNullSpace(). Non-singular modes are retained via SVD. 2703 2704 Change of basis is performed similarly to [2] when requested. When more than one constraint is present on a single connected component (i.e. an edge or a face), a robust method based on local QR factorizations is used. 2705 User defined change of basis can be passed to PCBDDC by using PCBDDCSetChangeOfBasisMat() 2706 2707 The PETSc implementation also supports multilevel BDDC [3]. Coarse grids are partitioned using a MatPartitioning object. 2708 2709 Adaptive selection of primal constraints [4] is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present. Future versions of the code will also consider using PASTIX. 2710 2711 An experimental interface to the FETI-DP method is available. FETI-DP operators could be created using PCBDDCCreateFETIDPOperators(). A stand-alone class for the FETI-DP method will be provided in the next releases. 2712 2713 Options Database Keys (some of them, run with -help for a complete list): 2714 2715 + -pc_bddc_use_vertices <true> - use or not vertices in primal space 2716 . -pc_bddc_use_edges <true> - use or not edges in primal space 2717 . 
-pc_bddc_use_faces <false> - use or not faces in primal space
.  -pc_bddc_symmetric <true> - symmetric computation of primal basis functions. Specify false for unsymmetric problems
.  -pc_bddc_use_change_of_basis <false> - use change of basis approach (on edges only)
.  -pc_bddc_use_change_on_faces <false> - use change of basis approach on faces if change of basis has been requested
.  -pc_bddc_switch_static <false> - switches from M_2 (default) to M_3 operator (see reference article [1])
.  -pc_bddc_levels <0> - maximum number of levels for multilevel
.  -pc_bddc_coarsening_ratio <8> - number of subdomains which will be aggregated together at the coarser level (e.g. H/h ratio at the coarser level, significant only in the multilevel case)
.  -pc_bddc_coarse_redistribute <0> - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
.  -pc_bddc_use_deluxe_scaling <false> - use deluxe scaling
.  -pc_bddc_schur_layers <\-1> - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
.  -pc_bddc_adaptive_threshold <0.0> - when a value different from zero is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS or MKL_PARDISO installed)
-  -pc_bddc_check_level <0> - set verbosity level of debugging output

   Options for Dirichlet, Neumann or coarse solver can be set with
.vb
      -pc_bddc_dirichlet_
      -pc_bddc_neumann_
      -pc_bddc_coarse_
.ve
   e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. PCBDDC uses by default KSPPREONLY and PCLU.
2737 2738 When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified as 2739 .vb 2740 -pc_bddc_dirichlet_lN_ 2741 -pc_bddc_neumann_lN_ 2742 -pc_bddc_coarse_lN_ 2743 .ve 2744 Note that level number ranges from the finest (0) to the coarsest (N). 2745 In order to specify options for the BDDC operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_l to the option, e.g. 2746 .vb 2747 -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3 2748 .ve 2749 will use a threshold of 5 for constraints' selection at the first coarse level and will redistribute the coarse problem of the first coarse level on 3 processors 2750 2751 References: 2752 + * - C. R. Dohrmann. "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149--168, March 2007 2753 . * - A. Klawonn and O. B. Widlund. "Dual-Primal FETI Methods for Linear Elasticity", Communications on Pure and Applied Mathematics Volume 59, Issue 11, pages 1523--1572, November 2006 2754 . * - J. Mandel, B. Sousedik, C. R. Dohrmann. "Multispace and Multilevel BDDC", Computing Volume 83, Issue 2--3, pages 55--85, November 2008 2755 - * - C. Pechstein and C. R. Dohrmann. 
"Modern domain decomposition methods BDDC, deluxe scaling, and an algebraic approach", Seminar talk, Linz, December 2013, http://people.ricam.oeaw.ac.at/c.pechstein/pechstein-bddc2013.pdf 2756 2757 Level: intermediate 2758 2759 Developer Notes: 2760 2761 Contributed by Stefano Zampini 2762 2763 .seealso: `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS` 2764 M*/ 2765 2766 PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc) { 2767 PC_BDDC *pcbddc; 2768 2769 PetscFunctionBegin; 2770 PetscCall(PetscNewLog(pc, &pcbddc)); 2771 pc->data = pcbddc; 2772 2773 /* create PCIS data structure */ 2774 PetscCall(PCISCreate(pc)); 2775 2776 /* create local graph structure */ 2777 PetscCall(PCBDDCGraphCreate(&pcbddc->mat_graph)); 2778 2779 /* BDDC nonzero defaults */ 2780 pcbddc->use_nnsp = PETSC_TRUE; 2781 pcbddc->use_local_adj = PETSC_TRUE; 2782 pcbddc->use_vertices = PETSC_TRUE; 2783 pcbddc->use_edges = PETSC_TRUE; 2784 pcbddc->symmetric_primal = PETSC_TRUE; 2785 pcbddc->vertex_size = 1; 2786 pcbddc->recompute_topography = PETSC_TRUE; 2787 pcbddc->coarse_size = -1; 2788 pcbddc->use_exact_dirichlet_trick = PETSC_TRUE; 2789 pcbddc->coarsening_ratio = 8; 2790 pcbddc->coarse_eqs_per_proc = 1; 2791 pcbddc->benign_compute_correction = PETSC_TRUE; 2792 pcbddc->nedfield = -1; 2793 pcbddc->nedglobal = PETSC_TRUE; 2794 pcbddc->graphmaxcount = PETSC_MAX_INT; 2795 pcbddc->sub_schurs_layers = -1; 2796 pcbddc->adaptive_threshold[0] = 0.0; 2797 pcbddc->adaptive_threshold[1] = 0.0; 2798 2799 /* function pointers */ 2800 pc->ops->apply = PCApply_BDDC; 2801 pc->ops->applytranspose = PCApplyTranspose_BDDC; 2802 pc->ops->setup = PCSetUp_BDDC; 2803 pc->ops->destroy = PCDestroy_BDDC; 2804 pc->ops->setfromoptions = PCSetFromOptions_BDDC; 2805 pc->ops->view = PCView_BDDC; 2806 pc->ops->applyrichardson = NULL; 2807 pc->ops->applysymmetricleft = NULL; 2808 pc->ops->applysymmetricright = NULL; 2809 pc->ops->presolve = PCPreSolve_BDDC; 2810 pc->ops->postsolve = PCPostSolve_BDDC; 2811 pc->ops->reset = 
PCReset_BDDC; 2812 2813 /* composing function */ 2814 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", PCBDDCSetDiscreteGradient_BDDC)); 2815 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", PCBDDCSetDivergenceMat_BDDC)); 2816 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", PCBDDCSetChangeOfBasisMat_BDDC)); 2817 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", PCBDDCSetPrimalVerticesLocalIS_BDDC)); 2818 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", PCBDDCSetPrimalVerticesIS_BDDC)); 2819 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", PCBDDCGetPrimalVerticesLocalIS_BDDC)); 2820 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", PCBDDCGetPrimalVerticesIS_BDDC)); 2821 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", PCBDDCSetCoarseningRatio_BDDC)); 2822 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", PCBDDCSetLevel_BDDC)); 2823 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", PCBDDCSetUseExactDirichlet_BDDC)); 2824 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", PCBDDCSetLevels_BDDC)); 2825 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", PCBDDCSetDirichletBoundaries_BDDC)); 2826 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", PCBDDCSetDirichletBoundariesLocal_BDDC)); 2827 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", PCBDDCSetNeumannBoundaries_BDDC)); 2828 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", PCBDDCSetNeumannBoundariesLocal_BDDC)); 2829 PetscCall(PetscObjectComposeFunction((PetscObject)pc, 
"PCBDDCGetDirichletBoundaries_C", PCBDDCGetDirichletBoundaries_BDDC)); 2830 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", PCBDDCGetDirichletBoundariesLocal_BDDC)); 2831 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", PCBDDCGetNeumannBoundaries_BDDC)); 2832 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", PCBDDCGetNeumannBoundariesLocal_BDDC)); 2833 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", PCBDDCSetDofsSplitting_BDDC)); 2834 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", PCBDDCSetDofsSplittingLocal_BDDC)); 2835 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", PCBDDCSetLocalAdjacencyGraph_BDDC)); 2836 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", PCBDDCCreateFETIDPOperators_BDDC)); 2837 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", PCBDDCMatFETIDPGetRHS_BDDC)); 2838 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", PCBDDCMatFETIDPGetSolution_BDDC)); 2839 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", PCPreSolveChangeRHS_BDDC)); 2840 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_BDDC)); 2841 PetscFunctionReturn(0); 2842 } 2843 2844 /*@C 2845 PCBDDCInitializePackage - This function initializes everything in the PCBDDC package. It is called 2846 from PCInitializePackage(). 
2847 2848 Level: developer 2849 2850 .seealso: `PetscInitialize()` 2851 @*/ 2852 PetscErrorCode PCBDDCInitializePackage(void) { 2853 int i; 2854 2855 PetscFunctionBegin; 2856 if (PCBDDCPackageInitialized) PetscFunctionReturn(0); 2857 PCBDDCPackageInitialized = PETSC_TRUE; 2858 PetscCall(PetscRegisterFinalize(PCBDDCFinalizePackage)); 2859 2860 /* general events */ 2861 PetscCall(PetscLogEventRegister("PCBDDCTopo", PC_CLASSID, &PC_BDDC_Topology[0])); 2862 PetscCall(PetscLogEventRegister("PCBDDCLKSP", PC_CLASSID, &PC_BDDC_LocalSolvers[0])); 2863 PetscCall(PetscLogEventRegister("PCBDDCLWor", PC_CLASSID, &PC_BDDC_LocalWork[0])); 2864 PetscCall(PetscLogEventRegister("PCBDDCCorr", PC_CLASSID, &PC_BDDC_CorrectionSetUp[0])); 2865 PetscCall(PetscLogEventRegister("PCBDDCASet", PC_CLASSID, &PC_BDDC_ApproxSetUp[0])); 2866 PetscCall(PetscLogEventRegister("PCBDDCAApp", PC_CLASSID, &PC_BDDC_ApproxApply[0])); 2867 PetscCall(PetscLogEventRegister("PCBDDCCSet", PC_CLASSID, &PC_BDDC_CoarseSetUp[0])); 2868 PetscCall(PetscLogEventRegister("PCBDDCCKSP", PC_CLASSID, &PC_BDDC_CoarseSolver[0])); 2869 PetscCall(PetscLogEventRegister("PCBDDCAdap", PC_CLASSID, &PC_BDDC_AdaptiveSetUp[0])); 2870 PetscCall(PetscLogEventRegister("PCBDDCScal", PC_CLASSID, &PC_BDDC_Scaling[0])); 2871 PetscCall(PetscLogEventRegister("PCBDDCSchr", PC_CLASSID, &PC_BDDC_Schurs[0])); 2872 PetscCall(PetscLogEventRegister("PCBDDCDirS", PC_CLASSID, &PC_BDDC_Solves[0][0])); 2873 PetscCall(PetscLogEventRegister("PCBDDCNeuS", PC_CLASSID, &PC_BDDC_Solves[0][1])); 2874 PetscCall(PetscLogEventRegister("PCBDDCCoaS", PC_CLASSID, &PC_BDDC_Solves[0][2])); 2875 for (i = 1; i < PETSC_PCBDDC_MAXLEVELS; i++) { 2876 char ename[32]; 2877 2878 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCTopo l%02d", i)); 2879 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Topology[i])); 2880 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLKSP l%02d", i)); 2881 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, 
&PC_BDDC_LocalSolvers[i])); 2882 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLWor l%02d", i)); 2883 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalWork[i])); 2884 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCorr l%02d", i)); 2885 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CorrectionSetUp[i])); 2886 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCASet l%02d", i)); 2887 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxSetUp[i])); 2888 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAApp l%02d", i)); 2889 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxApply[i])); 2890 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCSet l%02d", i)); 2891 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSetUp[i])); 2892 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCKSP l%02d", i)); 2893 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSolver[i])); 2894 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAdap l%02d", i)); 2895 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_AdaptiveSetUp[i])); 2896 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCScal l%02d", i)); 2897 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Scaling[i])); 2898 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCSchr l%02d", i)); 2899 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Schurs[i])); 2900 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCDirS l%02d", i)); 2901 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][0])); 2902 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCNeuS l%02d", i)); 2903 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][1])); 2904 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCoaS l%02d", i)); 2905 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][2])); 2906 } 2907 PetscFunctionReturn(0); 2908 } 2909 2910 /*@C 2911 
PCBDDCFinalizePackage - This function frees everything from the PCBDDC package. It is
   called from PetscFinalize() automatically.

   Level: developer

.seealso: `PetscFinalize()`
@*/
PetscErrorCode PCBDDCFinalizePackage(void) {
  PetscFunctionBegin;
  /* resetting the flag allows PCBDDCInitializePackage() to re-register the
     log events if PETSc is initialized again */
  PCBDDCPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(0);
}