#include <petsc/private/pcbddcimpl.h> /*I "petscpc.h" I*/ /* header file for Fortran wrappers */
#include <petsc/private/pcbddcprivateimpl.h>
#include <petscblaslapack.h>

static PetscBool PCBDDCPackageInitialized = PETSC_FALSE;

/* Reference paper for PCBDDC; registered once via PetscCitationsRegister */
static PetscBool  cited      = PETSC_FALSE;
static const char citation[] = "@article{ZampiniPCBDDC,\n"
                               "author = {Stefano Zampini},\n"
                               "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n"
                               "journal = {SIAM Journal on Scientific Computing},\n"
                               "volume = {38},\n"
                               "number = {5},\n"
                               "pages = {S282-S306},\n"
                               "year = {2016},\n"
                               "doi = {10.1137/15M1025785},\n"
                               "URL = {http://dx.doi.org/10.1137/15M1025785},\n"
                               "eprint = {http://dx.doi.org/10.1137/15M1025785}\n"
                               "}\n";

/* Logging events, one entry per multilevel level (plus 3 solve phases for PC_BDDC_Solves) */
PetscLogEvent PC_BDDC_Topology[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_LocalSolvers[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_LocalWork[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CorrectionSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_ApproxSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_ApproxApply[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CoarseSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CoarseSolver[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_AdaptiveSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Scaling[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Schurs[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Solves[PETSC_PCBDDC_MAXLEVELS][3];

/* String table for the PCBDDCInterfaceExtType enum (used by PetscOptionsEnum below) */
const char *const PCBDDCInterfaceExtTypes[] = {"DIRICHLET", "LUMP", "PCBDDCInterfaceExtType", "PC_BDDC_INTERFACE_EXT_", NULL};

/* forward declaration; definition appears later in this file */
static PetscErrorCode PCApply_BDDC(PC, Vec, Vec);

/* Process the command line/database options for the BDDC preconditioner */
static PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems *PetscOptionsObject)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscInt nt, i;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "BDDC options");
  /* Verbose debugging */
  PetscCall(PetscOptionsInt("-pc_bddc_check_level", "Verbose output for PCBDDC (intended for debug)", "none", pcbddc->dbg_flag, &pcbddc->dbg_flag, NULL));
  /* Approximate solvers */
  PetscCall(PetscOptionsEnum("-pc_bddc_interface_ext_type", "Use DIRICHLET or LUMP to extend interface corrections to interior", "PCBDDCSetInterfaceExtType", PCBDDCInterfaceExtTypes, (PetscEnum)pcbddc->interface_extension, (PetscEnum *)&pcbddc->interface_extension, NULL));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET) {
    PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate", "Inform PCBDDC that we are using approximate Dirichlet solvers", "none", pcbddc->NullSpace_corr[0], &pcbddc->NullSpace_corr[0], NULL));
    PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate_scale", "Inform PCBDDC that we need to scale the Dirichlet solve", "none", pcbddc->NullSpace_corr[1], &pcbddc->NullSpace_corr[1], NULL));
  } else {
    /* This flag is needed/implied by lumping */
    pcbddc->switch_static = PETSC_TRUE;
  }
  PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate", "Inform PCBDDC that we are using approximate Neumann solvers", "none", pcbddc->NullSpace_corr[2], &pcbddc->NullSpace_corr[2], NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale", "Inform PCBDDC that we need to scale the Neumann solve", "none", pcbddc->NullSpace_corr[3], &pcbddc->NullSpace_corr[3], NULL));
  /* Primal space customization */
  PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph", "Use or not adjacency graph of local mat for interface analysis", "none", pcbddc->use_local_adj, &pcbddc->use_local_adj, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount", "Maximum number of shared subdomains for a connected component", "none", pcbddc->graphmaxcount, &pcbddc->graphmaxcount, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_corner_selection", "Activates face-based corner selection", "none", pcbddc->corner_selection, &pcbddc->corner_selection, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_vertices", "Use or not corner dofs in coarse space", "none", pcbddc->use_vertices, &pcbddc->use_vertices, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_edges", "Use or not edge constraints in coarse space", "none", pcbddc->use_edges, &pcbddc->use_edges, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_faces", "Use or not face constraints in coarse space", "none", pcbddc->use_faces, &pcbddc->use_faces, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_vertex_size", "Connected components smaller or equal to vertex size will be considered as primal vertices", "none", pcbddc->vertex_size, &pcbddc->vertex_size, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp", "Use near null space attached to the matrix to compute constraints", "none", pcbddc->use_nnsp, &pcbddc->use_nnsp, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp_true", "Use near null space attached to the matrix to compute constraints as is", "none", pcbddc->use_nnsp_true, &pcbddc->use_nnsp_true, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_qr_single", "Use QR factorization for single constraints on cc (QR is always used when multiple constraints are present)", "none", pcbddc->use_qr_single, &pcbddc->use_qr_single, NULL));
  /* Change of basis */
  PetscCall(PetscOptionsBool("-pc_bddc_use_change_of_basis", "Use or not internal change of basis on local edge nodes", "none", pcbddc->use_change_of_basis, &pcbddc->use_change_of_basis, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_change_on_faces", "Use or not internal change of basis on local face nodes", "none", pcbddc->use_change_on_faces, &pcbddc->use_change_on_faces, NULL));
  /* change on faces only makes sense when the edge change of basis is active */
  if (!pcbddc->use_change_of_basis) pcbddc->use_change_on_faces = PETSC_FALSE;
  /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */
  PetscCall(PetscOptionsBool("-pc_bddc_switch_static", "Switch on static condensation ops around the interface preconditioner", "none", pcbddc->switch_static, &pcbddc->switch_static, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_per_proc", "Target number of equations per process for coarse problem redistribution (significant only at the coarsest level)", "none", pcbddc->coarse_eqs_per_proc, &pcbddc->coarse_eqs_per_proc, NULL));
  /* these two options are routed through the setter functions so their side effects apply */
  i = pcbddc->coarsening_ratio;
  PetscCall(PetscOptionsInt("-pc_bddc_coarsening_ratio", "Set coarsening ratio used in multilevel coarsening", "PCBDDCSetCoarseningRatio", i, &i, NULL));
  PetscCall(PCBDDCSetCoarseningRatio(pc, i));
  i = pcbddc->max_levels;
  PetscCall(PetscOptionsInt("-pc_bddc_levels", "Set maximum number of levels for multilevel", "PCBDDCSetLevels", i, &i, NULL));
  PetscCall(PCBDDCSetLevels(pc, i));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_limit", "Set maximum number of equations on coarsest grid to aim for", "none", pcbddc->coarse_eqs_limit, &pcbddc->coarse_eqs_limit, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_coarse_estimates", "Use estimated eigenvalues for coarse problem", "none", pcbddc->use_coarse_estimates, &pcbddc->use_coarse_estimates, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_deluxe_scaling", "Use deluxe scaling for BDDC", "none", pcbddc->use_deluxe_scaling, &pcbddc->use_deluxe_scaling, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_rebuild", "Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. define the interface without any adjacency)", "none", pcbddc->sub_schurs_rebuild, &pcbddc->sub_schurs_rebuild, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_schur_layers", "Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)", "none", pcbddc->sub_schurs_layers, &pcbddc->sub_schurs_layers, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_use_useradj", "Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)", "none", pcbddc->sub_schurs_use_useradj, &pcbddc->sub_schurs_use_useradj, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_exact", "Whether or not to use the exact Schur complement instead of the reduced one (which excludes size 1 cc)", "none", pcbddc->sub_schurs_exact_schur, &pcbddc->sub_schurs_exact_schur, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_deluxe_zerorows", "Zero rows and columns of deluxe operators associated with primal dofs", "none", pcbddc->deluxe_zerorows, &pcbddc->deluxe_zerorows, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_deluxe_singlemat", "Collapse deluxe operators", "none", pcbddc->deluxe_singlemat, &pcbddc->deluxe_singlemat, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_adaptive_userdefined", "Use user-defined constraints (should be attached via MatSetNearNullSpace to pmat) in addition to those adaptively generated", "none", pcbddc->adaptive_userdefined, &pcbddc->adaptive_userdefined, NULL));
  /* up to two thresholds may be given; a single value is used for both */
  nt = 2;
  PetscCall(PetscOptionsRealArray("-pc_bddc_adaptive_threshold", "Thresholds to be used for adaptive selection of constraints", "none", pcbddc->adaptive_threshold, &nt, NULL));
  if (nt == 1) pcbddc->adaptive_threshold[1] = pcbddc->adaptive_threshold[0];
  PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmin", "Minimum number of constraints per connected components", "none", pcbddc->adaptive_nmin, &pcbddc->adaptive_nmin, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmax", "Maximum number of constraints per connected components", "none", pcbddc->adaptive_nmax, &pcbddc->adaptive_nmax, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_symmetric", "Symmetric computation of primal basis functions", "none", pcbddc->symmetric_primal, &pcbddc->symmetric_primal, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_adj", "Number of processors where to map the coarse adjacency list", "none", pcbddc->coarse_adj_red, &pcbddc->coarse_adj_red, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_trick", "Apply the benign subspace trick to saddle point problems with discontinuous pressures", "none", pcbddc->benign_saddle_point, &pcbddc->benign_saddle_point, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_change", "Compute the pressure change of basis explicitly", "none", pcbddc->benign_change_explicit, &pcbddc->benign_change_explicit, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_compute_correction", "Compute the benign correction during PreSolve", "none", pcbddc->benign_compute_correction, &pcbddc->benign_compute_correction, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_nonetflux", "Automatic computation of no-net-flux quadrature weights", "none", pcbddc->compute_nonetflux, &pcbddc->compute_nonetflux, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected", "Detects disconnected subdomains", "none", pcbddc->detect_disconnected, &pcbddc->detect_disconnected, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected_filter", "Filters out small entries in the local matrix when detecting disconnected subdomains", "none", pcbddc->detect_disconnected_filter, &pcbddc->detect_disconnected_filter, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_eliminate_dirichlet", "Whether or not we want to eliminate dirichlet dofs during presolve", "none", pcbddc->eliminate_dirdofs, &pcbddc->eliminate_dirdofs, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Print the configuration and (if already set up) decomposition statistics and
   sub-solver information for the BDDC preconditioner (ASCII viewers only) */
static PetscErrorCode PCView_BDDC(PC pc, PetscViewer viewer)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  /* NOTE(review): pc->data is cast to both PC_BDDC* and PC_IS*; this relies on
     PC_BDDC extending PC_IS as its first member (see pcbddcimpl.h) — confirm there */
  PC_IS        *pcis = (PC_IS *)pc->data;
  PetscBool     isascii;
  PetscSubcomm  subcomm;
  PetscViewer   subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  /* ASCII viewer */
  if (isascii) {
    PetscMPIInt color, rank, size;
    PetscInt64  loc[7], gsum[6], gmax[6], gmin[6], totbenign;
    PetscScalar interface_size;
    PetscReal   ratio1 = 0., ratio2 = 0.;
    Vec         counter;

    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, " Partial information available: preconditioner has not been setup yet\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use verbose output: %" PetscInt_FMT "\n", pcbddc->dbg_flag));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use user-defined CSR: %d\n", !!pcbddc->mat_graph->nvtxs_csr));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use local mat graph: %d\n", pcbddc->use_local_adj && !pcbddc->mat_graph->nvtxs_csr));
    if (pcbddc->mat_graph->twodim) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Connectivity graph topological dimension: 2\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Connectivity graph topological dimension: 3\n"));
    }
    if (pcbddc->graphmaxcount != PETSC_MAX_INT) PetscCall(PetscViewerASCIIPrintf(viewer, " Graph max count: %" PetscInt_FMT "\n", pcbddc->graphmaxcount));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Corner selection: %d (selected %d)\n", pcbddc->corner_selection, pcbddc->corner_selected));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use vertices: %d (vertex size %" PetscInt_FMT ")\n", pcbddc->use_vertices, pcbddc->vertex_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use edges: %d\n", pcbddc->use_edges));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use faces: %d\n", pcbddc->use_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use true near null space: %d\n", pcbddc->use_nnsp_true));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use QR for single constraints on cc: %d\n", pcbddc->use_qr_single));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use change of basis on local edge nodes: %d\n", pcbddc->use_change_of_basis));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use change of basis on local face nodes: %d\n", pcbddc->use_change_on_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, " User defined change of basis matrix: %d\n", !!pcbddc->user_ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Has change of basis matrix: %d\n", !!pcbddc->ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Eliminate dirichlet boundary dofs: %d\n", pcbddc->eliminate_dirdofs));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Switch on static condensation ops around the interface preconditioner: %d\n", pcbddc->switch_static));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use exact dirichlet trick: %d\n", pcbddc->use_exact_dirichlet_trick));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interface extension: %s\n", PCBDDCInterfaceExtTypes[pcbddc->interface_extension]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Multilevel max levels: %" PetscInt_FMT "\n", pcbddc->max_levels));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Multilevel coarsening ratio: %" PetscInt_FMT "\n", pcbddc->coarsening_ratio));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use estimated eigs for coarse problem: %d\n", pcbddc->use_coarse_estimates));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe scaling: %d\n", pcbddc->use_deluxe_scaling));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe zerorows: %d\n", pcbddc->deluxe_zerorows));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use deluxe singlemat: %d\n", pcbddc->deluxe_singlemat));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Rebuild interface graph for Schur principal minors: %d\n", pcbddc->sub_schurs_rebuild));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Number of dofs' layers for the computation of principal minors: %" PetscInt_FMT "\n", pcbddc->sub_schurs_layers));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Use user CSR graph to compute successive layers: %d\n", pcbddc->sub_schurs_use_useradj));
    if (pcbddc->adaptive_threshold[1] != pcbddc->adaptive_threshold[0]) {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Adaptive constraint selection thresholds (active %d, userdefined %d): %g,%g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0], (double)pcbddc->adaptive_threshold[1]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, " Adaptive constraint selection threshold (active %d, userdefined %d): %g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0]));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, " Min constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmin));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Max constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmax));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Invert exact Schur complement for adaptive selection: %d\n", pcbddc->sub_schurs_exact_schur));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Symmetric computation of primal basis functions: %d\n", pcbddc->symmetric_primal));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Num. Procs. to map coarse adjacency list: %" PetscInt_FMT "\n", pcbddc->coarse_adj_red));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarse eqs per proc (significant at the coarsest level): %" PetscInt_FMT "\n", pcbddc->coarse_eqs_per_proc));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Detect disconnected: %d (filter %d)\n", pcbddc->detect_disconnected, pcbddc->detect_disconnected_filter));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subspace trick: %d (change explicit %d)\n", pcbddc->benign_saddle_point, pcbddc->benign_change_explicit));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subspace trick is active: %d\n", pcbddc->benign_have_null));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Algebraic computation of no-net-flux: %d\n", pcbddc->compute_nonetflux));
    /* the statistics below require a completed setup */
    if (!pc->setupcalled) PetscFunctionReturn(PETSC_SUCCESS);

    /* compute interface size: scatter ones from the local interface back to a
       global vector and sum (each interface dof contributes once per subdomain sharing it) */
    PetscCall(VecSet(pcis->vec1_B, 1.0));
    PetscCall(MatCreateVecs(pc->pmat, &counter, NULL));
    PetscCall(VecSet(counter, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecSum(counter, &interface_size));
    PetscCall(VecDestroy(&counter));

    /* compute some statistics on the domain decomposition */
    /* gsum/gmax/gmin are only significant at rank 0 (the reduce root); gsum[0] is
       pre-set to 1 so the MEAN divisions below cannot divide by zero on other ranks */
    gsum[0] = 1;
    gsum[1] = gsum[2] = gsum[3] = gsum[4] = gsum[5] = 0;
    loc[0]  = !!pcis->n; /* 1 if this process has local dofs (is "active") */
    loc[1]  = pcis->n - pcis->n_B;
    loc[2]  = pcis->n_B;
    loc[3]  = pcbddc->local_primal_size;
    loc[4]  = pcis->n;
    loc[5]  = pcbddc->n_local_subs > 0 ? pcbddc->n_local_subs : (pcis->n ? 1 : 0);
    loc[6]  = pcbddc->benign_n; /* reduced separately below */
    PetscCallMPI(MPI_Reduce(loc, gsum, 6, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    /* inactive processes must not pollute the MAX/MIN statistics: use sentinels */
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = -1;
    PetscCallMPI(MPI_Reduce(loc, gmax, 6, MPIU_INT64, MPI_MAX, 0, PetscObjectComm((PetscObject)pc)));
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = PETSC_MAX_INT;
    PetscCallMPI(MPI_Reduce(loc, gmin, 6, MPIU_INT64, MPI_MIN, 0, PetscObjectComm((PetscObject)pc)));
    PetscCallMPI(MPI_Reduce(&loc[6], &totbenign, 1, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    if (pcbddc->coarse_size) {
      ratio1 = pc->pmat->rmap->N / (1. * pcbddc->coarse_size);
      ratio2 = PetscRealPart(interface_size) / pcbddc->coarse_size;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "********************************** STATISTICS AT LEVEL %" PetscInt_FMT " **********************************\n", pcbddc->current_level));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Global dofs sizes: all %" PetscInt_FMT " interface %" PetscInt_FMT " coarse %" PetscInt_FMT "\n", pc->pmat->rmap->N, (PetscInt)PetscRealPart(interface_size), pcbddc->coarse_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Coarsening ratios: all/coarse %" PetscInt_FMT " interface/coarse %" PetscInt_FMT "\n", (PetscInt)ratio1, (PetscInt)ratio2));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Active processes : %" PetscInt_FMT "\n", (PetscInt)gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Total subdomains : %" PetscInt_FMT "\n", (PetscInt)gsum[5]));
    if (pcbddc->benign_have_null) PetscCall(PetscViewerASCIIPrintf(viewer, " Benign subs : %" PetscInt_FMT "\n", (PetscInt)totbenign));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Dofs type :\tMIN\tMAX\tMEAN\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interior dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[1], (PetscInt)gmax[1], (PetscInt)(gsum[1] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Interface dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[2], (PetscInt)gmax[2], (PetscInt)(gsum[2] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Primal dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[3], (PetscInt)gmax[3], (PetscInt)(gsum[3] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Local dofs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[4], (PetscInt)gmax[4], (PetscInt)(gsum[4] / gsum[0])));
    PetscCall(PetscViewerASCIIPrintf(viewer, " Local subs :\t%" PetscInt_FMT "\t%" PetscInt_FMT "\n", (PetscInt)gmin[5], (PetscInt)gmax[5]));
    PetscCall(PetscViewerFlush(viewer));

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

    /* local solvers: only rank 0's interior/correction KSPs are shown */
    PetscCall(PetscViewerGetSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    if (rank == 0) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Interior solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_D, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Correction solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_R, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    /* the coarse problem can be handled by a different communicator: color the
       ranks by whether they own the coarse KSP and view from that subcommunicator */
    if (pcbddc->coarse_ksp) color = 1;
    else color = 0;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
    PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm));
    PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2)));
    PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank));
    PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    if (color == 1) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Coarse solver\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->coarse_ksp, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    PetscCall(PetscSubcommDestroy(&subcomm));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetDiscreteGradient(): takes a reference on G and
   stores the Nedelec space description (order sign is discarded) */
static PetscErrorCode PCBDDCSetDiscreteGradient_BDDC(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  /* reference before destroy so passing the currently stored matrix is safe */
  PetscCall(PetscObjectReference((PetscObject)G));
  PetscCall(MatDestroy(&pcbddc->discretegradient));
  pcbddc->discretegradient = G;
  pcbddc->nedorder         = order > 0 ? order : -order;
  pcbddc->nedfield         = field;
  pcbddc->nedglobal        = global;
  pcbddc->conforming       = conforming;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetDiscreteGradient - Sets the discrete gradient to be used by the `PCBDDC` preconditioner

  Collective

  Input Parameters:
+ pc         - the preconditioning context
. G          - the discrete gradient matrix (in `MATAIJ` format)
. order      - the order of the Nedelec space (1 for the lowest order)
. field      - the field id of the Nedelec dofs (not used if the fields have not been specified)
. global     - the type of global ordering for the rows of `G`
- conforming - whether the mesh is conforming or not

  Level: advanced

  Note:
  The discrete gradient matrix `G` is used to analyze the subdomain edges, and it should not contain any zero entry.
  For variable order spaces, the order should be set to zero.
  If `global` is `PETSC_TRUE`, the rows of `G` should be given in global ordering for the whole dofs;
  if `PETSC_FALSE`, the ordering should be global for the Nedelec field.
  In the latter case, it should hold gid[i] < gid[j] iff geid[i] < geid[j], with gid the global ordering for all the dofs
  and geid the one for the Nedelec field.

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()`, `PCBDDCSetDofsSplittingLocal()`, `MATAIJ`, `PCBDDCSetDivergenceMat()`
@*/
PetscErrorCode PCBDDCSetDiscreteGradient(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(G, MAT_CLASSID, 2);
  PetscValidLogicalCollectiveInt(pc, order, 3);
  PetscValidLogicalCollectiveInt(pc, field, 4);
  PetscValidLogicalCollectiveBool(pc, global, 5);
  PetscValidLogicalCollectiveBool(pc, conforming, 6);
  PetscCheckSameComm(pc, 1, G, 2);
  PetscTryMethod(pc, "PCBDDCSetDiscreteGradient_C", (PC, Mat, PetscInt, PetscInt, PetscBool, PetscBool), (pc, G, order, field, global, conforming));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetDivergenceMat(): stores the divergence operator
   (and optional velocity local-to-local map) and enables no-net-flux computation */
static PetscErrorCode PCBDDCSetDivergenceMat_BDDC(PC pc, Mat divudotp, PetscBool trans, IS vl2l)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)divudotp));
  PetscCall(MatDestroy(&pcbddc->divudotp));
  pcbddc->divudotp           = divudotp;
  pcbddc->divudotp_trans     = trans;
  pcbddc->compute_nonetflux  = PETSC_TRUE;
  if (vl2l) {
    PetscCall(PetscObjectReference((PetscObject)vl2l));
    PetscCall(ISDestroy(&pcbddc->divudotp_vl2l));
    pcbddc->divudotp_vl2l = vl2l;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetDivergenceMat - Sets the linear operator representing \int_\Omega \div {\bf u} \cdot p dx for the `PCBDDC` preconditioner

  Collective

  Input Parameters:
+ pc       - the preconditioning context
. divudotp - the matrix (must be of type `MATIS`)
. trans    - if `PETSC_FALSE` (resp. `PETSC_TRUE`), then pressures are in the test (trial) space and velocities are in the trial (test) space.
- vl2l     - optional index set describing the local (wrt the local matrix in `divudotp`) to local (wrt the local matrix
             in the preconditioning matrix) map for the velocities

  Level: advanced

  Notes:
  This auxiliary matrix is used to compute quadrature weights representing the net-flux across subdomain boundaries

  If `vl2l` is `NULL`, the local ordering for velocities in `divudotp` should match that of the preconditioning matrix

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDiscreteGradient()`
@*/
PetscErrorCode PCBDDCSetDivergenceMat(PC pc, Mat divudotp, PetscBool trans, IS vl2l)
{
  PetscBool ismatis;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(divudotp, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, divudotp, 2);
  PetscValidLogicalCollectiveBool(pc, trans, 3);
  if (vl2l) PetscValidHeaderSpecific(vl2l, IS_CLASSID, 4);
  PetscCall(PetscObjectTypeCompare((PetscObject)divudotp, MATIS, &ismatis));
  PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Divergence matrix needs to be of type MATIS");
  PetscTryMethod(pc, "PCBDDCSetDivergenceMat_C", (PC, Mat, PetscBool, IS), (pc, divudotp, trans, vl2l));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetChangeOfBasisMat(): stores the user change of basis */
static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change, PetscBool interior)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)change));
  PetscCall(MatDestroy(&pcbddc->user_ChangeOfBasisMatrix));
  pcbddc->user_ChangeOfBasisMatrix = change;
  pcbddc->change_interior          = interior;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetChangeOfBasisMat - Set user defined change of basis for dofs

  Collective

  Input Parameters:
+ pc       - the preconditioning context
. change   - the change of basis matrix
- interior - whether or not the change of basis modifies interior dofs

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`
@*/
PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change, PetscBool interior)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(change, MAT_CLASSID, 2);
  PetscCheckSameComm(pc, 1, change, 2);
  /* when the operator is already set, the change of basis must match its global and local sizes */
  if (pc->mat) {
    PetscInt rows_c, cols_c, rows, cols;
    PetscCall(MatGetSize(pc->mat, &rows, &cols));
    PetscCall(MatGetSize(change, &rows_c, &cols_c));
    PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows);
    PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols);
    PetscCall(MatGetLocalSize(pc->mat, &rows, &cols));
    PetscCall(MatGetLocalSize(change, &rows_c, &cols_c));
    PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows);
    PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols);
  }
  PetscTryMethod(pc, "PCBDDCSetChangeOfBasisMat_C", (PC, Mat, PetscBool), (pc, change, interior));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetPrimalVerticesIS(): stores the global primal vertex set;
   topography is recomputed only if the new set differs from the stored one */
static PetscErrorCode PCBDDCSetPrimalVerticesIS_BDDC(PC pc, IS PrimalVertices)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices, &isequal));
  /* global and local user sets are mutually exclusive: drop both, keep the new global one */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices = PrimalVertices;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetPrimalVerticesIS - Set additional user defined primal vertices in `PCBDDC`

  Collective

  Input Parameters:
+ pc             - the preconditioning context
- PrimalVertices - index set of primal vertices in global numbering (can be empty)

  Level: intermediate

  Note:
  Any process can list any global node

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCSetPrimalVerticesIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetPrimalVerticesIS(): returns the stored set (no new reference) */
static PetscErrorCode PCBDDCGetPrimalVerticesIS_BDDC(PC pc, IS *is)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *is = pcbddc->user_primal_vertices;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetPrimalVerticesIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesIS()`

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in global numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 2);
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetPrimalVerticesLocalIS(): local-numbering analogue of
   PCBDDCSetPrimalVerticesIS_BDDC above */
static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
{
  PC_BDDC  *pcbddc  = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices_local) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices_local, &isequal));
  /* global and local user sets are mutually exclusive: drop both, keep the new local one */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices_local = PrimalVertices;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetPrimalVerticesLocalIS - Set additional user defined primal vertices in `PCBDDC`

  Collective

  Input Parameters:
+ pc             - the preconditioning context
- PrimalVertices - index set of primal vertices in local numbering (can be empty)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesLocalIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCGetPrimalVerticesLocalIS(): returns the stored set (no new reference) */
static PetscErrorCode PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc, IS *is)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  *is = pcbddc->user_primal_vertices_local;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCGetPrimalVerticesLocalIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesLocalIS()`

  Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in local numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesLocalIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 2);
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesLocalIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Implementation of PCBDDCSetCoarseningRatio(): stores the multilevel coarsening ratio */
static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc, PetscInt k)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

  PetscFunctionBegin;
  pcbddc->coarsening_ratio = k;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  PCBDDCSetCoarseningRatio - Set coarsening ratio used in the multi-level version of `PCBDDC`

  Logically Collective

  Input Parameters:
+ pc - the preconditioning context
- k  - coarsening ratio (H/h at the coarser level)

  Options Database Key:
.
-pc_bddc_coarsening_ratio <int> - Set the coarsening ratio used in multi-level coarsening 580 581 Level: intermediate 582 583 Note: 584 Approximately `k` subdomains at the finer level will be aggregated into a single subdomain at the coarser level 585 586 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetLevels()` 587 @*/ 588 PetscErrorCode PCBDDCSetCoarseningRatio(PC pc, PetscInt k) 589 { 590 PetscFunctionBegin; 591 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 592 PetscValidLogicalCollectiveInt(pc, k, 2); 593 PetscTryMethod(pc, "PCBDDCSetCoarseningRatio_C", (PC, PetscInt), (pc, k)); 594 PetscFunctionReturn(PETSC_SUCCESS); 595 } 596 597 /* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */ 598 static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc, PetscBool flg) 599 { 600 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 601 602 PetscFunctionBegin; 603 pcbddc->use_exact_dirichlet_trick = flg; 604 PetscFunctionReturn(PETSC_SUCCESS); 605 } 606 607 PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc, PetscBool flg) 608 { 609 PetscFunctionBegin; 610 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 611 PetscValidLogicalCollectiveBool(pc, flg, 2); 612 PetscTryMethod(pc, "PCBDDCSetUseExactDirichlet_C", (PC, PetscBool), (pc, flg)); 613 PetscFunctionReturn(PETSC_SUCCESS); 614 } 615 616 static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc, PetscInt level) 617 { 618 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 619 620 PetscFunctionBegin; 621 pcbddc->current_level = level; 622 PetscFunctionReturn(PETSC_SUCCESS); 623 } 624 625 PetscErrorCode PCBDDCSetLevel(PC pc, PetscInt level) 626 { 627 PetscFunctionBegin; 628 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 629 PetscValidLogicalCollectiveInt(pc, level, 2); 630 PetscTryMethod(pc, "PCBDDCSetLevel_C", (PC, PetscInt), (pc, level)); 631 PetscFunctionReturn(PETSC_SUCCESS); 632 } 633 634 static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc, PetscInt levels) 635 { 636 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 637 638 PetscFunctionBegin; 639 
PetscCheck(levels < PETSC_PCBDDC_MAXLEVELS, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Maximum number of additional levels for BDDC is %d", PETSC_PCBDDC_MAXLEVELS - 1); 640 pcbddc->max_levels = levels; 641 PetscFunctionReturn(PETSC_SUCCESS); 642 } 643 644 /*@ 645 PCBDDCSetLevels - Sets the maximum number of additional levels allowed for multilevel `PCBDDC` 646 647 Logically Collective 648 649 Input Parameters: 650 + pc - the preconditioning context 651 - levels - the maximum number of levels 652 653 Options Database Key: 654 . -pc_bddc_levels <int> - Set maximum number of levels for multilevel 655 656 Level: intermediate 657 658 Note: 659 The default value is 0, that gives the classical two-levels BDDC algorithm 660 661 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetCoarseningRatio()` 662 @*/ 663 PetscErrorCode PCBDDCSetLevels(PC pc, PetscInt levels) 664 { 665 PetscFunctionBegin; 666 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 667 PetscValidLogicalCollectiveInt(pc, levels, 2); 668 PetscTryMethod(pc, "PCBDDCSetLevels_C", (PC, PetscInt), (pc, levels)); 669 PetscFunctionReturn(PETSC_SUCCESS); 670 } 671 672 static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc, IS DirichletBoundaries) 673 { 674 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 675 PetscBool isequal = PETSC_FALSE; 676 677 PetscFunctionBegin; 678 PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries)); 679 if (pcbddc->DirichletBoundaries) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundaries, &isequal)); 680 /* last user setting takes precedence -> destroy any other customization */ 681 PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal)); 682 PetscCall(ISDestroy(&pcbddc->DirichletBoundaries)); 683 pcbddc->DirichletBoundaries = DirichletBoundaries; 684 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 685 PetscFunctionReturn(PETSC_SUCCESS); 686 } 687 688 /*@ 689 PCBDDCSetDirichletBoundaries - Set the `IS` defining Dirichlet boundaries for the global problem. 
690 691 Collective 692 693 Input Parameters: 694 + pc - the preconditioning context 695 - DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries 696 697 Level: intermediate 698 699 Note: 700 Provide the information if you used `MatZeroRows()` or `MatZeroRowsColumns()`. Any process can list any global node 701 702 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundariesLocal()`, `MatZeroRows()`, `MatZeroRowsColumns()` 703 @*/ 704 PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc, IS DirichletBoundaries) 705 { 706 PetscFunctionBegin; 707 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 708 PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2); 709 PetscCheckSameComm(pc, 1, DirichletBoundaries, 2); 710 PetscTryMethod(pc, "PCBDDCSetDirichletBoundaries_C", (PC, IS), (pc, DirichletBoundaries)); 711 PetscFunctionReturn(PETSC_SUCCESS); 712 } 713 714 static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc, IS DirichletBoundaries) 715 { 716 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 717 PetscBool isequal = PETSC_FALSE; 718 719 PetscFunctionBegin; 720 PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries)); 721 if (pcbddc->DirichletBoundariesLocal) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundariesLocal, &isequal)); 722 /* last user setting takes precedence -> destroy any other customization */ 723 PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal)); 724 PetscCall(ISDestroy(&pcbddc->DirichletBoundaries)); 725 pcbddc->DirichletBoundariesLocal = DirichletBoundaries; 726 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 727 PetscFunctionReturn(PETSC_SUCCESS); 728 } 729 730 /*@ 731 PCBDDCSetDirichletBoundariesLocal - Set the `IS` defining Dirichlet boundaries for the global problem in local ordering. 
732 733 Collective 734 735 Input Parameters: 736 + pc - the preconditioning context 737 - DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries (in local ordering) 738 739 Level: intermediate 740 741 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`, `MatZeroRows()`, `MatZeroRowsColumns()` 742 @*/ 743 PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc, IS DirichletBoundaries) 744 { 745 PetscFunctionBegin; 746 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 747 PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2); 748 PetscCheckSameComm(pc, 1, DirichletBoundaries, 2); 749 PetscTryMethod(pc, "PCBDDCSetDirichletBoundariesLocal_C", (PC, IS), (pc, DirichletBoundaries)); 750 PetscFunctionReturn(PETSC_SUCCESS); 751 } 752 753 static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc, IS NeumannBoundaries) 754 { 755 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 756 PetscBool isequal = PETSC_FALSE; 757 758 PetscFunctionBegin; 759 PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries)); 760 if (pcbddc->NeumannBoundaries) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundaries, &isequal)); 761 /* last user setting takes precedence -> destroy any other customization */ 762 PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal)); 763 PetscCall(ISDestroy(&pcbddc->NeumannBoundaries)); 764 pcbddc->NeumannBoundaries = NeumannBoundaries; 765 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 766 PetscFunctionReturn(PETSC_SUCCESS); 767 } 768 769 /*@ 770 PCBDDCSetNeumannBoundaries - Set the `IS` defining Neumann boundaries for the global problem. 
771 772 Collective 773 774 Input Parameters: 775 + pc - the preconditioning context 776 - NeumannBoundaries - parallel `IS` defining the Neumann boundaries 777 778 Level: intermediate 779 780 Note: 781 Any process can list any global node 782 783 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundariesLocal()` 784 @*/ 785 PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc, IS NeumannBoundaries) 786 { 787 PetscFunctionBegin; 788 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 789 PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2); 790 PetscCheckSameComm(pc, 1, NeumannBoundaries, 2); 791 PetscTryMethod(pc, "PCBDDCSetNeumannBoundaries_C", (PC, IS), (pc, NeumannBoundaries)); 792 PetscFunctionReturn(PETSC_SUCCESS); 793 } 794 795 static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc, IS NeumannBoundaries) 796 { 797 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 798 PetscBool isequal = PETSC_FALSE; 799 800 PetscFunctionBegin; 801 PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries)); 802 if (pcbddc->NeumannBoundariesLocal) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundariesLocal, &isequal)); 803 /* last user setting takes precedence -> destroy any other customization */ 804 PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal)); 805 PetscCall(ISDestroy(&pcbddc->NeumannBoundaries)); 806 pcbddc->NeumannBoundariesLocal = NeumannBoundaries; 807 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 808 PetscFunctionReturn(PETSC_SUCCESS); 809 } 810 811 /*@ 812 PCBDDCSetNeumannBoundariesLocal - Set the `IS` defining Neumann boundaries for the global problem in local ordering. 
813 814 Collective 815 816 Input Parameters: 817 + pc - the preconditioning context 818 - NeumannBoundaries - parallel `IS` defining the subdomain part of Neumann boundaries (in local ordering) 819 820 Level: intermediate 821 822 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()` 823 @*/ 824 PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc, IS NeumannBoundaries) 825 { 826 PetscFunctionBegin; 827 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 828 PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2); 829 PetscCheckSameComm(pc, 1, NeumannBoundaries, 2); 830 PetscTryMethod(pc, "PCBDDCSetNeumannBoundariesLocal_C", (PC, IS), (pc, NeumannBoundaries)); 831 PetscFunctionReturn(PETSC_SUCCESS); 832 } 833 834 static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc, IS *DirichletBoundaries) 835 { 836 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 837 838 PetscFunctionBegin; 839 *DirichletBoundaries = pcbddc->DirichletBoundaries; 840 PetscFunctionReturn(PETSC_SUCCESS); 841 } 842 843 /*@ 844 PCBDDCGetDirichletBoundaries - Get parallel `IS` for Dirichlet boundaries 845 846 Collective 847 848 Input Parameter: 849 . pc - the preconditioning context 850 851 Output Parameter: 852 . 
DirichletBoundaries - index set defining the Dirichlet boundaries 853 854 Level: intermediate 855 856 Note: 857 The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetDirichletBoundaries()` 858 859 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()` 860 @*/ 861 PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc, IS *DirichletBoundaries) 862 { 863 PetscFunctionBegin; 864 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 865 PetscUseMethod(pc, "PCBDDCGetDirichletBoundaries_C", (PC, IS *), (pc, DirichletBoundaries)); 866 PetscFunctionReturn(PETSC_SUCCESS); 867 } 868 869 static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc, IS *DirichletBoundaries) 870 { 871 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 872 873 PetscFunctionBegin; 874 *DirichletBoundaries = pcbddc->DirichletBoundariesLocal; 875 PetscFunctionReturn(PETSC_SUCCESS); 876 } 877 878 /*@ 879 PCBDDCGetDirichletBoundariesLocal - Get parallel `IS` for Dirichlet boundaries (in local ordering) 880 881 Collective 882 883 Input Parameter: 884 . pc - the preconditioning context 885 886 Output Parameter: 887 . DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries 888 889 Level: intermediate 890 891 Note: 892 The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetDirichletBoundariesLocal()`) 893 or a global-to-local map of the global `IS` (if provided with `PCBDDCSetDirichletBoundaries()`). 894 In the latter case, the `IS` will be available only after `PCSetUp()`. 
895 896 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()` 897 @*/ 898 PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc, IS *DirichletBoundaries) 899 { 900 PetscFunctionBegin; 901 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 902 PetscUseMethod(pc, "PCBDDCGetDirichletBoundariesLocal_C", (PC, IS *), (pc, DirichletBoundaries)); 903 PetscFunctionReturn(PETSC_SUCCESS); 904 } 905 906 static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc, IS *NeumannBoundaries) 907 { 908 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 909 910 PetscFunctionBegin; 911 *NeumannBoundaries = pcbddc->NeumannBoundaries; 912 PetscFunctionReturn(PETSC_SUCCESS); 913 } 914 915 /*@ 916 PCBDDCGetNeumannBoundaries - Get parallel `IS` for Neumann boundaries 917 918 Not Collective 919 920 Input Parameter: 921 . pc - the preconditioning context 922 923 Output Parameter: 924 . NeumannBoundaries - index set defining the Neumann boundaries 925 926 Level: intermediate 927 928 Note: 929 The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetNeumannBoundaries()` 930 931 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()` 932 @*/ 933 PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc, IS *NeumannBoundaries) 934 { 935 PetscFunctionBegin; 936 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 937 PetscUseMethod(pc, "PCBDDCGetNeumannBoundaries_C", (PC, IS *), (pc, NeumannBoundaries)); 938 PetscFunctionReturn(PETSC_SUCCESS); 939 } 940 941 static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc, IS *NeumannBoundaries) 942 { 943 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 944 945 PetscFunctionBegin; 946 *NeumannBoundaries = pcbddc->NeumannBoundariesLocal; 947 PetscFunctionReturn(PETSC_SUCCESS); 948 } 949 950 /*@ 951 PCBDDCGetNeumannBoundariesLocal - Get parallel `IS` for Neumann boundaries (in local ordering) 952 953 Not Collective 954 955 Input Parameter: 956 . 
pc - the preconditioning context 957 958 Output Parameter: 959 . NeumannBoundaries - index set defining the subdomain part of Neumann boundaries 960 961 Level: intermediate 962 963 Note: 964 The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetNeumannBoundariesLocal()` 965 or a global-to-local map of the global `IS` (if provided with `PCBDDCSetNeumannBoundaries()`). 966 In the latter case, the `IS` will be available after `PCSetUp()`. 967 968 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetNeumannBoundariesLocal)`, `PCBDDCGetNeumannBoundaries()` 969 @*/ 970 PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc, IS *NeumannBoundaries) 971 { 972 PetscFunctionBegin; 973 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 974 PetscUseMethod(pc, "PCBDDCGetNeumannBoundariesLocal_C", (PC, IS *), (pc, NeumannBoundaries)); 975 PetscFunctionReturn(PETSC_SUCCESS); 976 } 977 978 static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode) 979 { 980 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 981 PCBDDCGraph mat_graph = pcbddc->mat_graph; 982 PetscBool same_data = PETSC_FALSE; 983 984 PetscFunctionBegin; 985 if (!nvtxs) { 986 if (copymode == PETSC_OWN_POINTER) { 987 PetscCall(PetscFree(xadj)); 988 PetscCall(PetscFree(adjncy)); 989 } 990 PetscCall(PCBDDCGraphResetCSR(mat_graph)); 991 PetscFunctionReturn(PETSC_SUCCESS); 992 } 993 if (mat_graph->nvtxs == nvtxs && mat_graph->freecsr) { /* we own the data */ 994 if (mat_graph->xadj == xadj && mat_graph->adjncy == adjncy) same_data = PETSC_TRUE; 995 if (!same_data && mat_graph->xadj[nvtxs] == xadj[nvtxs]) { 996 PetscCall(PetscArraycmp(xadj, mat_graph->xadj, nvtxs + 1, &same_data)); 997 if (same_data) PetscCall(PetscArraycmp(adjncy, mat_graph->adjncy, xadj[nvtxs], &same_data)); 998 } 999 } 1000 if (!same_data) { 1001 /* free old CSR */ 1002 PetscCall(PCBDDCGraphResetCSR(mat_graph)); 1003 /* 
get CSR into graph structure */ 1004 if (copymode == PETSC_COPY_VALUES) { 1005 PetscCall(PetscMalloc1(nvtxs + 1, &mat_graph->xadj)); 1006 PetscCall(PetscMalloc1(xadj[nvtxs], &mat_graph->adjncy)); 1007 PetscCall(PetscArraycpy(mat_graph->xadj, xadj, nvtxs + 1)); 1008 PetscCall(PetscArraycpy(mat_graph->adjncy, adjncy, xadj[nvtxs])); 1009 mat_graph->freecsr = PETSC_TRUE; 1010 } else if (copymode == PETSC_OWN_POINTER) { 1011 mat_graph->xadj = (PetscInt *)xadj; 1012 mat_graph->adjncy = (PetscInt *)adjncy; 1013 mat_graph->freecsr = PETSC_TRUE; 1014 } else if (copymode == PETSC_USE_POINTER) { 1015 mat_graph->xadj = (PetscInt *)xadj; 1016 mat_graph->adjncy = (PetscInt *)adjncy; 1017 mat_graph->freecsr = PETSC_FALSE; 1018 } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported copy mode %d", copymode); 1019 mat_graph->nvtxs_csr = nvtxs; 1020 pcbddc->recompute_topography = PETSC_TRUE; 1021 } 1022 PetscFunctionReturn(PETSC_SUCCESS); 1023 } 1024 1025 /*@ 1026 PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local degrees of freedom. 1027 1028 Not collective 1029 1030 Input Parameters: 1031 + pc - the preconditioning context. 1032 . nvtxs - number of local vertices of the graph (i.e., the number of local dofs). 1033 . xadj - CSR format row pointers for the connectivity of the dofs 1034 . adjncy - CSR format column pointers for the connectivity of the dofs 1035 - copymode - supported modes are `PETSC_COPY_VALUES`, `PETSC_USE_POINTER` or `PETSC_OWN_POINTER`. 1036 1037 Level: intermediate 1038 1039 Note: 1040 A dof is considered connected with all local dofs if xadj[dof+1]-xadj[dof] == 1 and adjncy[xadj[dof]] is negative. 
1041 1042 .seealso: [](ch_ksp), `PCBDDC`, `PetscCopyMode` 1043 @*/ 1044 PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode) 1045 { 1046 void (*f)(void) = NULL; 1047 1048 PetscFunctionBegin; 1049 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1050 if (nvtxs) { 1051 PetscAssertPointer(xadj, 3); 1052 if (xadj[nvtxs]) PetscAssertPointer(adjncy, 4); 1053 } 1054 PetscTryMethod(pc, "PCBDDCSetLocalAdjacencyGraph_C", (PC, PetscInt, const PetscInt[], const PetscInt[], PetscCopyMode), (pc, nvtxs, xadj, adjncy, copymode)); 1055 /* free arrays if PCBDDC is not the PC type */ 1056 PetscCall(PetscObjectQueryFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", &f)); 1057 if (!f && copymode == PETSC_OWN_POINTER) { 1058 PetscCall(PetscFree(xadj)); 1059 PetscCall(PetscFree(adjncy)); 1060 } 1061 PetscFunctionReturn(PETSC_SUCCESS); 1062 } 1063 1064 static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc, PetscInt n_is, IS ISForDofs[]) 1065 { 1066 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 1067 PetscInt i; 1068 PetscBool isequal = PETSC_FALSE; 1069 1070 PetscFunctionBegin; 1071 if (pcbddc->n_ISForDofsLocal == n_is) { 1072 for (i = 0; i < n_is; i++) { 1073 PetscBool isequalt; 1074 PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofsLocal[i], &isequalt)); 1075 if (!isequalt) break; 1076 } 1077 if (i == n_is) isequal = PETSC_TRUE; 1078 } 1079 for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i])); 1080 /* Destroy ISes if they were already set */ 1081 for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i])); 1082 PetscCall(PetscFree(pcbddc->ISForDofsLocal)); 1083 /* last user setting takes precedence -> destroy any other customization */ 1084 for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i])); 1085 PetscCall(PetscFree(pcbddc->ISForDofs)); 1086 pcbddc->n_ISForDofs = 0; 1087 /* allocate space then set 
*/ 1088 if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofsLocal)); 1089 for (i = 0; i < n_is; i++) pcbddc->ISForDofsLocal[i] = ISForDofs[i]; 1090 pcbddc->n_ISForDofsLocal = n_is; 1091 if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE; 1092 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 1093 PetscFunctionReturn(PETSC_SUCCESS); 1094 } 1095 1096 /*@ 1097 PCBDDCSetDofsSplittingLocal - Set the `IS` defining fields of the local subdomain matrix 1098 1099 Collective 1100 1101 Input Parameters: 1102 + pc - the preconditioning context 1103 . n_is - number of index sets defining the fields, must be the same on all MPI processes 1104 - ISForDofs - array of `IS` describing the fields in local ordering 1105 1106 Level: intermediate 1107 1108 Note: 1109 Not all nodes need to be listed, unlisted nodes will belong to the complement field. 1110 1111 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()` 1112 @*/ 1113 PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc, PetscInt n_is, IS ISForDofs[]) 1114 { 1115 PetscInt i; 1116 1117 PetscFunctionBegin; 1118 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 1119 PetscValidLogicalCollectiveInt(pc, n_is, 2); 1120 for (i = 0; i < n_is; i++) { 1121 PetscCheckSameComm(pc, 1, ISForDofs[i], 3); 1122 PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3); 1123 } 1124 PetscTryMethod(pc, "PCBDDCSetDofsSplittingLocal_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs)); 1125 PetscFunctionReturn(PETSC_SUCCESS); 1126 } 1127 1128 static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc, PetscInt n_is, IS ISForDofs[]) 1129 { 1130 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 1131 PetscInt i; 1132 PetscBool isequal = PETSC_FALSE; 1133 1134 PetscFunctionBegin; 1135 if (pcbddc->n_ISForDofs == n_is) { 1136 for (i = 0; i < n_is; i++) { 1137 PetscBool isequalt; 1138 PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofs[i], &isequalt)); 1139 if (!isequalt) break; 1140 } 1141 if (i == n_is) isequal = PETSC_TRUE; 1142 } 1143 for (i = 0; i < n_is; i++) 
PetscCall(PetscObjectReference((PetscObject)ISForDofs[i])); 1144 /* Destroy ISes if they were already set */ 1145 for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i])); 1146 PetscCall(PetscFree(pcbddc->ISForDofs)); 1147 /* last user setting takes precedence -> destroy any other customization */ 1148 for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i])); 1149 PetscCall(PetscFree(pcbddc->ISForDofsLocal)); 1150 pcbddc->n_ISForDofsLocal = 0; 1151 /* allocate space then set */ 1152 if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofs)); 1153 for (i = 0; i < n_is; i++) pcbddc->ISForDofs[i] = ISForDofs[i]; 1154 pcbddc->n_ISForDofs = n_is; 1155 if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE; 1156 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 1157 PetscFunctionReturn(PETSC_SUCCESS); 1158 } 1159 1160 /*@ 1161 PCBDDCSetDofsSplitting - Set the `IS` defining fields of the global matrix 1162 1163 Collective 1164 1165 Input Parameters: 1166 + pc - the preconditioning context 1167 . n_is - number of index sets defining the fields 1168 - ISForDofs - array of `IS` describing the fields in global ordering 1169 1170 Level: intermediate 1171 1172 Note: 1173 Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field. 

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplittingLocal()`
@*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n_is, 2);
  for (i = 0; i < n_is; i++) {
    PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3);
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
  }
  PetscTryMethod(pc, "PCBDDCSetDofsSplitting_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/* Presolve hook: adjusts the right-hand side (Dirichlet dofs, nonzero initial guess,
   benign-space correction) before the Krylov solve; PCPostSolve_BDDC undoes the rhs changes */
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PC_IS    *pcis   = (PC_IS *)(pc->data);
  Vec       used_vec;
  PetscBool iscg, save_rhs = PETSC_TRUE, benign_correction_computed;

  PetscFunctionBegin;
  /* if we are working with CG, one dirichlet solve can be avoided during Krylov iterations */
  if (ksp) {
    PetscCall(PetscObjectTypeCompareAny((PetscObject)ksp, &iscg, KSPCG, KSPGROPPCG, KSPPIPECG, KSPPIPELCG, KSPPIPECGRR, ""));
    if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || !iscg || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));
  }
  /* repeated without the CG condition to also cover the no-KSP path (NOTE(review): the ksp
     branch above already disables the trick in these cases; presumably kept for when ksp == NULL) */
  if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));

  /* Creates parallel work vectors used in presolve */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (!pcbddc->temp_solution) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->temp_solution));

  pcbddc->temp_solution_used = PETSC_FALSE;
  if (x) {
    PetscCall(PetscObjectReference((PetscObject)x));
    used_vec = x;
  } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */
    PetscCall(PetscObjectReference((PetscObject)pcbddc->temp_solution));
    used_vec = pcbddc->temp_solution;
    PetscCall(VecSet(used_vec, 0.0));
    pcbddc->temp_solution_used = PETSC_TRUE;
    PetscCall(VecCopy(rhs, pcbddc->original_rhs));
    save_rhs                  = PETSC_FALSE;
    pcbddc->eliminate_dirdofs = PETSC_TRUE;
  }

  /* hack into ksp data structure since PCPreSolve comes earlier than setting to zero the guess in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */
    PetscCall(KSPGetInitialGuessNonzero(ksp, &pcbddc->ksp_guess_nonzero));
    if (!pcbddc->ksp_guess_nonzero) PetscCall(VecSet(used_vec, 0.0));
  }

  pcbddc->rhs_change = PETSC_FALSE;
  /* Take into account zeroed rows -> change rhs and store solution removed */
  if (rhs && pcbddc->eliminate_dirdofs) {
    IS dirIS = NULL;

    /* DirichletBoundariesLocal may not be consistent among neighbours; gets a dirichlet dofs IS from graph (may be cached) */
    PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirIS));
    if (dirIS) {
      Mat_IS            *matis = (Mat_IS *)pc->pmat->data;
      PetscInt           dirsize, i, *is_indices;
      PetscScalar       *array_x;
      const PetscScalar *array_diagonal;

      /* recover the boundary values as rhs_i/diag_i on the Dirichlet dofs and
         inject them into the solution vector */
      PetscCall(MatGetDiagonal(pc->pmat, pcis->vec1_global));
      PetscCall(VecPointwiseDivide(pcis->vec1_global, rhs, pcis->vec1_global));
      PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterBegin(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(ISGetLocalSize(dirIS, &dirsize));
      PetscCall(VecGetArray(pcis->vec1_N, &array_x));
      PetscCall(VecGetArrayRead(pcis->vec2_N, &array_diagonal));
      PetscCall(ISGetIndices(dirIS, (const PetscInt **)&is_indices));
      for (i = 0; i < dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
      PetscCall(ISRestoreIndices(dirIS, (const PetscInt **)&is_indices));
      PetscCall(VecRestoreArrayRead(pcis->vec2_N, &array_diagonal));
      PetscCall(VecRestoreArray(pcis->vec1_N, &array_x));
      PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
      pcbddc->rhs_change = PETSC_TRUE;
      PetscCall(ISDestroy(&dirIS));
    }
  }

  /* remove the computed solution or the initial guess from the rhs */
  if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero)) {
    /* save the original rhs */
    if (save_rhs) {
      PetscCall(VecSwap(rhs, pcbddc->original_rhs));
      save_rhs = PETSC_FALSE;
    }
    pcbddc->rhs_change = PETSC_TRUE;
    /* rhs <- original_rhs - A*used_vec, implemented with two sign flips to keep used_vec intact */
    PetscCall(VecScale(used_vec, -1.0));
    PetscCall(MatMultAdd(pc->mat, used_vec, pcbddc->original_rhs, rhs));
    PetscCall(VecScale(used_vec, -1.0));
    PetscCall(VecCopy(used_vec, pcbddc->temp_solution));
    pcbddc->temp_solution_used = PETSC_TRUE;
    if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE));
  }
  PetscCall(VecDestroy(&used_vec));

  /* compute initial vector in benign space if needed
     and remove non-benign solution from the rhs */
  benign_correction_computed = PETSC_FALSE;
  if (rhs && pcbddc->benign_compute_correction && (pcbddc->benign_have_null || pcbddc->benign_apply_coarse_only)) {
    /* compute u^*_h using ideas similar to those in Xuemin Tu's PhD thesis (see Section 4.8.1)
       Recursively apply BDDC in the multilevel case */
    if (!pcbddc->benign_vec) PetscCall(VecDuplicate(rhs, &pcbddc->benign_vec));
    /* keep applying coarse solver unless we no longer have benign subdomains */
    pcbddc->benign_apply_coarse_only = pcbddc->benign_have_null ? PETSC_TRUE : PETSC_FALSE;
    if (!pcbddc->benign_skip_correction) {
      PetscCall(PCApply_BDDC(pc, rhs, pcbddc->benign_vec));
      benign_correction_computed = PETSC_TRUE;
      if (pcbddc->temp_solution_used) PetscCall(VecAXPY(pcbddc->temp_solution, 1.0, pcbddc->benign_vec));
      PetscCall(VecScale(pcbddc->benign_vec, -1.0));
      /* store the original rhs if not done earlier */
      if (save_rhs) PetscCall(VecSwap(rhs, pcbddc->original_rhs));
      if (pcbddc->rhs_change) {
        PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, rhs, rhs));
      } else {
        PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, pcbddc->original_rhs, rhs));
      }
      pcbddc->rhs_change = PETSC_TRUE;
    }
    pcbddc->benign_apply_coarse_only = PETSC_FALSE;
  } else {
    PetscCall(VecDestroy(&pcbddc->benign_vec));
  }

  /* dbg output */
  if (pcbddc->dbg_flag && benign_correction_computed) {
    Vec v;

    PetscCall(VecDuplicate(pcis->vec1_global, &v));
    if (pcbddc->ChangeOfBasisMatrix) {
      PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, v));
    } else {
      PetscCall(VecCopy(rhs, v));
    }
    PetscCall(PCBDDCBenignGetOrSetP0(pc, v, PETSC_TRUE));
    PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "LEVEL %" PetscInt_FMT ": is the correction benign?\n", pcbddc->current_level));
    PetscCall(PetscScalarView(pcbddc->benign_n, pcbddc->benign_p0, pcbddc->dbg_viewer));
    PetscCall(PetscViewerFlush(pcbddc->dbg_viewer));
    PetscCall(VecDestroy(&v));
  }

  /* set initial guess if using PCG */
  pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
  if (x && pcbddc->use_exact_dirichlet_trick) {
    PetscCall(VecSet(x, 0.0));
    if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
      if (benign_correction_computed) { /* we have already saved the changed rhs */
        /* NOTE(review): the matching VecLockReadPush is presumably done inside PCApply_BDDC — confirm */
        PetscCall(VecLockReadPop(pcis->vec1_global));
      } else {
        PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, pcis->vec1_global));
      }
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(VecScatterBegin(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    }
    /* exact interior (Dirichlet) solve; its result becomes the Krylov initial guess */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
    if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
      PetscCall(VecSet(pcis->vec1_global, 0.));
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcis->vec1_global, x));
    } else {
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
    }
    if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
    pcbddc->exact_dirichlet_trick_app = PETSC_TRUE;
  } else if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior && benign_correction_computed && pcbddc->use_exact_dirichlet_trick) {
    /* release the read lock even when the trick is not applied, to balance the push */
    PetscCall(VecLockReadPop(pcis->vec1_global));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PC_BDDC *pcbddc =
(PC_BDDC *)pc->data; 1368 1369 PetscFunctionBegin; 1370 /* add solution removed in presolve */ 1371 if (x && pcbddc->rhs_change) { 1372 if (pcbddc->temp_solution_used) { 1373 PetscCall(VecAXPY(x, 1.0, pcbddc->temp_solution)); 1374 } else if (pcbddc->benign_compute_correction && pcbddc->benign_vec) { 1375 PetscCall(VecAXPY(x, -1.0, pcbddc->benign_vec)); 1376 } 1377 /* restore to original state (not for FETI-DP) */ 1378 if (ksp) pcbddc->temp_solution_used = PETSC_FALSE; 1379 } 1380 1381 /* restore rhs to its original state (not needed for FETI-DP) */ 1382 if (rhs && pcbddc->rhs_change) { 1383 PetscCall(VecSwap(rhs, pcbddc->original_rhs)); 1384 pcbddc->rhs_change = PETSC_FALSE; 1385 } 1386 /* restore ksp guess state */ 1387 if (ksp) { 1388 PetscCall(KSPSetInitialGuessNonzero(ksp, pcbddc->ksp_guess_nonzero)); 1389 /* reset flag for exact dirichlet trick */ 1390 pcbddc->exact_dirichlet_trick_app = PETSC_FALSE; 1391 } 1392 PetscFunctionReturn(PETSC_SUCCESS); 1393 } 1394 1395 static PetscErrorCode PCSetUp_BDDC(PC pc) 1396 { 1397 PC_BDDC *pcbddc = (PC_BDDC *)pc->data; 1398 PCBDDCSubSchurs sub_schurs; 1399 Mat_IS *matis; 1400 MatNullSpace nearnullspace; 1401 Mat lA; 1402 IS lP, zerodiag = NULL; 1403 PetscInt nrows, ncols; 1404 PetscMPIInt size; 1405 PetscBool computesubschurs; 1406 PetscBool computeconstraintsmatrix; 1407 PetscBool new_nearnullspace_provided, ismatis, rl; 1408 PetscBool isset, issym, isspd; 1409 1410 PetscFunctionBegin; 1411 PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &ismatis)); 1412 PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PCBDDC preconditioner requires matrix of type MATIS"); 1413 PetscCall(MatGetSize(pc->pmat, &nrows, &ncols)); 1414 PetscCheck(nrows == ncols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCBDDC preconditioner requires a square preconditioning matrix"); 1415 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size)); 1416 1417 matis = (Mat_IS *)pc->pmat->data; 1418 /* 
the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */ 1419 /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup 1420 Also, BDDC builds its own KSP for the Dirichlet problem */ 1421 rl = pcbddc->recompute_topography; 1422 if (!pc->setupcalled || pc->flag == DIFFERENT_NONZERO_PATTERN) rl = PETSC_TRUE; 1423 PetscCall(MPIU_Allreduce(&rl, &pcbddc->recompute_topography, 1, MPIU_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc))); 1424 if (pcbddc->recompute_topography) { 1425 pcbddc->graphanalyzed = PETSC_FALSE; 1426 computeconstraintsmatrix = PETSC_TRUE; 1427 } else { 1428 computeconstraintsmatrix = PETSC_FALSE; 1429 } 1430 1431 /* check parameters' compatibility */ 1432 if (!pcbddc->use_deluxe_scaling) pcbddc->deluxe_zerorows = PETSC_FALSE; 1433 pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_threshold[0] != 0.0 || pcbddc->adaptive_threshold[1] != 0.0); 1434 pcbddc->use_deluxe_scaling = (PetscBool)(pcbddc->use_deluxe_scaling && size > 1); 1435 pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_selection && size > 1); 1436 pcbddc->adaptive_userdefined = (PetscBool)(pcbddc->adaptive_selection && pcbddc->adaptive_userdefined); 1437 if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE; 1438 1439 computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling); 1440 1441 /* activate all connected components if the netflux has been requested */ 1442 if (pcbddc->compute_nonetflux) { 1443 pcbddc->use_vertices = PETSC_TRUE; 1444 pcbddc->use_edges = PETSC_TRUE; 1445 pcbddc->use_faces = PETSC_TRUE; 1446 } 1447 1448 /* Get stdout for dbg */ 1449 if (pcbddc->dbg_flag) { 1450 if (!pcbddc->dbg_viewer) pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc)); 1451 PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer)); 1452 PetscCall(PetscViewerASCIIAddTab(pcbddc->dbg_viewer, 2 * 
pcbddc->current_level)); 1453 } 1454 1455 /* process topology information */ 1456 PetscCall(PetscLogEventBegin(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0)); 1457 if (pcbddc->recompute_topography) { 1458 PetscCall(PCBDDCComputeLocalTopologyInfo(pc)); 1459 if (pcbddc->discretegradient) PetscCall(PCBDDCNedelecSupport(pc)); 1460 } 1461 if (pcbddc->corner_selected) pcbddc->use_vertices = PETSC_TRUE; 1462 1463 /* change basis if requested by the user */ 1464 if (pcbddc->user_ChangeOfBasisMatrix) { 1465 /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */ 1466 pcbddc->use_change_of_basis = PETSC_FALSE; 1467 PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->user_ChangeOfBasisMatrix)); 1468 } else { 1469 PetscCall(MatDestroy(&pcbddc->local_mat)); 1470 PetscCall(PetscObjectReference((PetscObject)matis->A)); 1471 pcbddc->local_mat = matis->A; 1472 } 1473 1474 /* 1475 Compute change of basis on local pressures (aka zerodiag dofs) with the benign trick 1476 This should come earlier then PCISSetUp for extracting the correct subdomain matrices 1477 */ 1478 PetscCall(PCBDDCBenignShellMat(pc, PETSC_TRUE)); 1479 if (pcbddc->benign_saddle_point) { 1480 PC_IS *pcis = (PC_IS *)pc->data; 1481 1482 if (pcbddc->user_ChangeOfBasisMatrix || pcbddc->use_change_of_basis || !computesubschurs) pcbddc->benign_change_explicit = PETSC_TRUE; 1483 /* detect local saddle point and change the basis in pcbddc->local_mat */ 1484 PetscCall(PCBDDCBenignDetectSaddlePoint(pc, (PetscBool)(!pcbddc->recompute_topography), &zerodiag)); 1485 /* pop B0 mat from local mat */ 1486 PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE)); 1487 /* give pcis a hint to not reuse submatrices during PCISCreate */ 1488 if (pc->flag == SAME_NONZERO_PATTERN && pcis->reusesubmatrices == PETSC_TRUE) { 1489 if (pcbddc->benign_n && (pcbddc->benign_change_explicit || pcbddc->dbg_flag)) { 1490 pcis->reusesubmatrices = PETSC_FALSE; 1491 } else { 1492 pcis->reusesubmatrices = 
PETSC_TRUE; 1493 } 1494 } else { 1495 pcis->reusesubmatrices = PETSC_FALSE; 1496 } 1497 } 1498 1499 /* propagate relevant information */ 1500 PetscCall(MatIsSymmetricKnown(matis->A, &isset, &issym)); 1501 if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SYMMETRIC, issym)); 1502 PetscCall(MatIsSPDKnown(matis->A, &isset, &isspd)); 1503 if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SPD, isspd)); 1504 1505 /* Set up all the "iterative substructuring" common block without computing solvers */ 1506 { 1507 Mat temp_mat; 1508 1509 temp_mat = matis->A; 1510 matis->A = pcbddc->local_mat; 1511 PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_FALSE)); 1512 pcbddc->local_mat = matis->A; 1513 matis->A = temp_mat; 1514 } 1515 1516 /* Analyze interface */ 1517 if (!pcbddc->graphanalyzed) { 1518 PetscCall(PCBDDCAnalyzeInterface(pc)); 1519 computeconstraintsmatrix = PETSC_TRUE; 1520 PetscCheck(!(pcbddc->adaptive_selection && !pcbddc->use_deluxe_scaling && !pcbddc->mat_graph->twodim), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot compute the adaptive primal space for a problem with 3D edges without deluxe scaling"); 1521 if (pcbddc->compute_nonetflux) { 1522 MatNullSpace nnfnnsp; 1523 1524 PetscCheck(pcbddc->divudotp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Missing divudotp operator"); 1525 PetscCall(PCBDDCComputeNoNetFlux(pc->pmat, pcbddc->divudotp, pcbddc->divudotp_trans, pcbddc->divudotp_vl2l, pcbddc->mat_graph, &nnfnnsp)); 1526 /* TODO what if a nearnullspace is already attached? */ 1527 if (nnfnnsp) { 1528 PetscCall(MatSetNearNullSpace(pc->pmat, nnfnnsp)); 1529 PetscCall(MatNullSpaceDestroy(&nnfnnsp)); 1530 } 1531 } 1532 } 1533 PetscCall(PetscLogEventEnd(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0)); 1534 1535 /* check existence of a divergence free extension, i.e. 1536 b(v_I,p_0) = 0 for all v_I (raise error if not). 
1537 Also, check that PCBDDCBenignGetOrSetP0 works */ 1538 if (pcbddc->benign_saddle_point && pcbddc->dbg_flag > 1) PetscCall(PCBDDCBenignCheck(pc, zerodiag)); 1539 PetscCall(ISDestroy(&zerodiag)); 1540 1541 /* Setup local dirichlet solver ksp_D and sub_schurs solvers */ 1542 if (computesubschurs && pcbddc->recompute_topography) PetscCall(PCBDDCInitSubSchurs(pc)); 1543 /* SetUp Scaling operator (scaling matrices could be needed in SubSchursSetUp)*/ 1544 if (!pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc)); 1545 1546 /* finish setup solvers and do adaptive selection of constraints */ 1547 sub_schurs = pcbddc->sub_schurs; 1548 if (sub_schurs && sub_schurs->schur_explicit) { 1549 if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc)); 1550 PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE)); 1551 } else { 1552 PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE)); 1553 if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc)); 1554 } 1555 if (pcbddc->adaptive_selection) { 1556 PetscCall(PCBDDCAdaptiveSelection(pc)); 1557 computeconstraintsmatrix = PETSC_TRUE; 1558 } 1559 1560 /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */ 1561 new_nearnullspace_provided = PETSC_FALSE; 1562 PetscCall(MatGetNearNullSpace(pc->pmat, &nearnullspace)); 1563 if (pcbddc->onearnullspace) { /* already used nearnullspace */ 1564 if (!nearnullspace) { /* near null space attached to mat has been destroyed */ 1565 new_nearnullspace_provided = PETSC_TRUE; 1566 } else { 1567 /* determine if the two nullspaces are different (should be lightweight) */ 1568 if (nearnullspace != pcbddc->onearnullspace) { 1569 new_nearnullspace_provided = PETSC_TRUE; 1570 } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */ 1571 PetscInt i; 1572 const Vec *nearnullvecs; 1573 PetscObjectState state; 1574 PetscInt nnsp_size; 1575 PetscCall(MatNullSpaceGetVecs(nearnullspace, NULL, &nnsp_size, 
&nearnullvecs)); 1576 for (i = 0; i < nnsp_size; i++) { 1577 PetscCall(PetscObjectStateGet((PetscObject)nearnullvecs[i], &state)); 1578 if (pcbddc->onearnullvecs_state[i] != state) { 1579 new_nearnullspace_provided = PETSC_TRUE; 1580 break; 1581 } 1582 } 1583 } 1584 } 1585 } else { 1586 if (!nearnullspace) { /* both nearnullspaces are null */ 1587 new_nearnullspace_provided = PETSC_FALSE; 1588 } else { /* nearnullspace attached later */ 1589 new_nearnullspace_provided = PETSC_TRUE; 1590 } 1591 } 1592 1593 /* Setup constraints and related work vectors */ 1594 /* reset primal space flags */ 1595 PetscCall(PetscLogEventBegin(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0)); 1596 pcbddc->new_primal_space = PETSC_FALSE; 1597 pcbddc->new_primal_space_local = PETSC_FALSE; 1598 if (computeconstraintsmatrix || new_nearnullspace_provided) { 1599 /* It also sets the primal space flags */ 1600 PetscCall(PCBDDCConstraintsSetUp(pc)); 1601 } 1602 /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */ 1603 PetscCall(PCBDDCSetUpLocalWorkVectors(pc)); 1604 1605 if (pcbddc->use_change_of_basis) { 1606 PC_IS *pcis = (PC_IS *)(pc->data); 1607 1608 PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->ChangeOfBasisMatrix)); 1609 if (pcbddc->benign_change) { 1610 PetscCall(MatDestroy(&pcbddc->benign_B0)); 1611 /* pop B0 from pcbddc->local_mat */ 1612 PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE)); 1613 } 1614 /* get submatrices */ 1615 PetscCall(MatDestroy(&pcis->A_IB)); 1616 PetscCall(MatDestroy(&pcis->A_BI)); 1617 PetscCall(MatDestroy(&pcis->A_BB)); 1618 PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_BB)); 1619 PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_I_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_IB)); 1620 PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_I_local, MAT_INITIAL_MATRIX, &pcis->A_BI)); 1621 /* set flag in pcis 
to not reuse submatrices during PCISCreate */ 1622 pcis->reusesubmatrices = PETSC_FALSE; 1623 } else if (!pcbddc->user_ChangeOfBasisMatrix && !pcbddc->benign_change) { 1624 PetscCall(MatDestroy(&pcbddc->local_mat)); 1625 PetscCall(PetscObjectReference((PetscObject)matis->A)); 1626 pcbddc->local_mat = matis->A; 1627 } 1628 1629 /* interface pressure block row for B_C */ 1630 PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lP", (PetscObject *)&lP)); 1631 PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lA", (PetscObject *)&lA)); 1632 if (lA && lP) { 1633 PC_IS *pcis = (PC_IS *)pc->data; 1634 Mat B_BI, B_BB, Bt_BI, Bt_BB; 1635 PetscBool issym; 1636 1637 PetscCall(MatIsSymmetric(lA, PETSC_SMALL, &issym)); 1638 if (issym) { 1639 PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI)); 1640 PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB)); 1641 PetscCall(MatCreateTranspose(B_BI, &Bt_BI)); 1642 PetscCall(MatCreateTranspose(B_BB, &Bt_BB)); 1643 } else { 1644 PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI)); 1645 PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB)); 1646 PetscCall(MatCreateSubMatrix(lA, pcis->is_I_local, lP, MAT_INITIAL_MATRIX, &Bt_BI)); 1647 PetscCall(MatCreateSubMatrix(lA, pcis->is_B_local, lP, MAT_INITIAL_MATRIX, &Bt_BB)); 1648 } 1649 PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BI", (PetscObject)B_BI)); 1650 PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BB", (PetscObject)B_BB)); 1651 PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BI", (PetscObject)Bt_BI)); 1652 PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BB", (PetscObject)Bt_BB)); 1653 PetscCall(MatDestroy(&B_BI)); 1654 PetscCall(MatDestroy(&B_BB)); 1655 PetscCall(MatDestroy(&Bt_BI)); 1656 PetscCall(MatDestroy(&Bt_BB)); 1657 } 1658 PetscCall(PetscLogEventEnd(PC_BDDC_LocalWork[pcbddc->current_level], 
pc, 0, 0, 0)); 1659 1660 /* SetUp coarse and local Neumann solvers */ 1661 PetscCall(PCBDDCSetUpSolvers(pc)); 1662 /* SetUp Scaling operator */ 1663 if (pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc)); 1664 1665 /* mark topography as done */ 1666 pcbddc->recompute_topography = PETSC_FALSE; 1667 1668 /* wrap pcis->A_IB and pcis->A_BI if we did not change explicitly the variables on the pressures */ 1669 PetscCall(PCBDDCBenignShellMat(pc, PETSC_FALSE)); 1670 1671 if (pcbddc->dbg_flag) { 1672 PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level)); 1673 PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer)); 1674 } 1675 PetscFunctionReturn(PETSC_SUCCESS); 1676 } 1677 1678 static PetscErrorCode PCApply_BDDC(PC pc, Vec r, Vec z) 1679 { 1680 PC_IS *pcis = (PC_IS *)(pc->data); 1681 PC_BDDC *pcbddc = (PC_BDDC *)(pc->data); 1682 Mat lA = NULL; 1683 PetscInt n_B = pcis->n_B, n_D = pcis->n - n_B; 1684 const PetscScalar one = 1.0; 1685 const PetscScalar m_one = -1.0; 1686 const PetscScalar zero = 0.0; 1687 /* This code is similar to that provided in nn.c for PCNN 1688 NN interface preconditioner changed to BDDC 1689 Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */ 1690 1691 PetscFunctionBegin; 1692 PetscCall(PetscCitationsRegister(citation, &cited)); 1693 if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? 
pc->mat : pc->pmat, &lA)); 1694 1695 if (pcbddc->ChangeOfBasisMatrix) { 1696 Vec swap; 1697 1698 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change)); 1699 swap = pcbddc->work_change; 1700 pcbddc->work_change = r; 1701 r = swap; 1702 /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */ 1703 if (pcbddc->benign_apply_coarse_only && pcbddc->use_exact_dirichlet_trick && pcbddc->change_interior) { 1704 PetscCall(VecCopy(r, pcis->vec1_global)); 1705 PetscCall(VecLockReadPush(pcis->vec1_global)); 1706 } 1707 } 1708 if (pcbddc->benign_have_null) { /* get p0 from r */ 1709 PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE)); 1710 } 1711 if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET && !pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) { 1712 PetscCall(VecCopy(r, z)); 1713 /* First Dirichlet solve */ 1714 PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1715 PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1716 /* 1717 Assembling right hand side for BDDC operator 1718 - pcis->vec1_D for the Dirichlet part (if needed, i.e. 
pcbddc->switch_static == PETSC_TRUE) 1719 - pcis->vec1_B the interface part of the global vector z 1720 */ 1721 PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1722 if (n_D) { 1723 PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D)); 1724 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1725 PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D)); 1726 PetscCall(VecScale(pcis->vec2_D, m_one)); 1727 if (pcbddc->switch_static) { 1728 PetscCall(VecSet(pcis->vec1_N, 0.)); 1729 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1730 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1731 if (!pcbddc->switch_static_change) { 1732 PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N)); 1733 } else { 1734 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1735 PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N)); 1736 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1737 } 1738 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD)); 1739 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD)); 1740 PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1741 PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1742 } else { 1743 PetscCall(MatMult(pcis->A_BI, pcis->vec2_D, pcis->vec1_B)); 1744 } 1745 } else { 1746 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1747 PetscCall(VecSet(pcis->vec1_B, zero)); 1748 } 1749 PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE)); 1750 PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE)); 1751 
PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B)); 1752 } else { 1753 if (!pcbddc->benign_apply_coarse_only) PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B)); 1754 } 1755 if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) { 1756 PetscCheck(pcbddc->switch_static, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You forgot to pass -pc_bddc_switch_static"); 1757 PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1758 PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1759 } 1760 1761 /* Apply interface preconditioner 1762 input/output vecs: pcis->vec1_B and pcis->vec1_D */ 1763 PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_FALSE)); 1764 1765 /* Apply transpose of partition of unity operator */ 1766 PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z)); 1767 if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) { 1768 PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE)); 1769 PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE)); 1770 PetscFunctionReturn(PETSC_SUCCESS); 1771 } 1772 /* Second Dirichlet solve and assembling of output */ 1773 PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1774 PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1775 if (n_B) { 1776 if (pcbddc->switch_static) { 1777 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1778 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1779 PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1780 PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1781 if (!pcbddc->switch_static_change) { 1782 
PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N)); 1783 } else { 1784 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1785 PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N)); 1786 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1787 } 1788 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD)); 1789 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD)); 1790 } else { 1791 PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec3_D)); 1792 } 1793 } else if (pcbddc->switch_static) { /* n_B is zero */ 1794 if (!pcbddc->switch_static_change) { 1795 PetscCall(MatMult(lA, pcis->vec1_D, pcis->vec3_D)); 1796 } else { 1797 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N)); 1798 PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N)); 1799 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D)); 1800 } 1801 } 1802 PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1803 PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D)); 1804 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1805 PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D)); 1806 1807 if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) { 1808 if (pcbddc->switch_static) { 1809 PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D)); 1810 } else { 1811 PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D)); 1812 } 1813 PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE)); 1814 PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE)); 1815 } else { 1816 if (pcbddc->switch_static) { 1817 PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D)); 1818 } else { 1819 
PetscCall(VecScale(pcis->vec4_D, m_one)); 1820 } 1821 PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE)); 1822 PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE)); 1823 } 1824 if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */ 1825 if (pcbddc->benign_apply_coarse_only) PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n)); 1826 PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE)); 1827 } 1828 1829 if (pcbddc->ChangeOfBasisMatrix) { 1830 pcbddc->work_change = r; 1831 PetscCall(VecCopy(z, pcbddc->work_change)); 1832 PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z)); 1833 } 1834 PetscFunctionReturn(PETSC_SUCCESS); 1835 } 1836 1837 static PetscErrorCode PCApplyTranspose_BDDC(PC pc, Vec r, Vec z) 1838 { 1839 PC_IS *pcis = (PC_IS *)(pc->data); 1840 PC_BDDC *pcbddc = (PC_BDDC *)(pc->data); 1841 Mat lA = NULL; 1842 PetscInt n_B = pcis->n_B, n_D = pcis->n - n_B; 1843 const PetscScalar one = 1.0; 1844 const PetscScalar m_one = -1.0; 1845 const PetscScalar zero = 0.0; 1846 1847 PetscFunctionBegin; 1848 PetscCall(PetscCitationsRegister(citation, &cited)); 1849 if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? 
pc->mat : pc->pmat, &lA)); 1850 if (pcbddc->ChangeOfBasisMatrix) { 1851 Vec swap; 1852 1853 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change)); 1854 swap = pcbddc->work_change; 1855 pcbddc->work_change = r; 1856 r = swap; 1857 /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */ 1858 if (pcbddc->benign_apply_coarse_only && pcbddc->exact_dirichlet_trick_app && pcbddc->change_interior) { 1859 PetscCall(VecCopy(r, pcis->vec1_global)); 1860 PetscCall(VecLockReadPush(pcis->vec1_global)); 1861 } 1862 } 1863 if (pcbddc->benign_have_null) { /* get p0 from r */ 1864 PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE)); 1865 } 1866 if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) { 1867 PetscCall(VecCopy(r, z)); 1868 /* First Dirichlet solve */ 1869 PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1870 PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD)); 1871 /* 1872 Assembling right hand side for BDDC operator 1873 - pcis->vec1_D for the Dirichlet part (if needed, i.e. 
pcbddc->switch_static == PETSC_TRUE) 1874 - pcis->vec1_B the interface part of the global vector z 1875 */ 1876 if (n_D) { 1877 PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1878 PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D)); 1879 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0)); 1880 PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D)); 1881 PetscCall(VecScale(pcis->vec2_D, m_one)); 1882 if (pcbddc->switch_static) { 1883 PetscCall(VecSet(pcis->vec1_N, 0.)); 1884 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1885 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1886 if (!pcbddc->switch_static_change) { 1887 PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N)); 1888 } else { 1889 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1890 PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N)); 1891 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1892 } 1893 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD)); 1894 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD)); 1895 PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1896 PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1897 } else { 1898 PetscCall(MatMultTranspose(pcis->A_IB, pcis->vec2_D, pcis->vec1_B)); 1899 } 1900 } else { 1901 PetscCall(VecSet(pcis->vec1_B, zero)); 1902 } 1903 PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE)); 1904 PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE)); 1905 PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B)); 1906 } else { 
1907 PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B)); 1908 } 1909 1910 /* Apply interface preconditioner 1911 input/output vecs: pcis->vec1_B and pcis->vec1_D */ 1912 PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_TRUE)); 1913 1914 /* Apply transpose of partition of unity operator */ 1915 PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z)); 1916 1917 /* Second Dirichlet solve and assembling of output */ 1918 PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1919 PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD)); 1920 if (n_B) { 1921 if (pcbddc->switch_static) { 1922 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1923 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1924 PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1925 PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE)); 1926 if (!pcbddc->switch_static_change) { 1927 PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N)); 1928 } else { 1929 PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1930 PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N)); 1931 PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N)); 1932 } 1933 PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD)); 1934 PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD)); 1935 } else { 1936 PetscCall(MatMultTranspose(pcis->A_BI, pcis->vec1_B, pcis->vec3_D)); 1937 } 1938 } else if (pcbddc->switch_static) { /* n_B is zero */ 1939 if (!pcbddc->switch_static_change) { 1940 PetscCall(MatMultTranspose(lA, pcis->vec1_D, pcis->vec3_D)); 1941 } else { 1942 
          /* tail of the transposed BDDC application (function body starts earlier in the file):
             change basis, apply lA^T, then map the result back to the original basis */
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
          PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
        }
      }
      /* second interior (Dirichlet) transposed solve, tracked by the per-level Solves event */
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));
      if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
        /* combine first and second interior corrections (m_one/one are scalar constants declared in the unseen function head -- TODO confirm values -1.0/1.0) */
        if (pcbddc->switch_static) {
          PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
        } else {
          PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
        }
        PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
      } else {
        if (pcbddc->switch_static) {
          PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
        } else {
          PetscCall(VecScale(pcis->vec4_D, m_one));
        }
        PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
      }
      if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
        PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
      }
      if (pcbddc->ChangeOfBasisMatrix) {
        /* the input vector r is reused as workspace for the final change of basis */
        pcbddc->work_change = r;
        PetscCall(VecCopy(z, pcbddc->work_change));
        PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCReset_BDDC - Frees all data built during setup and restores default option values.

       The three KSP objects (Dirichlet, Neumann/R, coarse) are deliberately preserved across
       the reset: they are saved before the PetscMemzero() and restored afterwards, so their
       options prefixes and user customizations survive a PCReset().

       NOTE(review): both pcbddc and pcis are cast from the same pc->data pointer; this relies
       on PC_BDDC embedding PC_IS as its first member -- confirm against pcbddcimpl.h */
    static PetscErrorCode PCReset_BDDC(PC pc)
    {
      PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
      PC_IS   *pcis   = (PC_IS *)pc->data;
      KSP      kspD, kspR, kspC;

      PetscFunctionBegin;
      /* free BDDC custom data */
      PetscCall(PCBDDCResetCustomization(pc));
      /* destroy objects related to topography */
      PetscCall(PCBDDCResetTopography(pc));
      /* destroy objects for scaling operator */
      PetscCall(PCBDDCScalingDestroy(pc));
      /* free solvers stuff */
      PetscCall(PCBDDCResetSolvers(pc));
      /* free global vectors needed in presolve */
      PetscCall(VecDestroy(&pcbddc->temp_solution));
      PetscCall(VecDestroy(&pcbddc->original_rhs));
      /* free data created by PCIS */
      PetscCall(PCISReset(pc));

      /* restore defaults: save the solver objects, wipe the whole struct, then re-set the
         non-zero defaults and put the saved solvers back */
      kspD = pcbddc->ksp_D;
      kspR = pcbddc->ksp_R;
      kspC = pcbddc->coarse_ksp;
      PetscCall(PetscMemzero(pc->data, sizeof(*pcbddc)));
      pcis->n_neigh                     = -1;
      pcis->scaling_factor              = 1.0;
      pcis->reusesubmatrices            = PETSC_TRUE;
      pcbddc->use_local_adj             = PETSC_TRUE;
      pcbddc->use_vertices              = PETSC_TRUE;
      pcbddc->use_edges                 = PETSC_TRUE;
      pcbddc->symmetric_primal          = PETSC_TRUE;
      pcbddc->vertex_size               = 1;
      pcbddc->recompute_topography      = PETSC_TRUE;
      pcbddc->coarse_size               = -1;
      pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
      pcbddc->coarsening_ratio          = 8;
      pcbddc->coarse_eqs_per_proc       = 1;
      pcbddc->benign_compute_correction = PETSC_TRUE;
      pcbddc->nedfield                  = -1;
      pcbddc->nedglobal                 = PETSC_TRUE;
      pcbddc->graphmaxcount             = PETSC_MAX_INT;
      pcbddc->sub_schurs_layers         = -1;
      pcbddc->ksp_D                     = kspD;
      pcbddc->ksp_R                     = kspR;
      pcbddc->coarse_ksp                = kspC;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCDestroy_BDDC - Resets the preconditioner, destroys the retained solver objects, and
       removes every composed method so the PC object leaves no dangling function pointers */
    static PetscErrorCode PCDestroy_BDDC(PC pc)
    {
      PC_BDDC *pcbddc = (PC_BDDC *)pc->data;

      PetscFunctionBegin;
      PetscCall(PCReset_BDDC(pc));
      /* the KSPs survive PCReset_BDDC (see above); destroy them here */
      PetscCall(KSPDestroy(&pcbddc->ksp_D));
      PetscCall(KSPDestroy(&pcbddc->ksp_R));
      PetscCall(KSPDestroy(&pcbddc->coarse_ksp));
      /* un-compose all the PCBDDCXXX_C methods registered at creation time */
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", NULL));
      PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
      PetscCall(PetscFree(pc->data));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCSetCoordinates_BDDC - Stores a private copy of the dof coordinates in the connectivity
       graph (used for corner selection) and flags the topography for recomputation.

       Input Parameters:
       + pc     - the PCBDDC preconditioner context
       . dim    - embedding dimension of the coordinates
       . nloc   - number of local points
       - coords - array of nloc*dim reals, copied (caller keeps ownership) */
    static PetscErrorCode PCSetCoordinates_BDDC(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
    {
      PC_BDDC    *pcbddc    = (PC_BDDC *)pc->data;
      PCBDDCGraph mat_graph = pcbddc->mat_graph;

      PetscFunctionBegin;
      /* replace any previously set coordinates */
      PetscCall(PetscFree(mat_graph->coords));
      PetscCall(PetscMalloc1(nloc * dim, &mat_graph->coords));
      PetscCall(PetscArraycpy(mat_graph->coords, coords, nloc * dim));
      mat_graph->cnloc = nloc;
      mat_graph->cdim  = dim;
      mat_graph->cloc  = PETSC_FALSE;
      /* flg setup */
      pcbddc->recompute_topography = PETSC_TRUE;
      pcbddc->corner_selected      = PETSC_FALSE;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCPreSolveChangeRHS_BDDC - Reports that this PC modifies the right-hand side in presolve
       (queried via the PCPreSolveChangeRHS_C composed method); always answers PETSC_TRUE */
    static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool *change)
    {
      PetscFunctionBegin;
      *change = PETSC_TRUE;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCBDDCMatFETIDPGetRHS_BDDC - Implementation behind PCBDDCMatFETIDPGetRHS(): builds the
       FETI-DP flux right-hand side from the physical right-hand side.

       NOTE(review): pcis and pcbddc both alias mat_ctx->pc->data (PC_BDDC is expected to embed
       PC_IS as its first member -- confirm against pcbddcimpl.h) */
    static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
    {
      FETIDPMat_ctx mat_ctx;
      Vec           work;
      PC_IS        *pcis;
      PC_BDDC      *pcbddc;

      PetscFunctionBegin;
      PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
      pcis   = (PC_IS *)mat_ctx->pc->data;
      pcbddc = (PC_BDDC *)mat_ctx->pc->data;

      PetscCall(VecSet(fetidp_flux_rhs, 0.0));
      /*
         copy rhs since we may change it during PCPreSolve_BDDC */
      if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
      if (mat_ctx->rhs_flip) {
        /* apply the sign-flip vector to the copied rhs */
        PetscCall(VecPointwiseMult(pcbddc->original_rhs, standard_rhs, mat_ctx->rhs_flip));
      } else {
        PetscCall(VecCopy(standard_rhs, pcbddc->original_rhs));
      }
      if (mat_ctx->g2g_p) {
        /* interface pressure rhs: zero the pressure block of original_rhs and move the
           pressure part of standard_rhs into the flux rhs */
        PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterBegin(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
        if (!mat_ctx->rhs_flip) PetscCall(VecScale(fetidp_flux_rhs, -1.));
      }
      /*
         change of basis for physical rhs if needed
         It also changes the rhs in case of dirichlet boundaries
      */
      PetscCall(PCPreSolve_BDDC(mat_ctx->pc, NULL, pcbddc->original_rhs, NULL));
      if (pcbddc->ChangeOfBasisMatrix) {
        PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, pcbddc->original_rhs, pcbddc->work_change));
        work = pcbddc->work_change;
      } else {
        work = pcbddc->original_rhs;
      }
      /* store vectors for computation of fetidp final solution */
      PetscCall(VecScatterBegin(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
      /* scale rhs since it should be unassembled */
      /* TODO use counter scaling? (also below) */
      PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
      /* Apply partition of unity */
      PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
      /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
      if (!pcbddc->switch_static) {
        /* compute partially subassembled Schur complement right-hand side */
        PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
        PetscCall(KSPSolve(pcbddc->ksp_D, mat_ctx->temp_solution_D, pcis->vec1_D));
        PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
        /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
        PetscCall(MatMult(pcis->A_BI, pcis->vec1_D, pcis->vec1_B));
        PetscCall(VecAXPY(mat_ctx->temp_solution_B, -1.0, pcis->vec1_B));
        /* reassemble across subdomains (ADD_VALUES reverse scatter), then restrict again and
           re-apply the partition of unity */
        PetscCall(VecSet(work, 0.0));
        PetscCall(VecScatterBegin(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
        /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
        PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
      }
      /* BDDC rhs */
      PetscCall(VecCopy(mat_ctx->temp_solution_B, pcis->vec1_B));
      if (pcbddc->switch_static) PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
      /* apply BDDC; benign_p0 is zeroed before and after so the benign trick does not leak
         state into/out of this application */
      PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
      PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));
      PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));

      /* Application of B_delta and assembling of rhs for fetidp fluxes */
      PetscCall(MatMult(mat_ctx->B_delta, pcis->vec1_B, mat_ctx->lambda_local));
      PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
      /* Add contribution to interface pressures */
      if (mat_ctx->l2g_p) {
        PetscCall(MatMult(mat_ctx->B_BB, pcis->vec1_B, mat_ctx->vP));
        if (pcbddc->switch_static) PetscCall(MatMultAdd(mat_ctx->B_BI, pcis->vec1_D, mat_ctx->vP, mat_ctx->vP));
        PetscCall(VecScatterBegin(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /*@
      PCBDDCMatFETIDPGetRHS - Compute the right-hand side for a FETI-DP linear system using the physical right-hand side

      Collective

      Input Parameters:
    + fetidp_mat   - the FETI-DP matrix object obtained by a call to `PCBDDCCreateFETIDPOperators()`
    - standard_rhs - the right-hand side of the original linear system

      Output Parameter:
    . fetidp_flux_rhs - the right-hand side for the FETI-DP linear system

      Level: developer

    .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetSolution()`
    @*/
    PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
    {
      FETIDPMat_ctx mat_ctx;

      PetscFunctionBegin;
      PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
      PetscValidHeaderSpecific(standard_rhs, VEC_CLASSID, 2);
      PetscValidHeaderSpecific(fetidp_flux_rhs, VEC_CLASSID, 3);
      PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
      /* dispatch to the method composed on the owning PC (PCBDDCMatFETIDPGetRHS_BDDC) */
      PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetRHS_C", (Mat, Vec, Vec), (fetidp_mat, standard_rhs, fetidp_flux_rhs));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCBDDCMatFETIDPGetSolution_BDDC - Implementation behind PCBDDCMatFETIDPGetSolution():
       recovers the physical solution from the FETI-DP flux solution.

       NOTE(review): pcis and pcbddc both alias mat_ctx->pc->data (PC_BDDC is expected to embed
       PC_IS as its first member -- confirm against pcbddcimpl.h) */
    static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
    {
      FETIDPMat_ctx mat_ctx;
      PC_IS        *pcis;
      PC_BDDC      *pcbddc;
      Vec           work;

      PetscFunctionBegin;
      PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
      pcis   = (PC_IS *)mat_ctx->pc->data;
      pcbddc = (PC_BDDC *)mat_ctx->pc->data;

      /* apply B_delta^T */
      PetscCall(VecSet(pcis->vec1_B, 0.));
      PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(MatMultTranspose(mat_ctx->B_delta, mat_ctx->lambda_local, pcis->vec1_B));
      if (mat_ctx->l2g_p) {
        /* add the pressure contribution when a pressure space is present */
        PetscCall(VecScatterBegin(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(MatMultAdd(mat_ctx->Bt_BB, mat_ctx->vP, pcis->vec1_B, pcis->vec1_B));
      }

      /* compute rhs for BDDC application: vec1_B = temp_solution_B - vec1_B */
      PetscCall(VecAYPX(pcis->vec1_B, -1.0, mat_ctx->temp_solution_B));
      if (pcbddc->switch_static) {
        PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
        if (mat_ctx->l2g_p) {
          PetscCall(VecScale(mat_ctx->vP, -1.));
          PetscCall(MatMultAdd(mat_ctx->Bt_BI, mat_ctx->vP, pcis->vec1_D, pcis->vec1_D));
        }
      }

      /* apply BDDC */
      PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
      PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));

      /* put values into global vector */
      if (pcbddc->ChangeOfBasisMatrix) work = pcbddc->work_change;
      else work = standard_sol;
      PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static) {
        /* compute values into the interior if solved for the partially subassembled Schur complement */
        PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec1_D));
        PetscCall(VecAYPX(pcis->vec1_D, -1.0, mat_ctx->temp_solution_D));
        PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
        PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec1_D));
        PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
        /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
      }

      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
      /* add p0 solution to final solution */
      PetscCall(PCBDDCBenignGetOrSetP0(mat_ctx->pc, work, PETSC_FALSE));
      if (pcbddc->ChangeOfBasisMatrix) PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, work, standard_sol));
      PetscCall(PCPostSolve_BDDC(mat_ctx->pc, NULL, NULL,
    standard_sol));
      if (mat_ctx->g2g_p) {
        /* restore the pressure block of the physical solution from the FETI-DP solution */
        PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
      }
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCView_BDDCIPC - Viewer for the shell PC wrapping the inner BDDC interface preconditioner;
       prints a banner (ASCII viewers only) and delegates to the inner PC's view routine */
    static PetscErrorCode PCView_BDDCIPC(PC pc, PetscViewer viewer)
    {
      BDDCIPC_ctx bddcipc_ctx;
      PetscBool   isascii;

      PetscFunctionBegin;
      PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
      PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
      if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "BDDC interface preconditioner\n"));
      PetscCall(PetscViewerASCIIPushTab(viewer));
      PetscCall(PCView(bddcipc_ctx->bddc, viewer));
      PetscCall(PetscViewerASCIIPopTab(viewer));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCSetUp_BDDCIPC - Sets up the inner BDDC solver and builds the scatter from the outer
       operator's global vector layout to the inner BDDC interface vector (vec1_B) */
    static PetscErrorCode PCSetUp_BDDCIPC(PC pc)
    {
      BDDCIPC_ctx bddcipc_ctx;
      PetscBool   isbddc;
      Vec         vv;
      IS          is;
      PC_IS      *pcis;

      PetscFunctionBegin;
      PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
      PetscCall(PetscObjectTypeCompare((PetscObject)bddcipc_ctx->bddc, PCBDDC, &isbddc));
      PetscCheck(isbddc, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid type %s. Must be of type bddc", ((PetscObject)bddcipc_ctx->bddc)->type_name);
      PetscCall(PCSetUp(bddcipc_ctx->bddc));

      /* create interface scatter */
      pcis = (PC_IS *)(bddcipc_ctx->bddc->data);
      PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
      PetscCall(MatCreateVecs(pc->pmat, &vv, NULL));
      /* renumber the interface IS into the outer vector's numbering before creating the scatter */
      PetscCall(ISRenumber(pcis->is_B_global, NULL, NULL, &is));
      PetscCall(VecScatterCreate(vv, is, pcis->vec1_B, NULL, &bddcipc_ctx->g2l));
      PetscCall(ISDestroy(&is));
      PetscCall(VecDestroy(&vv));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCApply_BDDCIPC - Applies the BDDC interface preconditioner: temporarily swaps the inner
       PC's global_to_B scatter with the outer-layout one created in PCSetUp_BDDCIPC, then does
       restriction -> interface correction -> extension, and restores the scatter */
    static PetscErrorCode PCApply_BDDCIPC(PC pc, Vec r, Vec x)
    {
      BDDCIPC_ctx bddcipc_ctx;
      PC_IS      *pcis;
      VecScatter  tmps;

      PetscFunctionBegin;
      PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
      pcis              = (PC_IS *)(bddcipc_ctx->bddc->data);
      tmps              = pcis->global_to_B;
      pcis->global_to_B = bddcipc_ctx->g2l;
      PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
      PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_FALSE));
      PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
      pcis->global_to_B = tmps;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCApplyTranspose_BDDCIPC - Same as PCApply_BDDCIPC but applies the transposed interface
       correction (PETSC_TRUE flag to PCBDDCApplyInterfacePreconditioner) */
    static PetscErrorCode PCApplyTranspose_BDDCIPC(PC pc, Vec r, Vec x)
    {
      BDDCIPC_ctx bddcipc_ctx;
      PC_IS      *pcis;
      VecScatter  tmps;

      PetscFunctionBegin;
      PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
      pcis              = (PC_IS *)(bddcipc_ctx->bddc->data);
      tmps              = pcis->global_to_B;
      pcis->global_to_B = bddcipc_ctx->g2l;
      PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
      PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_TRUE));
      PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
      pcis->global_to_B = tmps;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCDestroy_BDDCIPC - Frees the shell context: inner BDDC PC, interface scatter, and the
       context structure itself */
    static PetscErrorCode PCDestroy_BDDCIPC(PC pc)
    {
      BDDCIPC_ctx bddcipc_ctx;

      PetscFunctionBegin;
      PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
      PetscCall(PCDestroy(&bddcipc_ctx->bddc));
      PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
      PetscCall(PetscFree(bddcipc_ctx));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /*@
      PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system

      Collective

      Input Parameters:
    + fetidp_mat      - the FETI-DP matrix obtained by a call to `PCBDDCCreateFETIDPOperators()`
    - fetidp_flux_sol - the solution of the FETI-DP linear system

      Output Parameter:
    . standard_sol - the solution defined on the physical domain

      Level: developer

    .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetRHS()`
    @*/
    PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
    {
      FETIDPMat_ctx mat_ctx;

      PetscFunctionBegin;
      PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
      PetscValidHeaderSpecific(fetidp_flux_sol, VEC_CLASSID, 2);
      PetscValidHeaderSpecific(standard_sol, VEC_CLASSID, 3);
      PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
      /* dispatch to the method composed on the owning PC (PCBDDCMatFETIDPGetSolution_BDDC) */
      PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetSolution_C", (Mat, Vec, Vec), (fetidp_mat, fetidp_flux_sol, standard_sol));
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /* PCBDDCCreateFETIDPOperators_BDDC - Implementation behind PCBDDCCreateFETIDPOperators():
       builds the shell FETI-DP matrix and its (shell or fieldsplit) preconditioner */
    static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc)
    {
      FETIDPMat_ctx fetidpmat_ctx;
      Mat           newmat;
      FETIDPPC_ctx  fetidppc_ctx;
      PC            newpc;
      MPI_Comm      comm;

      PetscFunctionBegin;
      PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
      /* FETI-DP matrix */
      PetscCall(PCBDDCCreateFETIDPMatContext(pc, &fetidpmat_ctx));
      fetidpmat_ctx->fully_redundant = fully_redundant;
      PetscCall(PCBDDCSetupFETIDPMatContext(fetidpmat_ctx));
      /* shell matrix named "F" (standard FETI-DP) or "G" (multipliers-only/saddle point case) */
      PetscCall(MatCreateShell(comm, fetidpmat_ctx->n, fetidpmat_ctx->n, fetidpmat_ctx->N, fetidpmat_ctx->N, fetidpmat_ctx, &newmat));
      PetscCall(PetscObjectSetName((PetscObject)newmat, !fetidpmat_ctx->l2g_lambda_only ? "F" : "G"));
      PetscCall(MatShellSetOperation(newmat, MATOP_MULT, (void (*)(void))FETIDPMatMult));
      PetscCall(MatShellSetOperation(newmat, MATOP_MULT_TRANSPOSE, (void (*)(void))FETIDPMatMultTranspose));
      PetscCall(MatShellSetOperation(newmat, MATOP_DESTROY, (void (*)(void))PCBDDCDestroyFETIDPMat));
      /* propagate MatOptions */
      {
        PC_BDDC  *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data;
        PetscBool isset, issym;

        PetscCall(MatIsSymmetricKnown(pc->mat, &isset, &issym));
        if ((isset && issym) || pcbddc->symmetric_primal) PetscCall(MatSetOption(newmat, MAT_SYMMETRIC, PETSC_TRUE));
      }
      PetscCall(MatSetOptionsPrefix(newmat, prefix));
      PetscCall(MatAppendOptionsPrefix(newmat, "fetidp_"));
      PetscCall(MatSetUp(newmat));
      /* FETI-DP preconditioner */
      PetscCall(PCBDDCCreateFETIDPPCContext(pc, &fetidppc_ctx));
      PetscCall(PCBDDCSetupFETIDPPCContext(newmat, fetidppc_ctx));
      PetscCall(PCCreate(comm, &newpc));
      PetscCall(PCSetOperators(newpc, newmat, newmat));
      PetscCall(PCSetOptionsPrefix(newpc, prefix));
      PetscCall(PCAppendOptionsPrefix(newpc, "fetidp_"));
      PetscCall(PCSetErrorIfFailure(newpc, pc->erroriffailure));
      if (!fetidpmat_ctx->l2g_lambda_only) { /* standard FETI-DP */
        PetscCall(PCSetType(newpc, PCSHELL));
        PetscCall(PCShellSetName(newpc, "FETI-DP multipliers"));
        PetscCall(PCShellSetContext(newpc, fetidppc_ctx));
        PetscCall(PCShellSetApply(newpc, FETIDPPCApply));
        PetscCall(PCShellSetApplyTranspose(newpc, FETIDPPCApplyTranspose));
        PetscCall(PCShellSetView(newpc, FETIDPPCView));
        PetscCall(PCShellSetDestroy(newpc, PCBDDCDestroyFETIDPPC));
      } else { /* saddle-point FETI-DP */
        Mat       M;
        PetscInt  psize;
        PetscBool fake = PETSC_FALSE, isfieldsplit;

        PetscCall(ISViewFromOptions(fetidpmat_ctx->lagrange, NULL, "-lag_view"));
        PetscCall(ISViewFromOptions(fetidpmat_ctx->pressure, NULL, "-press_view"));
        /* pressure "mass matrix" stashed on the PC by KSPFETIDP */
        PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_PPmat", (PetscObject *)&M));
        PetscCall(PCSetType(newpc, PCFIELDSPLIT));
        PetscCall(PCFieldSplitSetIS(newpc, "lag", fetidpmat_ctx->lagrange));
        PetscCall(PCFieldSplitSetIS(newpc, "p", fetidpmat_ctx->pressure));
        PetscCall(PCFieldSplitSetType(newpc, PC_COMPOSITE_SCHUR));
        PetscCall(PCFieldSplitSetSchurFactType(newpc, PC_FIELDSPLIT_SCHUR_FACT_DIAG));
        PetscCall(ISGetSize(fetidpmat_ctx->pressure, &psize));
        if (psize != M->rmap->N) {
          /* sizes do not match: hand fieldsplit an empty placeholder matrix ("fake")
             and wire the real preconditioner up later through the BDDCIPC shell */
          Mat      M2;
          PetscInt lpsize;

          fake = PETSC_TRUE;
          PetscCall(ISGetLocalSize(fetidpmat_ctx->pressure, &lpsize));
          PetscCall(MatCreate(comm, &M2));
          PetscCall(MatSetType(M2, MATAIJ));
          PetscCall(MatSetSizes(M2, lpsize, lpsize, psize, psize));
          PetscCall(MatSetUp(M2));
          PetscCall(MatAssemblyBegin(M2, MAT_FINAL_ASSEMBLY));
          PetscCall(MatAssemblyEnd(M2, MAT_FINAL_ASSEMBLY));
          PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M2));
          PetscCall(MatDestroy(&M2));
        } else {
          PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M));
        }
        PetscCall(PCFieldSplitSetSchurScale(newpc, 1.0));

        /* we need to setfromoptions and setup here to access the blocks */
        PetscCall(PCSetFromOptions(newpc));
        PetscCall(PCSetUp(newpc));

        /* user may have changed the type (e.g. -fetidp_pc_type none) */
        PetscCall(PetscObjectTypeCompare((PetscObject)newpc, PCFIELDSPLIT, &isfieldsplit));
        if (isfieldsplit) {
          KSP      *ksps;
          PC        ppc, lagpc;
          PetscInt  nn;
          PetscBool ismatis, matisok = PETSC_FALSE, check = PETSC_FALSE;

          /* set the solver for the (0,0) block */
          PetscCall(PCFieldSplitSchurGetSubKSP(newpc, &nn, &ksps));
          if (!nn) { /* not of type PC_COMPOSITE_SCHUR */
            PetscCall(PCFieldSplitGetSubKSP(newpc, &nn, &ksps));
            if (!fake) { /* pass pmat to the pressure solver */
              Mat F;

              PetscCall(KSPGetOperators(ksps[1], &F, NULL));
              PetscCall(KSPSetOperators(ksps[1], F, M));
            }
          } else {
            PetscBool issym, isset;
            Mat       S;

            /* propagate known symmetry to the Schur complement operator */
            PetscCall(PCFieldSplitSchurGetS(newpc, &S));
            PetscCall(MatIsSymmetricKnown(newmat, &isset, &issym));
            if (isset) PetscCall(MatSetOption(S, MAT_SYMMETRIC, issym));
          }
          PetscCall(KSPGetPC(ksps[0], &lagpc));
          PetscCall(PCSetType(lagpc, PCSHELL));
          PetscCall(PCShellSetName(lagpc, "FETI-DP multipliers"));
          PetscCall(PCShellSetContext(lagpc, fetidppc_ctx));
          PetscCall(PCShellSetApply(lagpc, FETIDPPCApply));
          PetscCall(PCShellSetApplyTranspose(lagpc, FETIDPPCApplyTranspose));
          PetscCall(PCShellSetView(lagpc, FETIDPPCView));
          PetscCall(PCShellSetDestroy(lagpc, PCBDDCDestroyFETIDPPC));

          /* Olof's idea: interface Schur complement preconditioner for the mass matrix */
          PetscCall(KSPGetPC(ksps[1], &ppc));
          if (fake) {
            BDDCIPC_ctx    bddcipc_ctx;
            PetscContainer c;

            matisok = PETSC_TRUE;

            /* create inner BDDC solver */
            PetscCall(PetscNew(&bddcipc_ctx));
            PetscCall(PCCreate(comm, &bddcipc_ctx->bddc));
            PetscCall(PCSetType(bddcipc_ctx->bddc, PCBDDC));
            PetscCall(PCSetOperators(bddcipc_ctx->bddc, M, M));
            PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_pCSR", (PetscObject *)&c));
            PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
            if (c && ismatis) {
              /* reuse the CSR adjacency stashed by KSPFETIDP for the inner BDDC graph */
              Mat       lM;
              PetscInt *csr, n;

              PetscCall(MatISGetLocalMat(M, &lM));
              PetscCall(MatGetSize(lM, &n, NULL));
              PetscCall(PetscContainerGetPointer(c, (void **)&csr));
              PetscCall(PCBDDCSetLocalAdjacencyGraph(bddcipc_ctx->bddc, n, csr, csr + (n + 1), PETSC_COPY_VALUES));
              PetscCall(MatISRestoreLocalMat(M, &lM));
            }
            PetscCall(PCSetOptionsPrefix(bddcipc_ctx->bddc, ((PetscObject)ksps[1])->prefix));
            PetscCall(PCSetErrorIfFailure(bddcipc_ctx->bddc, pc->erroriffailure));
            PetscCall(PCSetFromOptions(bddcipc_ctx->bddc));

            /* wrap the interface application */
            PetscCall(PCSetType(ppc, PCSHELL));
            PetscCall(PCShellSetName(ppc, "FETI-DP pressure"));
            PetscCall(PCShellSetContext(ppc, bddcipc_ctx));
            PetscCall(PCShellSetSetUp(ppc, PCSetUp_BDDCIPC));
            PetscCall(PCShellSetApply(ppc, PCApply_BDDCIPC));
            PetscCall(PCShellSetApplyTranspose(ppc, PCApplyTranspose_BDDCIPC));
            PetscCall(PCShellSetView(ppc, PCView_BDDCIPC));
            PetscCall(PCShellSetDestroy(ppc, PCDestroy_BDDCIPC));
          }

          /* determine if we need to assemble M to construct a preconditioner */
          if (!matisok) {
            PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
            PetscCall(PetscObjectTypeCompareAny((PetscObject)ppc, &matisok, PCBDDC, PCJACOBI, PCNONE, PCMG, ""));
            if (ismatis && !matisok) PetscCall(MatConvert(M, MATAIJ, MAT_INPLACE_MATRIX, &M));
          }

          /* run the subproblems to check convergence */
          PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)newmat)->prefix, "-check_saddlepoint", &check, NULL));
          if (check) {
            PetscInt i;

            /* for each split, solve F x = F x0 with a random x0 and a "check_"-prefixed KSP */
            for (i = 0; i < nn; i++) {
              KSP       kspC;
              PC        npc;
              Mat       F, pF;
              Vec       x, y;
              PetscBool isschur, prec = PETSC_TRUE;

              PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksps[i]), &kspC));
              PetscCall(KSPSetNestLevel(kspC, pc->kspnestlevel));
              PetscCall(KSPSetOptionsPrefix(kspC, ((PetscObject)ksps[i])->prefix));
              PetscCall(KSPAppendOptionsPrefix(kspC, "check_"));
              PetscCall(KSPGetOperators(ksps[i], &F, &pF));
              PetscCall(PetscObjectTypeCompare((PetscObject)F, MATSCHURCOMPLEMENT, &isschur));
              if (isschur) {
                /* rebuild an equivalent Schur complement with its own inner KSP wrapped in PCKSP */
                KSP  kspS, kspS2;
                Mat  A00, pA00, A10, A01, A11;
                char prefix[256];

                PetscCall(MatSchurComplementGetKSP(F, &kspS));
                PetscCall(MatSchurComplementGetSubMatrices(F, &A00, &pA00, &A01, &A10, &A11));
                PetscCall(MatCreateSchurComplement(A00, pA00, A01, A10, A11, &F));
                PetscCall(MatSchurComplementGetKSP(F, &kspS2));
                PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sschur_", ((PetscObject)kspC)->prefix));
                PetscCall(KSPSetOptionsPrefix(kspS2, prefix));
                PetscCall(KSPGetPC(kspS2, &npc));
                PetscCall(PCSetType(npc, PCKSP));
                PetscCall(PCKSPSetKSP(npc, kspS));
                PetscCall(KSPSetFromOptions(kspS2));
                PetscCall(KSPGetPC(kspS2, &npc));
                PetscCall(PCSetUseAmat(npc, PETSC_TRUE));
              } else {
                /* take a reference so the unconditional MatDestroy below is balanced */
                PetscCall(PetscObjectReference((PetscObject)F));
              }
              PetscCall(KSPSetFromOptions(kspC));
              PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)kspC)->prefix, "-preconditioned", &prec, NULL));
              if (prec) {
                PetscCall(KSPGetPC(ksps[i], &npc));
                PetscCall(KSPSetPC(kspC, npc));
              }
              PetscCall(KSPSetOperators(kspC, F, pF));
              PetscCall(MatCreateVecs(F, &x, &y));
              PetscCall(VecSetRandom(x, NULL));
              PetscCall(MatMult(F, x, y));
              PetscCall(KSPSolve(kspC, y, x));
              PetscCall(KSPCheckSolve(kspC, npc, x));
              PetscCall(KSPDestroy(&kspC));
              PetscCall(MatDestroy(&F));
              PetscCall(VecDestroy(&x));
              PetscCall(VecDestroy(&y));
            }
          }
          PetscCall(PetscFree(ksps));
        }
      }
      /* return pointers for objects created */
      *fetidp_mat = newmat;
      *fetidp_pc  = newpc;
      PetscFunctionReturn(PETSC_SUCCESS);
    }

    /*@C
      PCBDDCCreateFETIDPOperators - Create FETI-DP operators

      Collective

Input Parameters: 2628 + pc - the `PCBDDC` preconditioning context (setup should have been called before) 2629 . fully_redundant - true for a fully redundant set of Lagrange multipliers 2630 - prefix - optional options database prefix for the objects to be created (can be `NULL`) 2631 2632 Output Parameters: 2633 + fetidp_mat - shell FETI-DP matrix object 2634 - fetidp_pc - shell Dirichlet preconditioner for FETI-DP matrix 2635 2636 Level: developer 2637 2638 Note: 2639 Currently the only operations provided for FETI-DP matrix are `MatMult()` and `MatMultTranspose()` 2640 2641 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()` 2642 @*/ 2643 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc) 2644 { 2645 PetscFunctionBegin; 2646 PetscValidHeaderSpecific(pc, PC_CLASSID, 1); 2647 PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You must call PCSetup_BDDC() first"); 2648 PetscUseMethod(pc, "PCBDDCCreateFETIDPOperators_C", (PC, PetscBool, const char *, Mat *, PC *), (pc, fully_redundant, prefix, fetidp_mat, fetidp_pc)); 2649 PetscFunctionReturn(PETSC_SUCCESS); 2650 } 2651 2652 /*MC 2653 PCBDDC - Balancing Domain Decomposition by Constraints preconditioners, {cite}`dohrmann2007approximate`, {cite}`klawonn2006dual`, {cite}`mandel2008multispace` 2654 2655 Requires `MATIS` matrices (Pmat) with local matrices (inside the `MATIS`) of type `MATSEQAIJ`, `MATSEQBAIJ` or `MATSEQSBAIJ` 2656 2657 It also works with unsymmetric and indefinite problems. 2658 2659 Unlike 'conventional' interface preconditioners, `PCBDDC` iterates over all degrees of freedom, not just those on the interface. This allows the use 2660 of approximate solvers on the subdomains. 

  Approximate local solvers are automatically adapted (see {cite}`dohrmann2007approximate`) if the user has attached a nullspace object to the subdomain matrices, and informed
  `PCBDDC` that approximate solvers are being used (via the command line).

  Boundary nodes are split in vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes.
  The latter can be customized by using `PCBDDCSetLocalAdjacencyGraph()`

  Additional information on dofs can be provided by using `PCBDDCSetDofsSplitting()`, `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, and
  `PCBDDCSetPrimalVerticesIS()` and their local counterparts.

  Constraints can be customized by attaching a `MatNullSpace` object to the `MATIS` matrix via `MatSetNearNullSpace()`. Non-singular modes are retained via SVD.

  Change of basis is performed similarly to {cite}`klawonn2006dual` when requested. When more than one constraint is present on a single connected component
  (i.e. an edge or a face), a robust method based on local QR factorizations is used.
  User defined change of basis can be passed to `PCBDDC` with `PCBDDCSetChangeOfBasisMat()`

  The PETSc implementation also supports multilevel `PCBDDC` {cite}`mandel2008multispace`. Coarse grids are partitioned using a `MatPartitioning` object.

  Adaptive selection of primal constraints is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present.
  Future versions of the code will also consider using PASTIX.

  An experimental interface to the FETI-DP method is available. FETI-DP operators can be created using `PCBDDCCreateFETIDPOperators()`.
  A stand-alone class for the FETI-DP method will be provided in the next releases.

  Options Database Keys:
+ -pc_bddc_use_vertices <true> - use or not vertices in primal space
. -pc_bddc_use_edges <true> - use or not edges in primal space
. -pc_bddc_use_faces <false> - use or not faces in primal space
. -pc_bddc_symmetric <true> - symmetric computation of primal basis functions. Specify false for unsymmetric problems
. -pc_bddc_use_change_of_basis <false> - use change of basis approach (on edges only)
. -pc_bddc_use_change_on_faces <false> - use change of basis approach on faces if change of basis has been requested
. -pc_bddc_switch_static <false> - switches from M_2 (default) to M_3 operator (see reference article [1])
. -pc_bddc_levels <0> - maximum number of levels for multilevel
. -pc_bddc_coarsening_ratio <8> - number of subdomains which will be aggregated together at the coarser level (e.g. H/h ratio at the coarser level, significant only in the multilevel case)
. -pc_bddc_coarse_redistribute <0> - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
. -pc_bddc_use_deluxe_scaling <false> - use deluxe scaling
. -pc_bddc_schur_layers <\-1> - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
. -pc_bddc_adaptive_threshold <0.0> - when a value different than zero is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS or MKL_PARDISO installed)
- -pc_bddc_check_level <0> - set verbosity level of debugging output

  Options for Dirichlet, Neumann or coarse solver can be set using the appropriate options prefix
.vb
      -pc_bddc_dirichlet_
      -pc_bddc_neumann_
      -pc_bddc_coarse_
.ve
  e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. `PCBDDC` uses by default `KSPPREONLY` and `PCLU`.

  When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified using the options prefix
.vb
      -pc_bddc_dirichlet_lN_
      -pc_bddc_neumann_lN_
      -pc_bddc_coarse_lN_
.ve
  Note that level number ranges from the finest (0) to the coarsest (N).
  In order to specify options for the `PCBDDC` operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_l
  to the option, e.g.
.vb
  -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3
.ve
  will use a threshold of 5 for constraints' selection at the first coarse level and will redistribute the coarse problem of the first coarse level on 3 processors

  Level: intermediate

  Contributed by:
  Stefano Zampini

.seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS`, `PCLU`, `PCGAMG`, `PC`, `PCBDDCSetLocalAdjacencyGraph()`, `PCBDDCSetDofsSplitting()`,
          `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetPrimalVerticesIS()`, `MatNullSpace`, `MatSetNearNullSpace()`,
          `PCBDDCSetChangeOfBasisMat()`, `PCBDDCCreateFETIDPOperators()`, `PCNN`
M*/

/* Constructor for the PCBDDC preconditioner: allocates the context, sets nonzero defaults,
   fills the ops table, and registers all composed (PCBDDCSetXXX_C) methods */
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  PetscCall(PetscNew(&pcbddc));
  pc->data = pcbddc;

  /* PC_BDDC embeds a PC_IS at its head: initialize the common interface-solver part first */
  PetscCall(PCISInitialize(pc));

  /* create local graph structure */
  PetscCall(PCBDDCGraphCreate(&pcbddc->mat_graph));

  /* BDDC nonzero defaults (fields not listed rely on PetscNew zero-initialization) */
  pcbddc->use_nnsp                  = PETSC_TRUE;
  pcbddc->use_local_adj             = PETSC_TRUE;
  pcbddc->use_vertices              = PETSC_TRUE;
  pcbddc->use_edges                 = PETSC_TRUE;
  pcbddc->symmetric_primal          = PETSC_TRUE;
  pcbddc->vertex_size               = 1;
  pcbddc->recompute_topography      = PETSC_TRUE;
  pcbddc->coarse_size               = -1; /* -1: not yet computed */
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio          = 8;
  pcbddc->coarse_eqs_per_proc       = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield                  = -1; /* -1: no Nedelec field specified */
  pcbddc->nedglobal                 = PETSC_TRUE;
  pcbddc->graphmaxcount             = PETSC_MAX_INT; /* no limit on subdomains sharing a component */
  pcbddc->sub_schurs_layers         = -1;            /* -1: original (non-economic) deluxe scaling */
  pcbddc->adaptive_threshold[0]     = 0.0;
  pcbddc->adaptive_threshold[1]     = 0.0;

  /* function pointers */
  pc->ops->apply               = PCApply_BDDC;
  pc->ops->applytranspose      = PCApplyTranspose_BDDC;
  pc->ops->setup               = PCSetUp_BDDC;
  pc->ops->destroy             = PCDestroy_BDDC;
  pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
  pc->ops->view                = PCView_BDDC;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->ops->presolve            = PCPreSolve_BDDC;
  pc->ops->postsolve           = PCPostSolve_BDDC;
  pc->ops->reset               = PCReset_BDDC;

  /* composing function */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", PCBDDCSetDiscreteGradient_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", PCBDDCSetDivergenceMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", PCBDDCSetChangeOfBasisMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", PCBDDCSetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", PCBDDCSetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", PCBDDCGetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", PCBDDCGetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", PCBDDCSetCoarseningRatio_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", PCBDDCSetLevel_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", PCBDDCSetUseExactDirichlet_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", PCBDDCSetLevels_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", PCBDDCSetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", PCBDDCSetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", PCBDDCSetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", PCBDDCSetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", PCBDDCGetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", PCBDDCGetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", PCBDDCGetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", PCBDDCGetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", PCBDDCSetDofsSplitting_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", PCBDDCSetDofsSplittingLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", PCBDDCSetLocalAdjacencyGraph_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", PCBDDCCreateFETIDPOperators_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", PCBDDCMatFETIDPGetRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", PCBDDCMatFETIDPGetSolution_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", PCPreSolveChangeRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_BDDC));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCBDDCInitializePackage - This function initializes everything in the `PCBDDC` package. It is called
  from `PCInitializePackage()`.

  Level: developer

.seealso: [](ch_ksp), `PetscInitialize()`, `PCBDDCFinalizePackage()`
@*/
PetscErrorCode PCBDDCInitializePackage(void)
{
  int i;

  PetscFunctionBegin;
  /* idempotent: subsequent calls are no-ops */
  if (PCBDDCPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
  PCBDDCPackageInitialized = PETSC_TRUE;
  PetscCall(PetscRegisterFinalize(PCBDDCFinalizePackage));

  /* general events (level-0 entries of the per-level event arrays) */
  PetscCall(PetscLogEventRegister("PCBDDCTopo", PC_CLASSID, &PC_BDDC_Topology[0]));
  PetscCall(PetscLogEventRegister("PCBDDCLKSP", PC_CLASSID, &PC_BDDC_LocalSolvers[0]));
  PetscCall(PetscLogEventRegister("PCBDDCLWor", PC_CLASSID, &PC_BDDC_LocalWork[0]));
  PetscCall(PetscLogEventRegister("PCBDDCCorr", PC_CLASSID, &PC_BDDC_CorrectionSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCASet", PC_CLASSID, &PC_BDDC_ApproxSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCAApp", PC_CLASSID, &PC_BDDC_ApproxApply[0]));
  PetscCall(PetscLogEventRegister("PCBDDCCSet", PC_CLASSID, &PC_BDDC_CoarseSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCCKSP", PC_CLASSID, &PC_BDDC_CoarseSolver[0]));
  PetscCall(PetscLogEventRegister("PCBDDCAdap", PC_CLASSID, &PC_BDDC_AdaptiveSetUp[0]));
  PetscCall(PetscLogEventRegister("PCBDDCScal", PC_CLASSID, &PC_BDDC_Scaling[0]));
  PetscCall(PetscLogEventRegister("PCBDDCSchr", PC_CLASSID, &PC_BDDC_Schurs[0]));
  PetscCall(PetscLogEventRegister("PCBDDCDirS", PC_CLASSID, &PC_BDDC_Solves[0][0]));
  PetscCall(PetscLogEventRegister("PCBDDCNeuS", PC_CLASSID, &PC_BDDC_Solves[0][1]));
  PetscCall(PetscLogEventRegister("PCBDDCCoaS", PC_CLASSID, &PC_BDDC_Solves[0][2]));
  /* per-level events: same set as above, suffixed with the level number "lNN" */
  for (i = 1; i < PETSC_PCBDDC_MAXLEVELS; i++) {
    char ename[32];

    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCTopo l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Topology[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLKSP l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalSolvers[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLWor l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalWork[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCorr l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CorrectionSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCASet l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAApp l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxApply[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCSet l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCKSP l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSolver[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAdap l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_AdaptiveSetUp[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCScal l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Scaling[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCSchr l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Schurs[i]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCDirS l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][0]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCNeuS l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][1]));
    PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCoaS l%02d", i));
    PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][2]));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  PCBDDCFinalizePackage - This function frees everything from the `PCBDDC` package. It is
  called from `PetscFinalize()` automatically.

  Level: developer

.seealso: [](ch_ksp), `PetscFinalize()`, `PCBDDCInitializePackage()`
@*/
PetscErrorCode PCBDDCFinalizePackage(void)
{
  PetscFunctionBegin;
  /* events need no explicit unregistration; resetting the flag allows
     re-initialization if PETSc is initialized again */
  PCBDDCPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}