1 #include <petsc/private/pcbddcimpl.h> /*I "petscpc.h" I*/ /* header file for Fortran wrappers */
2 #include <petsc/private/pcbddcprivateimpl.h>
3 #include <petscblaslapack.h>
4
/* Guards one-time initialization of the PCBDDC package (log events below) */
static PetscBool PCBDDCPackageInitialized = PETSC_FALSE;

/* Set once the citation below has been registered — presumably via PetscCitationsRegister(); registration site not visible in this chunk */
static PetscBool cited = PETSC_FALSE;
/* BibTeX entry for the PCBDDC reference publication.
   Note: the case-protection group in the title must be {PETSc}, not {PETS}c,
   otherwise BibTeX styles that lowercase titles would render "PETSc" incorrectly. */
static const char citation[] = "@article{ZampiniPCBDDC,\n"
                               "author = {Stefano Zampini},\n"
                               "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETSc}},\n"
                               "journal = {SIAM Journal on Scientific Computing},\n"
                               "volume = {38},\n"
                               "number = {5},\n"
                               "pages = {S282-S306},\n"
                               "year = {2016},\n"
                               "doi = {10.1137/15M1025785},\n"
                               "URL = {http://dx.doi.org/10.1137/15M1025785},\n"
                               "eprint = {http://dx.doi.org/10.1137/15M1025785}\n"
                               "}\n";
20
/* Per-level profiling events for the multilevel BDDC hierarchy (one slot per level) */
PetscLogEvent PC_BDDC_Topology[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_LocalSolvers[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_LocalWork[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CorrectionSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_ApproxSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_ApproxApply[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CoarseSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_CoarseSolver[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_AdaptiveSetUp[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Scaling[PETSC_PCBDDC_MAXLEVELS];
PetscLogEvent PC_BDDC_Schurs[PETSC_PCBDDC_MAXLEVELS];
/* Three event slots per level — presumably one per local solver kind; confirm at the registration/logging sites */
PetscLogEvent PC_BDDC_Solves[PETSC_PCBDDC_MAXLEVELS][3];

/* Strings for PCBDDCInterfaceExtType in the standard PetscEnum registration layout:
   value names, then the type name, the enum prefix, and a NULL terminator */
const char *const PCBDDCInterfaceExtTypes[] = {"DIRICHLET", "LUMP", "PCBDDCInterfaceExtType", "PC_BDDC_INTERFACE_EXT_", NULL};

/* Forward declaration; PCApply_BDDC is defined later in this file */
static PetscErrorCode PCApply_BDDC(PC, Vec, Vec);
37
/* Process all -pc_bddc_* command line options for a PCBDDC preconditioner.
   Reads current values from the PC_BDDC context and writes the (possibly updated)
   values back; options that have a public setter (coarsening ratio, levels) are
   routed through it so any associated logic runs. */
static PetscErrorCode PCSetFromOptions_BDDC(PC pc, PetscOptionItems PetscOptionsObject)
{
  PC_BDDC  *pcbddc = (PC_BDDC *)pc->data;
  PetscInt  nt, i;
  char      load[PETSC_MAX_PATH_LEN] = {'\0'};
  PetscBool flg;

  PetscFunctionBegin;
  PetscOptionsHeadBegin(PetscOptionsObject, "BDDC options");
  /* Load customization from binary file (debugging) */
  PetscCall(PetscOptionsString("-pc_bddc_load", "Load customization from file (intended for debug)", "none", load, load, sizeof(load), &flg));
  if (flg) {
    size_t len;

    /* An empty string means load with a default/NULL filename */
    PetscCall(PetscStrlen(load, &len));
    PetscCall(PCBDDCLoadOrViewCustomization(pc, PETSC_TRUE, len ? load : NULL));
  }
  /* Verbose debugging */
  PetscCall(PetscOptionsInt("-pc_bddc_check_level", "Verbose output for PCBDDC (intended for debug)", "none", pcbddc->dbg_flag, &pcbddc->dbg_flag, NULL));
  /* Approximate solvers */
  PetscCall(PetscOptionsEnum("-pc_bddc_interface_ext_type", "Use DIRICHLET or LUMP to extend interface corrections to interior", "PCBDDCSetInterfaceExtType", PCBDDCInterfaceExtTypes, (PetscEnum)pcbddc->interface_extension, (PetscEnum *)&pcbddc->interface_extension, NULL));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET) {
    PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate", "Inform PCBDDC that we are using approximate Dirichlet solvers", "none", pcbddc->NullSpace_corr[0], &pcbddc->NullSpace_corr[0], NULL));
    PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate_scale", "Inform PCBDDC that we need to scale the Dirichlet solve", "none", pcbddc->NullSpace_corr[1], &pcbddc->NullSpace_corr[1], NULL));
  } else {
    /* This flag is needed/implied by lumping */
    pcbddc->switch_static = PETSC_TRUE;
  }
  PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate", "Inform PCBDDC that we are using approximate Neumann solvers", "none", pcbddc->NullSpace_corr[2], &pcbddc->NullSpace_corr[2], NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale", "Inform PCBDDC that we need to scale the Neumann solve", "none", pcbddc->NullSpace_corr[3], &pcbddc->NullSpace_corr[3], NULL));
  /* Primal space customization */
  PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph", "Use or not adjacency graph of local mat for interface analysis", "none", pcbddc->use_local_adj, &pcbddc->use_local_adj, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_local_mat_graph_square", "Square adjacency graph of local mat for interface analysis", "none", pcbddc->local_adj_square, &pcbddc->local_adj_square, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount", "Maximum number of shared subdomains for a connected component", "none", pcbddc->graphmaxcount, &pcbddc->graphmaxcount, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_corner_selection", "Activates face-based corner selection", "none", pcbddc->corner_selection, &pcbddc->corner_selection, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_vertices", "Use or not corner dofs in coarse space", "none", pcbddc->use_vertices, &pcbddc->use_vertices, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_edges", "Use or not edge constraints in coarse space", "none", pcbddc->use_edges, &pcbddc->use_edges, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_faces", "Use or not face constraints in coarse space", "none", pcbddc->use_faces, &pcbddc->use_faces, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_vertex_size", "Connected components smaller or equal to vertex size will be considered as primal vertices", "none", pcbddc->vertex_size, &pcbddc->vertex_size, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp", "Use near null space attached to the matrix to compute constraints", "none", pcbddc->use_nnsp, &pcbddc->use_nnsp, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp_true", "Use near null space attached to the matrix to compute constraints as is", "none", pcbddc->use_nnsp_true, &pcbddc->use_nnsp_true, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_qr_single", "Use QR factorization for single constraints on cc (QR is always used when multiple constraints are present)", "none", pcbddc->use_qr_single, &pcbddc->use_qr_single, NULL));
  /* Change of basis */
  PetscCall(PetscOptionsBool("-pc_bddc_use_change_of_basis", "Use or not internal change of basis on local edge nodes", "none", pcbddc->use_change_of_basis, &pcbddc->use_change_of_basis, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_change_on_faces", "Use or not internal change of basis on local face nodes", "none", pcbddc->use_change_on_faces, &pcbddc->use_change_on_faces, NULL));
  /* Face change of basis only makes sense when the edge change of basis is active */
  if (!pcbddc->use_change_of_basis) pcbddc->use_change_on_faces = PETSC_FALSE;
  /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */
  PetscCall(PetscOptionsBool("-pc_bddc_switch_static", "Switch on static condensation ops around the interface preconditioner", "none", pcbddc->switch_static, &pcbddc->switch_static, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_per_proc", "Target number of equations per process for coarse problem redistribution (significant only at the coarsest level)", "none", pcbddc->coarse_eqs_per_proc, &pcbddc->coarse_eqs_per_proc, NULL));
  /* These two go through the public setters so their validation/side effects run */
  i = pcbddc->coarsening_ratio;
  PetscCall(PetscOptionsInt("-pc_bddc_coarsening_ratio", "Set coarsening ratio used in multilevel coarsening", "PCBDDCSetCoarseningRatio", i, &i, NULL));
  PetscCall(PCBDDCSetCoarseningRatio(pc, i));
  i = pcbddc->max_levels;
  PetscCall(PetscOptionsInt("-pc_bddc_levels", "Set maximum number of levels for multilevel", "PCBDDCSetLevels", i, &i, NULL));
  PetscCall(PCBDDCSetLevels(pc, i));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_limit", "Set maximum number of equations on coarsest grid to aim for", "none", pcbddc->coarse_eqs_limit, &pcbddc->coarse_eqs_limit, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_coarse_estimates", "Use estimated eigenvalues for coarse problem", "none", pcbddc->use_coarse_estimates, &pcbddc->use_coarse_estimates, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_use_deluxe_scaling", "Use deluxe scaling for BDDC", "none", pcbddc->use_deluxe_scaling, &pcbddc->use_deluxe_scaling, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_rebuild", "Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. define the interface without any adjacency)", "none", pcbddc->sub_schurs_rebuild, &pcbddc->sub_schurs_rebuild, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_schur_layers", "Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)", "none", pcbddc->sub_schurs_layers, &pcbddc->sub_schurs_layers, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_use_useradj", "Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)", "none", pcbddc->sub_schurs_use_useradj, &pcbddc->sub_schurs_use_useradj, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_schur_exact", "Whether or not to use the exact Schur complement instead of the reduced one (which excludes size 1 cc)", "none", pcbddc->sub_schurs_exact_schur, &pcbddc->sub_schurs_exact_schur, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_deluxe_zerorows", "Zero rows and columns of deluxe operators associated with primal dofs", "none", pcbddc->deluxe_zerorows, &pcbddc->deluxe_zerorows, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_deluxe_singlemat", "Collapse deluxe operators", "none", pcbddc->deluxe_singlemat, &pcbddc->deluxe_singlemat, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_adaptive_userdefined", "Use user-defined constraints (should be attached via MatSetNearNullSpace to pmat) in addition to those adaptively generated", "none", pcbddc->adaptive_userdefined, &pcbddc->adaptive_userdefined, NULL));
  /* Up to two thresholds may be given; a single value is replicated to both slots */
  nt = 2;
  PetscCall(PetscOptionsRealArray("-pc_bddc_adaptive_threshold", "Thresholds to be used for adaptive selection of constraints", "none", pcbddc->adaptive_threshold, &nt, NULL));
  if (nt == 1) pcbddc->adaptive_threshold[1] = pcbddc->adaptive_threshold[0];
  PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmin", "Minimum number of constraints per connected components", "none", pcbddc->adaptive_nmin, &pcbddc->adaptive_nmin, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmax", "Maximum number of constraints per connected components", "none", pcbddc->adaptive_nmax, &pcbddc->adaptive_nmax, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_symmetric", "Symmetric computation of primal basis functions", "none", pcbddc->symmetric_primal, &pcbddc->symmetric_primal, NULL));
  PetscCall(PetscOptionsInt("-pc_bddc_coarse_adj", "Number of processors where to map the coarse adjacency list", "none", pcbddc->coarse_adj_red, &pcbddc->coarse_adj_red, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_trick", "Apply the benign subspace trick to saddle point problems with discontinuous pressures", "none", pcbddc->benign_saddle_point, &pcbddc->benign_saddle_point, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_change", "Compute the pressure change of basis explicitly", "none", pcbddc->benign_change_explicit, &pcbddc->benign_change_explicit, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_benign_compute_correction", "Compute the benign correction during PreSolve", "none", pcbddc->benign_compute_correction, &pcbddc->benign_compute_correction, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_nonetflux", "Automatic computation of no-net-flux quadrature weights", "none", pcbddc->compute_nonetflux, &pcbddc->compute_nonetflux, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected", "Detects disconnected subdomains", "none", pcbddc->detect_disconnected, &pcbddc->detect_disconnected, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected_filter", "Filters out small entries in the local matrix when detecting disconnected subdomains", "none", pcbddc->detect_disconnected_filter, &pcbddc->detect_disconnected_filter, NULL));
  PetscCall(PetscOptionsBool("-pc_bddc_eliminate_dirichlet", "Whether or not we want to eliminate dirichlet dofs during presolve", "none", pcbddc->eliminate_dirdofs, &pcbddc->eliminate_dirdofs, NULL));
  PetscOptionsHeadEnd();
  PetscFunctionReturn(PETSC_SUCCESS);
}
120
/* ASCII viewer for PCBDDC: prints the configuration flags, and — once the PC has
   been set up — global statistics of the decomposition (interface size, dofs per
   type, coarsening ratios) plus the interior/correction/coarse sub-solvers. */
static PetscErrorCode PCView_BDDC(PC pc, PetscViewer viewer)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  /* NOTE(review): same data pointer viewed as PC_IS — relies on PC_BDDC embedding PC_IS at the start of its layout (see pcbddcimpl.h) */
  PC_IS *pcis = (PC_IS *)pc->data;
  PetscBool isascii;
  PetscSubcomm subcomm;
  PetscViewer subviewer;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
  /* ASCII viewer */
  if (isascii) {
    PetscMPIInt color, rank, size;
    /* loc[0..5] feed the sum/max/min reductions below; loc[6] (benign count) is reduced separately */
    PetscInt64  loc[7], gsum[6], gmax[6], gmin[6], totbenign;
    PetscScalar interface_size;
    PetscReal   ratio1 = 0., ratio2 = 0.;
    Vec         counter;

    if (!pc->setupcalled) PetscCall(PetscViewerASCIIPrintf(viewer, "  Partial information available: preconditioner has not been setup yet\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use verbose output: %" PetscInt_FMT "\n", pcbddc->dbg_flag));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use user-defined CSR: %d\n", !!pcbddc->mat_graph->nvtxs_csr));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use local mat graph: %d\n", pcbddc->use_local_adj && !pcbddc->mat_graph->nvtxs_csr));
    if (pcbddc->mat_graph->twodim) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Connectivity graph topological dimension: 2\n"));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Connectivity graph topological dimension: 3\n"));
    }
    if (pcbddc->graphmaxcount != PETSC_INT_MAX) PetscCall(PetscViewerASCIIPrintf(viewer, "  Graph max count: %" PetscInt_FMT "\n", pcbddc->graphmaxcount));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Corner selection: %d (selected %d)\n", pcbddc->corner_selection, pcbddc->corner_selected));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use vertices: %d (vertex size %" PetscInt_FMT ")\n", pcbddc->use_vertices, pcbddc->vertex_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use edges: %d\n", pcbddc->use_edges));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use faces: %d\n", pcbddc->use_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use true near null space: %d\n", pcbddc->use_nnsp_true));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use QR for single constraints on cc: %d\n", pcbddc->use_qr_single));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use change of basis on local edge nodes: %d\n", pcbddc->use_change_of_basis));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use change of basis on local face nodes: %d\n", pcbddc->use_change_on_faces));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  User defined change of basis matrix: %d\n", !!pcbddc->user_ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Has change of basis matrix: %d\n", !!pcbddc->ChangeOfBasisMatrix));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Eliminate dirichlet boundary dofs: %d\n", pcbddc->eliminate_dirdofs));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Switch on static condensation ops around the interface preconditioner: %d\n", pcbddc->switch_static));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use exact dirichlet trick: %d\n", pcbddc->use_exact_dirichlet_trick));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Interface extension: %s\n", PCBDDCInterfaceExtTypes[pcbddc->interface_extension]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Multilevel max levels: %" PetscInt_FMT "\n", pcbddc->max_levels));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Multilevel coarsening ratio: %" PetscInt_FMT "\n", pcbddc->coarsening_ratio));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use estimated eigs for coarse problem: %d\n", pcbddc->use_coarse_estimates));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use deluxe scaling: %d\n", pcbddc->use_deluxe_scaling));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use deluxe zerorows: %d\n", pcbddc->deluxe_zerorows));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use deluxe singlemat: %d\n", pcbddc->deluxe_singlemat));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Rebuild interface graph for Schur principal minors: %d\n", pcbddc->sub_schurs_rebuild));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Number of dofs' layers for the computation of principal minors: %" PetscInt_FMT "\n", pcbddc->sub_schurs_layers));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Use user CSR graph to compute successive layers: %d\n", pcbddc->sub_schurs_use_useradj));
    if (pcbddc->adaptive_threshold[1] != pcbddc->adaptive_threshold[0]) {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Adaptive constraint selection thresholds (active %d, userdefined %d): %g,%g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0], (double)pcbddc->adaptive_threshold[1]));
    } else {
      PetscCall(PetscViewerASCIIPrintf(viewer, "  Adaptive constraint selection threshold (active %d, userdefined %d): %g\n", pcbddc->adaptive_selection, pcbddc->adaptive_userdefined, (double)pcbddc->adaptive_threshold[0]));
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Min constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmin));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Max constraints / connected component: %" PetscInt_FMT "\n", pcbddc->adaptive_nmax));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Invert exact Schur complement for adaptive selection: %d\n", pcbddc->sub_schurs_exact_schur));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Symmetric computation of primal basis functions: %d\n", pcbddc->symmetric_primal));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Num. Procs. to map coarse adjacency list: %" PetscInt_FMT "\n", pcbddc->coarse_adj_red));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Coarse eqs per proc (significant at the coarsest level): %" PetscInt_FMT "\n", pcbddc->coarse_eqs_per_proc));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Detect disconnected: %d (filter %d)\n", pcbddc->detect_disconnected, pcbddc->detect_disconnected_filter));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Benign subspace trick: %d (change explicit %d)\n", pcbddc->benign_saddle_point, pcbddc->benign_change_explicit));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Benign subspace trick is active: %d\n", pcbddc->benign_have_null));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Algebraic computation of no-net-flux: %d\n", pcbddc->compute_nonetflux));
    /* Statistics below require the setup data; stop here otherwise */
    if (!pc->setupcalled) PetscFunctionReturn(PETSC_SUCCESS);

    /* compute interface size: scatter ones from the local interface into a global
       vector and sum (shared interface dofs contribute once per sharing process) */
    PetscCall(VecSet(pcis->vec1_B, 1.0));
    PetscCall(MatCreateVecs(pc->pmat, &counter, NULL));
    PetscCall(VecSet(counter, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, counter, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecSum(counter, &interface_size));
    PetscCall(VecDestroy(&counter));

    /* compute some statistics on the domain decomposition */
    /* gsum[0] is preset to 1 so the mean computations below do not divide by zero
       on processes where MPI_Reduce does not write the result (non-root ranks) */
    gsum[0] = 1;
    gsum[1] = gsum[2] = gsum[3] = gsum[4] = gsum[5] = 0;
    loc[0]  = !!pcis->n; /* 1 if this process owns dofs (is "active"), 0 otherwise */
    loc[1]  = pcis->n - pcis->n_B;
    loc[2]  = pcis->n_B;
    loc[3]  = pcbddc->local_primal_size;
    loc[4]  = pcis->n;
    loc[5]  = pcbddc->n_local_subs > 0 ? pcbddc->n_local_subs : (pcis->n ? 1 : 0);
    loc[6]  = pcbddc->benign_n;
    PetscCallMPI(MPI_Reduce(loc, gsum, 6, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    /* sentinels so inactive (empty) processes do not pollute the max/min reductions */
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = -1;
    PetscCallMPI(MPI_Reduce(loc, gmax, 6, MPIU_INT64, MPI_MAX, 0, PetscObjectComm((PetscObject)pc)));
    if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = PETSC_INT_MAX;
    PetscCallMPI(MPI_Reduce(loc, gmin, 6, MPIU_INT64, MPI_MIN, 0, PetscObjectComm((PetscObject)pc)));
    PetscCallMPI(MPI_Reduce(&loc[6], &totbenign, 1, MPIU_INT64, MPI_SUM, 0, PetscObjectComm((PetscObject)pc)));
    if (pcbddc->coarse_size) {
      ratio1 = pc->pmat->rmap->N / (1. * pcbddc->coarse_size);
      ratio2 = PetscRealPart(interface_size) / pcbddc->coarse_size;
    }
    PetscCall(PetscViewerASCIIPrintf(viewer, "********************************** STATISTICS AT LEVEL %" PetscInt_FMT " **********************************\n", pcbddc->current_level));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Global dofs sizes: all %" PetscInt_FMT " interface %" PetscInt_FMT " coarse %" PetscInt_FMT "\n", pc->pmat->rmap->N, (PetscInt)PetscRealPart(interface_size), pcbddc->coarse_size));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Coarsening ratios: all/coarse %" PetscInt_FMT " interface/coarse %" PetscInt_FMT "\n", (PetscInt)ratio1, (PetscInt)ratio2));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Active processes : %" PetscInt64_FMT "\n", gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Total subdomains : %" PetscInt64_FMT "\n", gsum[5]));
    if (pcbddc->benign_have_null) PetscCall(PetscViewerASCIIPrintf(viewer, "  Benign subs      : %" PetscInt64_FMT "\n", totbenign));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Dofs type        :\tMIN\tMAX\tMEAN\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Interior  dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[1], gmax[1], gsum[1] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Interface dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[2], gmax[2], gsum[2] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Primal    dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[3], gmax[3], gsum[3] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Local     dofs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[4], gmax[4], gsum[4] / gsum[0]));
    PetscCall(PetscViewerASCIIPrintf(viewer, "  Local     subs   :\t%" PetscInt64_FMT "\t%" PetscInt64_FMT "\n", gmin[5], gmax[5]));
    PetscCall(PetscViewerFlush(viewer));

    PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc), &rank));

    /* local solvers: viewed through a subviewer on rank 0 only */
    PetscCall(PetscViewerGetSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    if (rank == 0) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Interior solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_D, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Correction solver (rank 0)\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->ksp_R, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscObjectComm((PetscObject)pcbddc->ksp_D), &subviewer));
    /* the coarse problem can be handled by a different communicator: split the
       ranks into "has coarse ksp" / "does not" and view on the former subcomm */
    if (pcbddc->coarse_ksp) color = 1;
    else color = 0;
    PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));
    PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc), &subcomm));
    PetscCall(PetscSubcommSetNumber(subcomm, PetscMin(size, 2)));
    PetscCall(PetscSubcommSetTypeGeneral(subcomm, color, rank));
    PetscCall(PetscViewerGetSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    if (color == 1) {
      PetscCall(PetscViewerASCIIPrintf(subviewer, "--- Coarse solver\n"));
      PetscCall(PetscViewerASCIIPushTab(subviewer));
      PetscCall(KSPView(pcbddc->coarse_ksp, subviewer));
      PetscCall(PetscViewerASCIIPopTab(subviewer));
      PetscCall(PetscViewerFlush(subviewer));
    }
    PetscCall(PetscViewerRestoreSubViewer(viewer, PetscSubcommChild(subcomm), &subviewer));
    PetscCall(PetscSubcommDestroy(&subcomm));
    PetscCall(PetscViewerFlush(viewer));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
269
PCBDDCSetDiscreteGradient_BDDC(PC pc,Mat G,PetscInt order,PetscInt field,PetscBool global,PetscBool conforming)270 static PetscErrorCode PCBDDCSetDiscreteGradient_BDDC(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming)
271 {
272 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
273
274 PetscFunctionBegin;
275 PetscCall(PetscObjectReference((PetscObject)G));
276 PetscCall(MatDestroy(&pcbddc->discretegradient));
277 pcbddc->discretegradient = G;
278 pcbddc->nedorder = order > 0 ? order : -order;
279 pcbddc->nedfield = field;
280 pcbddc->nedglobal = global;
281 pcbddc->conforming = conforming;
282 PetscFunctionReturn(PETSC_SUCCESS);
283 }
284
285 /*@
286 PCBDDCSetDiscreteGradient - Sets the discrete gradient to be used by the `PCBDDC` preconditioner
287
288 Collective
289
290 Input Parameters:
291 + pc - the preconditioning context
292 . G - the discrete gradient matrix (in `MATAIJ` format)
293 . order - the order of the Nedelec space (1 for the lowest order)
294 . field - the field id of the Nedelec dofs (not used if the fields have not been specified)
295 . global - the type of global ordering for the rows of `G`
296 - conforming - whether the mesh is conforming or not
297
298 Level: advanced
299
300 Note:
301 The discrete gradient matrix `G` is used to analyze the subdomain edges, and it should not contain any zero entry.
302 For variable order spaces, the order should be set to zero.
303 If `global` is `PETSC_TRUE`, the rows of `G` should be given in global ordering for the whole dofs;
304 if `PETSC_FALSE`, the ordering should be global for the Nedelec field.
  In the latter case, it should hold gid[i] < gid[j] iff geid[i] < geid[j], with gid the global ordering for all the dofs
306 and geid the one for the Nedelec field.
307
308 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()`, `PCBDDCSetDofsSplittingLocal()`, `MATAIJ`, `PCBDDCSetDivergenceMat()`
309 @*/
PetscErrorCode PCBDDCSetDiscreteGradient(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(G, MAT_CLASSID, 2);
  /* scalar arguments must agree across the communicator (collective call) */
  PetscValidLogicalCollectiveInt(pc, order, 3);
  PetscValidLogicalCollectiveInt(pc, field, 4);
  PetscValidLogicalCollectiveBool(pc, global, 5);
  PetscValidLogicalCollectiveBool(pc, conforming, 6);
  PetscCheckSameComm(pc, 1, G, 2);
  /* dispatch to the BDDC implementation; a no-op for other PC types */
  PetscTryMethod(pc, "PCBDDCSetDiscreteGradient_C", (PC, Mat, PetscInt, PetscInt, PetscBool, PetscBool), (pc, G, order, field, global, conforming));
  PetscFunctionReturn(PETSC_SUCCESS);
}
323
PCBDDCSetDivergenceMat_BDDC(PC pc,Mat divudotp,PetscBool trans,IS vl2l)324 static PetscErrorCode PCBDDCSetDivergenceMat_BDDC(PC pc, Mat divudotp, PetscBool trans, IS vl2l)
325 {
326 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
327
328 PetscFunctionBegin;
329 PetscCall(PetscObjectReference((PetscObject)divudotp));
330 PetscCall(MatDestroy(&pcbddc->divudotp));
331 pcbddc->divudotp = divudotp;
332 pcbddc->divudotp_trans = trans;
333 pcbddc->compute_nonetflux = PETSC_TRUE;
334 if (vl2l) {
335 PetscCall(PetscObjectReference((PetscObject)vl2l));
336 PetscCall(ISDestroy(&pcbddc->divudotp_vl2l));
337 pcbddc->divudotp_vl2l = vl2l;
338 }
339 PetscFunctionReturn(PETSC_SUCCESS);
340 }
341
342 /*@
343 PCBDDCSetDivergenceMat - Sets the linear operator representing \int_\Omega \div {\bf u} \cdot p dx for the `PCBDDC` preconditioner
344
345 Collective
346
347 Input Parameters:
348 + pc - the preconditioning context
349 . divudotp - the matrix (must be of type `MATIS`)
350 . trans - if `PETSC_FALSE` (resp. `PETSC_TRUE`), then pressures are in the test (trial) space and velocities are in the trial (test) space.
351 - vl2l - optional index set describing the local (wrt the local matrix in `divudotp`) to local (wrt the local matrix
352 in the matrix used to construct the preconditioner) map for the velocities
353
354 Level: advanced
355
356 Notes:
357 This auxiliary matrix is used to compute quadrature weights representing the net-flux across subdomain boundaries
358
359 If `vl2l` is `NULL`, the local ordering for velocities in `divudotp` should match that of the matrix used to construct the preconditioner
360
361 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDiscreteGradient()`
362 @*/
PCBDDCSetDivergenceMat(PC pc,Mat divudotp,PetscBool trans,IS vl2l)363 PetscErrorCode PCBDDCSetDivergenceMat(PC pc, Mat divudotp, PetscBool trans, IS vl2l)
364 {
365 PetscBool ismatis;
366
367 PetscFunctionBegin;
368 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
369 PetscValidHeaderSpecific(divudotp, MAT_CLASSID, 2);
370 PetscCheckSameComm(pc, 1, divudotp, 2);
371 PetscValidLogicalCollectiveBool(pc, trans, 3);
372 if (vl2l) PetscValidHeaderSpecific(vl2l, IS_CLASSID, 4);
373 PetscCall(PetscObjectTypeCompare((PetscObject)divudotp, MATIS, &ismatis));
374 PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "Divergence matrix needs to be of type MATIS");
375 PetscTryMethod(pc, "PCBDDCSetDivergenceMat_C", (PC, Mat, PetscBool, IS), (pc, divudotp, trans, vl2l));
376 PetscFunctionReturn(PETSC_SUCCESS);
377 }
378
PCBDDCSetChangeOfBasisMat_BDDC(PC pc,Mat change,PetscBool interior)379 static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change, PetscBool interior)
380 {
381 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
382
383 PetscFunctionBegin;
384 PetscCall(PetscObjectReference((PetscObject)change));
385 PetscCall(MatDestroy(&pcbddc->user_ChangeOfBasisMatrix));
386 pcbddc->user_ChangeOfBasisMatrix = change;
387 pcbddc->change_interior = interior;
388 PetscFunctionReturn(PETSC_SUCCESS);
389 }
390
391 /*@
392 PCBDDCSetChangeOfBasisMat - Set user defined change of basis for dofs
393
394 Collective
395
396 Input Parameters:
397 + pc - the preconditioning context
398 . change - the change of basis matrix
399 - interior - whether or not the change of basis modifies interior dofs
400
401 Level: intermediate
402
403 .seealso: [](ch_ksp), `PCBDDC`
404 @*/
PCBDDCSetChangeOfBasisMat(PC pc,Mat change,PetscBool interior)405 PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change, PetscBool interior)
406 {
407 PetscFunctionBegin;
408 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
409 PetscValidHeaderSpecific(change, MAT_CLASSID, 2);
410 PetscCheckSameComm(pc, 1, change, 2);
411 if (pc->mat) {
412 PetscInt rows_c, cols_c, rows, cols;
413 PetscCall(MatGetSize(pc->mat, &rows, &cols));
414 PetscCall(MatGetSize(change, &rows_c, &cols_c));
415 PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows);
416 PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols);
417 PetscCall(MatGetLocalSize(pc->mat, &rows, &cols));
418 PetscCall(MatGetLocalSize(change, &rows_c, &cols_c));
419 PetscCheck(rows_c == rows, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local rows for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, rows_c, rows);
420 PetscCheck(cols_c == cols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid number of local columns for change of basis matrix! %" PetscInt_FMT " != %" PetscInt_FMT, cols_c, cols);
421 }
422 PetscTryMethod(pc, "PCBDDCSetChangeOfBasisMat_C", (PC, Mat, PetscBool), (pc, change, interior));
423 PetscFunctionReturn(PETSC_SUCCESS);
424 }
425
/* Store a user-provided global-numbering primal-vertex IS in the PCBDDC context */
static PetscErrorCode PCBDDCSetPrimalVerticesIS_BDDC(PC pc, IS PrimalVertices)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference before destroy so that passing the currently stored IS is safe */
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices, &isequal));
  /* last user setting takes precedence -> also drop the local-numbering variant */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices = PrimalVertices;
  /* a changed primal space invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
440
441 /*@
442 PCBDDCSetPrimalVerticesIS - Set additional user defined primal vertices in `PCBDDC`
443
444 Collective
445
446 Input Parameters:
447 + pc - the preconditioning context
448 - PrimalVertices - index set of primal vertices in global numbering (can be empty)
449
450 Level: intermediate
451
452 Note:
453 Any process can list any global node
454
455 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
456 @*/
PetscErrorCode PCBDDCSetPrimalVerticesIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  /* validate objects before comparing communicators */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
466
PCBDDCGetPrimalVerticesIS_BDDC(PC pc,IS * is)467 static PetscErrorCode PCBDDCGetPrimalVerticesIS_BDDC(PC pc, IS *is)
468 {
469 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
470
471 PetscFunctionBegin;
472 *is = pcbddc->user_primal_vertices;
473 PetscFunctionReturn(PETSC_SUCCESS);
474 }
475
/*@
  PCBDDCGetPrimalVerticesIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesIS()`

  Not Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in global numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 2);
  /* errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
499
/* Store a user-provided local-numbering primal-vertex IS in the PCBDDC context */
static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference before destroy so that passing the currently stored IS is safe */
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices_local) PetscCall(ISEqual(PrimalVertices, pcbddc->user_primal_vertices_local, &isequal));
  /* last user setting takes precedence -> also drop the global-numbering variant */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices_local = PrimalVertices;
  /* a changed primal space invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
514
515 /*@
516 PCBDDCSetPrimalVerticesLocalIS - Set additional user defined primal vertices in `PCBDDC`
517
518 Collective
519
520 Input Parameters:
521 + pc - the preconditioning context
522 - PrimalVertices - index set of primal vertices in local numbering (can be empty)
523
524 Level: intermediate
525
526 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesLocalIS()`
527 @*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  /* validate objects before comparing communicators */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(PrimalVertices, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, PrimalVertices, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetPrimalVerticesLocalIS_C", (PC, IS), (pc, PrimalVertices));
  PetscFunctionReturn(PETSC_SUCCESS);
}
537
PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc,IS * is)538 static PetscErrorCode PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc, IS *is)
539 {
540 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
541
542 PetscFunctionBegin;
543 *is = pcbddc->user_primal_vertices_local;
544 PetscFunctionReturn(PETSC_SUCCESS);
545 }
546
/*@
  PCBDDCGetPrimalVerticesLocalIS - Get user defined primal vertices set with `PCBDDCSetPrimalVerticesLocalIS()`

  Not Collective

  Input Parameter:
. pc - the preconditioning context

  Output Parameter:
. is - index set of primal vertices in local numbering (`NULL` if not set)

  Level: intermediate

.seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetPrimalVerticesIS()`, `PCBDDCGetPrimalVerticesIS()`, `PCBDDCSetPrimalVerticesLocalIS()`
@*/
PetscErrorCode PCBDDCGetPrimalVerticesLocalIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscAssertPointer(is, 2);
  /* errors if pc is not of type PCBDDC */
  PetscUseMethod(pc, "PCBDDCGetPrimalVerticesLocalIS_C", (PC, IS *), (pc, is));
  PetscFunctionReturn(PETSC_SUCCESS);
}
570
PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)571 static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc, PetscInt k)
572 {
573 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
574
575 PetscFunctionBegin;
576 pcbddc->coarsening_ratio = k;
577 PetscFunctionReturn(PETSC_SUCCESS);
578 }
579
580 /*@
581 PCBDDCSetCoarseningRatio - Set coarsening ratio used in the multi-level version of `PCBDDC`
582
583 Logically Collective
584
585 Input Parameters:
586 + pc - the preconditioning context
587 - k - coarsening ratio (H/h at the coarser level)
588
589 Options Database Key:
590 . -pc_bddc_coarsening_ratio <int> - Set the coarsening ratio used in multi-level coarsening
591
592 Level: intermediate
593
594 Note:
595 Approximately `k` subdomains at the finer level will be aggregated into a single subdomain at the coarser level
596
597 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetLevels()`
598 @*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc, PetscInt k)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, k, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetCoarseningRatio_C", (PC, PetscInt), (pc, k));
  PetscFunctionReturn(PETSC_SUCCESS);
}
607
608 /* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */
PCBDDCSetUseExactDirichlet_BDDC(PC pc,PetscBool flg)609 static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc, PetscBool flg)
610 {
611 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
612
613 PetscFunctionBegin;
614 pcbddc->use_exact_dirichlet_trick = flg;
615 PetscFunctionReturn(PETSC_SUCCESS);
616 }
617
PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc, PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveBool(pc, flg, 2);
  /* developer-level API (no manual page); no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetUseExactDirichlet_C", (PC, PetscBool), (pc, flg));
  PetscFunctionReturn(PETSC_SUCCESS);
}
626
PCBDDCSetLevel_BDDC(PC pc,PetscInt level)627 static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc, PetscInt level)
628 {
629 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
630
631 PetscFunctionBegin;
632 pcbddc->current_level = level;
633 PetscFunctionReturn(PETSC_SUCCESS);
634 }
635
PetscErrorCode PCBDDCSetLevel(PC pc, PetscInt level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, level, 2);
  /* developer-level API (no manual page); no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetLevel_C", (PC, PetscInt), (pc, level));
  PetscFunctionReturn(PETSC_SUCCESS);
}
644
PCBDDCSetLevels_BDDC(PC pc,PetscInt levels)645 static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc, PetscInt levels)
646 {
647 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
648
649 PetscFunctionBegin;
650 PetscCheck(levels < PETSC_PCBDDC_MAXLEVELS, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Maximum number of additional levels for BDDC is %d", PETSC_PCBDDC_MAXLEVELS - 1);
651 pcbddc->max_levels = levels;
652 PetscFunctionReturn(PETSC_SUCCESS);
653 }
654
655 /*@
656 PCBDDCSetLevels - Sets the maximum number of additional levels allowed for multilevel `PCBDDC`
657
658 Logically Collective
659
660 Input Parameters:
661 + pc - the preconditioning context
662 - levels - the maximum number of levels
663
664 Options Database Key:
665 . -pc_bddc_levels <int> - Set maximum number of levels for multilevel
666
667 Level: intermediate
668
669 Note:
670 The default value is 0, that gives the classical two-levels BDDC algorithm
671
672 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetCoarseningRatio()`
673 @*/
PetscErrorCode PCBDDCSetLevels(PC pc, PetscInt levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, levels, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetLevels_C", (PC, PetscInt), (pc, levels));
  PetscFunctionReturn(PETSC_SUCCESS);
}
682
/* Store the user-provided global-numbering Dirichlet boundary IS */
static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc, IS DirichletBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference before destroy so that passing the currently stored IS is safe */
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundaries) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundaries, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundaries = DirichletBoundaries;
  /* a changed Dirichlet set invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
698
699 /*@
700 PCBDDCSetDirichletBoundaries - Set the `IS` defining Dirichlet boundaries for the global problem.
701
702 Collective
703
704 Input Parameters:
705 + pc - the preconditioning context
706 - DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries
707
708 Level: intermediate
709
710 Note:
711 Provide the information if you used `MatZeroRows()` or `MatZeroRowsColumns()`. Any process can list any global node
712
713 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundariesLocal()`, `MatZeroRows()`, `MatZeroRowsColumns()`
714 @*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc, IS DirichletBoundaries)
{
  PetscFunctionBegin;
  /* validate objects before comparing communicators */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundaries_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
724
/* Store the user-provided local-numbering Dirichlet boundary IS */
static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc, IS DirichletBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference before destroy so that passing the currently stored IS is safe */
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundariesLocal) PetscCall(ISEqual(DirichletBoundaries, pcbddc->DirichletBoundariesLocal, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundariesLocal = DirichletBoundaries;
  /* a changed Dirichlet set invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
740
741 /*@
742 PCBDDCSetDirichletBoundariesLocal - Set the `IS` defining Dirichlet boundaries for the global problem in local ordering.
743
744 Collective
745
746 Input Parameters:
747 + pc - the preconditioning context
748 - DirichletBoundaries - parallel `IS` defining the Dirichlet boundaries (in local ordering)
749
750 Level: intermediate
751
752 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`, `MatZeroRows()`, `MatZeroRowsColumns()`
753 @*/
PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc, IS DirichletBoundaries)
{
  PetscFunctionBegin;
  /* validate objects before comparing communicators */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(DirichletBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, DirichletBoundaries, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetDirichletBoundariesLocal_C", (PC, IS), (pc, DirichletBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
763
/* Store the user-provided global-numbering Neumann boundary IS */
static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc, IS NeumannBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference before destroy so that passing the currently stored IS is safe */
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundaries) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundaries, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundaries = NeumannBoundaries;
  /* a changed Neumann set invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
779
780 /*@
781 PCBDDCSetNeumannBoundaries - Set the `IS` defining Neumann boundaries for the global problem.
782
783 Collective
784
785 Input Parameters:
786 + pc - the preconditioning context
787 - NeumannBoundaries - parallel `IS` defining the Neumann boundaries
788
789 Level: intermediate
790
791 Note:
792 Any process can list any global node
793
794 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundariesLocal()`
795 @*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc, IS NeumannBoundaries)
{
  PetscFunctionBegin;
  /* validate objects before comparing communicators */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundaries_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
805
/* Store the user-provided local-numbering Neumann boundary IS */
static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc, IS NeumannBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* reference before destroy so that passing the currently stored IS is safe */
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundariesLocal) PetscCall(ISEqual(NeumannBoundaries, pcbddc->NeumannBoundariesLocal, &isequal));
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundariesLocal = NeumannBoundaries;
  /* a changed Neumann set invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
821
822 /*@
823 PCBDDCSetNeumannBoundariesLocal - Set the `IS` defining Neumann boundaries for the global problem in local ordering.
824
825 Collective
826
827 Input Parameters:
828 + pc - the preconditioning context
829 - NeumannBoundaries - parallel `IS` defining the subdomain part of Neumann boundaries (in local ordering)
830
831 Level: intermediate
832
833 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`
834 @*/
PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc, IS NeumannBoundaries)
{
  PetscFunctionBegin;
  /* validate objects before comparing communicators */
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidHeaderSpecific(NeumannBoundaries, IS_CLASSID, 2);
  PetscCheckSameComm(pc, 1, NeumannBoundaries, 2);
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetNeumannBoundariesLocal_C", (PC, IS), (pc, NeumannBoundaries));
  PetscFunctionReturn(PETSC_SUCCESS);
}
844
PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS * DirichletBoundaries)845 static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc, IS *DirichletBoundaries)
846 {
847 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
848
849 PetscFunctionBegin;
850 *DirichletBoundaries = pcbddc->DirichletBoundaries;
851 PetscFunctionReturn(PETSC_SUCCESS);
852 }
853
854 /*@
855 PCBDDCGetDirichletBoundaries - Get parallel `IS` for Dirichlet boundaries
856
857 Collective
858
859 Input Parameter:
860 . pc - the preconditioning context
861
862 Output Parameter:
863 . DirichletBoundaries - index set defining the Dirichlet boundaries
864
865 Level: intermediate
866
867 Note:
868 The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetDirichletBoundaries()`
869
870 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDirichletBoundaries()`
871 @*/
PCBDDCGetDirichletBoundaries(PC pc,IS * DirichletBoundaries)872 PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc, IS *DirichletBoundaries)
873 {
874 PetscFunctionBegin;
875 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
876 PetscUseMethod(pc, "PCBDDCGetDirichletBoundaries_C", (PC, IS *), (pc, DirichletBoundaries));
877 PetscFunctionReturn(PETSC_SUCCESS);
878 }
879
PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc,IS * DirichletBoundaries)880 static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc, IS *DirichletBoundaries)
881 {
882 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
883
884 PetscFunctionBegin;
885 *DirichletBoundaries = pcbddc->DirichletBoundariesLocal;
886 PetscFunctionReturn(PETSC_SUCCESS);
887 }
888
889 /*@
890 PCBDDCGetDirichletBoundariesLocal - Get parallel `IS` for Dirichlet boundaries (in local ordering)
891
892 Collective
893
894 Input Parameter:
895 . pc - the preconditioning context
896
897 Output Parameter:
898 . DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries
899
900 Level: intermediate
901
902 Note:
903 The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetDirichletBoundariesLocal()`)
904 or a global-to-local map of the global `IS` (if provided with `PCBDDCSetDirichletBoundaries()`).
905 In the latter case, the `IS` will be available only after `PCSetUp()`.
906
907 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()`
908 @*/
PCBDDCGetDirichletBoundariesLocal(PC pc,IS * DirichletBoundaries)909 PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc, IS *DirichletBoundaries)
910 {
911 PetscFunctionBegin;
912 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
913 PetscUseMethod(pc, "PCBDDCGetDirichletBoundariesLocal_C", (PC, IS *), (pc, DirichletBoundaries));
914 PetscFunctionReturn(PETSC_SUCCESS);
915 }
916
PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS * NeumannBoundaries)917 static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc, IS *NeumannBoundaries)
918 {
919 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
920
921 PetscFunctionBegin;
922 *NeumannBoundaries = pcbddc->NeumannBoundaries;
923 PetscFunctionReturn(PETSC_SUCCESS);
924 }
925
926 /*@
927 PCBDDCGetNeumannBoundaries - Get parallel `IS` for Neumann boundaries
928
929 Not Collective
930
931 Input Parameter:
932 . pc - the preconditioning context
933
934 Output Parameter:
935 . NeumannBoundaries - index set defining the Neumann boundaries
936
937 Level: intermediate
938
939 Note:
940 The `IS` returned (if any) is the same passed in earlier by the user with `PCBDDCSetNeumannBoundaries()`
941
942 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCGetDirichletBoundaries()`, `PCBDDCSetDirichletBoundaries()`
943 @*/
PCBDDCGetNeumannBoundaries(PC pc,IS * NeumannBoundaries)944 PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc, IS *NeumannBoundaries)
945 {
946 PetscFunctionBegin;
947 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
948 PetscUseMethod(pc, "PCBDDCGetNeumannBoundaries_C", (PC, IS *), (pc, NeumannBoundaries));
949 PetscFunctionReturn(PETSC_SUCCESS);
950 }
951
PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc,IS * NeumannBoundaries)952 static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc, IS *NeumannBoundaries)
953 {
954 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
955
956 PetscFunctionBegin;
957 *NeumannBoundaries = pcbddc->NeumannBoundariesLocal;
958 PetscFunctionReturn(PETSC_SUCCESS);
959 }
960
961 /*@
962 PCBDDCGetNeumannBoundariesLocal - Get parallel `IS` for Neumann boundaries (in local ordering)
963
964 Not Collective
965
966 Input Parameter:
967 . pc - the preconditioning context
968
969 Output Parameter:
970 . NeumannBoundaries - index set defining the subdomain part of Neumann boundaries
971
972 Level: intermediate
973
974 Note:
975 The `IS` returned could be the same passed in earlier by the user (if provided with `PCBDDCSetNeumannBoundariesLocal()`
976 or a global-to-local map of the global `IS` (if provided with `PCBDDCSetNeumannBoundaries()`).
977 In the latter case, the `IS` will be available after `PCSetUp()`.
978
979 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetNeumannBoundariesLocal()`, `PCBDDCGetNeumannBoundaries()`
980 @*/
PCBDDCGetNeumannBoundariesLocal(PC pc,IS * NeumannBoundaries)981 PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc, IS *NeumannBoundaries)
982 {
983 PetscFunctionBegin;
984 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
985 PetscUseMethod(pc, "PCBDDCGetNeumannBoundariesLocal_C", (PC, IS *), (pc, NeumannBoundaries));
986 PetscFunctionReturn(PETSC_SUCCESS);
987 }
988
/* Store the local dof connectivity (CSR) in the PCBDDC graph, honoring the requested copy mode */
static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PCBDDCGraph mat_graph = pcbddc->mat_graph;
  PetscBool same_data = PETSC_FALSE;

  PetscFunctionBegin;
  /* an empty graph clears any previously set CSR data */
  if (!nvtxs) {
    if (copymode == PETSC_OWN_POINTER) {
      /* ownership was transferred to us but nothing will be stored: release the arrays */
      PetscCall(PetscFree(xadj));
      PetscCall(PetscFree(adjncy));
    }
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* detect a no-op: identical pointers, or equal sizes with element-wise equal content */
  if (mat_graph->nvtxs == nvtxs && mat_graph->freecsr) { /* we own the data */
    if (mat_graph->xadj == xadj && mat_graph->adjncy == adjncy) same_data = PETSC_TRUE;
    if (!same_data && mat_graph->xadj[nvtxs] == xadj[nvtxs]) {
      PetscCall(PetscArraycmp(xadj, mat_graph->xadj, nvtxs + 1, &same_data));
      if (same_data) PetscCall(PetscArraycmp(adjncy, mat_graph->adjncy, xadj[nvtxs], &same_data));
    }
  }
  if (!same_data) {
    /* free old CSR */
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    /* get CSR into graph structure */
    if (copymode == PETSC_COPY_VALUES) {
      PetscCall(PetscMalloc1(nvtxs + 1, &mat_graph->xadj));
      PetscCall(PetscMalloc1(xadj[nvtxs], &mat_graph->adjncy));
      PetscCall(PetscArraycpy(mat_graph->xadj, xadj, nvtxs + 1));
      PetscCall(PetscArraycpy(mat_graph->adjncy, adjncy, xadj[nvtxs]));
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_OWN_POINTER) {
      /* take ownership of the user arrays; freed when the graph CSR is reset */
      mat_graph->xadj = (PetscInt *)xadj;
      mat_graph->adjncy = (PetscInt *)adjncy;
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_USE_POINTER) {
      /* borrow the user arrays; the caller remains responsible for freeing them */
      mat_graph->xadj = (PetscInt *)xadj;
      mat_graph->adjncy = (PetscInt *)adjncy;
      mat_graph->freecsr = PETSC_FALSE;
    } else SETERRQ(PETSC_COMM_SELF, PETSC_ERR_SUP, "Unsupported copy mode %d", copymode);
    mat_graph->nvtxs_csr = nvtxs;
    /* a changed adjacency invalidates the cached subdomain topology */
    pcbddc->recompute_topography = PETSC_TRUE;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1035
1036 /*@
1037 PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local degrees of freedom.
1038
1039 Not collective
1040
1041 Input Parameters:
1042 + pc - the preconditioning context.
1043 . nvtxs - number of local vertices of the graph (i.e., the number of local dofs).
1044 . xadj - CSR format row pointers for the connectivity of the dofs
1045 . adjncy - CSR format column pointers for the connectivity of the dofs
1046 - copymode - supported modes are `PETSC_COPY_VALUES`, `PETSC_USE_POINTER` or `PETSC_OWN_POINTER`.
1047
1048 Level: intermediate
1049
1050 Note:
1051 A dof is considered connected with all local dofs if xadj[dof+1]-xadj[dof] == 1 and adjncy[xadj[dof]] is negative.
1052
1053 .seealso: [](ch_ksp), `PCBDDC`, `PetscCopyMode`
1054 @*/
PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc, PetscInt nvtxs, const PetscInt xadj[], const PetscInt adjncy[], PetscCopyMode copymode)
{
  PetscBool f = PETSC_FALSE;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  if (nvtxs) {
    PetscAssertPointer(xadj, 3);
    /* adjncy may legitimately be NULL when the graph has no edges (xadj[nvtxs] == 0) */
    if (xadj[nvtxs]) PetscAssertPointer(adjncy, 4);
  }
  PetscTryMethod(pc, "PCBDDCSetLocalAdjacencyGraph_C", (PC, PetscInt, const PetscInt[], const PetscInt[], PetscCopyMode), (pc, nvtxs, xadj, adjncy, copymode));
  /* free arrays if PCBDDC is not the PC type */
  PetscCall(PetscObjectHasFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", &f));
  if (!f && copymode == PETSC_OWN_POINTER) {
    /* honor the ownership transfer even though the method was not dispatched */
    PetscCall(PetscFree(xadj));
    PetscCall(PetscFree(adjncy));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1074
/* Store the user-provided field splitting (local numbering), replacing any global-numbering splitting */
static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscInt i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* check whether the new splitting coincides with the stored one */
  if (pcbddc->n_ISForDofsLocal == n_is) {
    for (i = 0; i < n_is; i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofsLocal[i], &isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference before destroy so that resetting the same ISes is safe */
  for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  /* Destroy ISes if they were already set */
  for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  /* last user setting takes precedence -> destroy any other customization */
  for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  PetscCall(PetscFree(pcbddc->ISForDofs));
  pcbddc->n_ISForDofs = 0;
  /* allocate space then set */
  if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofsLocal));
  for (i = 0; i < n_is; i++) pcbddc->ISForDofsLocal[i] = ISForDofs[i];
  pcbddc->n_ISForDofsLocal = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  /* a changed splitting invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1106
1107 /*@
1108 PCBDDCSetDofsSplittingLocal - Set the `IS` defining fields of the local subdomain matrix
1109
1110 Collective
1111
1112 Input Parameters:
1113 + pc - the preconditioning context
1114 . n_is - number of index sets defining the fields, must be the same on all MPI processes
1115 - ISForDofs - array of `IS` describing the fields in local ordering
1116
1117 Level: intermediate
1118
1119 Note:
1120 Not all nodes need to be listed, unlisted nodes will belong to the complement field.
1121
1122 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplitting()`
1123 @*/
PCBDDCSetDofsSplittingLocal(PC pc,PetscInt n_is,IS ISForDofs[])1124 PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc, PetscInt n_is, IS ISForDofs[])
1125 {
1126 PetscInt i;
1127
1128 PetscFunctionBegin;
1129 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
1130 PetscValidLogicalCollectiveInt(pc, n_is, 2);
1131 for (i = 0; i < n_is; i++) {
1132 PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
1133 PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3);
1134 }
1135 PetscTryMethod(pc, "PCBDDCSetDofsSplittingLocal_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
1136 PetscFunctionReturn(PETSC_SUCCESS);
1137 }
1138
/* Store the user-provided field splitting (global numbering), replacing any local-numbering splitting */
static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PetscInt i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* check whether the new splitting coincides with the stored one */
  if (pcbddc->n_ISForDofs == n_is) {
    for (i = 0; i < n_is; i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i], pcbddc->ISForDofs[i], &isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference before destroy so that resetting the same ISes is safe */
  for (i = 0; i < n_is; i++) PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  /* Destroy ISes if they were already set */
  for (i = 0; i < pcbddc->n_ISForDofs; i++) PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  PetscCall(PetscFree(pcbddc->ISForDofs));
  /* last user setting takes precedence -> destroy any other customization */
  for (i = 0; i < pcbddc->n_ISForDofsLocal; i++) PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  pcbddc->n_ISForDofsLocal = 0;
  /* allocate space then set */
  if (n_is) PetscCall(PetscMalloc1(n_is, &pcbddc->ISForDofs));
  for (i = 0; i < n_is; i++) pcbddc->ISForDofs[i] = ISForDofs[i];
  pcbddc->n_ISForDofs = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  /* a changed splitting invalidates the cached subdomain topology */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
1170
1171 /*@
1172 PCBDDCSetDofsSplitting - Set the `IS` defining fields of the global matrix
1173
1174 Collective
1175
1176 Input Parameters:
1177 + pc - the preconditioning context
1178 . n_is - number of index sets defining the fields
1179 - ISForDofs - array of `IS` describing the fields in global ordering
1180
1181 Level: intermediate
1182
1183 Note:
1184 Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field.
1185
1186 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCSetDofsSplittingLocal()`
1187 @*/
PetscErrorCode PCBDDCSetDofsSplitting(PC pc, PetscInt n_is, IS ISForDofs[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
  PetscValidLogicalCollectiveInt(pc, n_is, 2);
  for (i = 0; i < n_is; i++) {
    /* validate each object before comparing its communicator */
    PetscValidHeaderSpecific(ISForDofs[i], IS_CLASSID, 3);
    PetscCheckSameComm(pc, 1, ISForDofs[i], 3);
  }
  /* no-op if pc is not of type PCBDDC */
  PetscTryMethod(pc, "PCBDDCSetDofsSplitting_C", (PC, PetscInt, IS[]), (pc, n_is, ISForDofs));
  PetscFunctionReturn(PETSC_SUCCESS);
}
1202
/*
   PCPreSolve_BDDC - rhs/initial-guess manipulations performed before the Krylov solve.

   Input Parameters:
+  pc  - the BDDC preconditioner context
.  ksp - the outer Krylov solver, or NULL when called from the FETI-DP code (PCBDDCMatFETIDPGetRHS)
.  rhs - right-hand side, modified in place (the original is saved in pcbddc->original_rhs
         and restored by PCPostSolve_BDDC)
-  x   - solution vector holding the initial guess, or NULL in the FETI-DP case

   This routine
   - disables the exact Dirichlet trick when it cannot be applied,
   - eliminates Dirichlet dofs from the rhs (storing the removed solution part),
   - removes the effect of a nonzero initial guess from the rhs, and
   - computes a correction keeping the iterates in the benign subspace (saddle-point support).
*/
static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PC_IS *pcis = (PC_IS *)pc->data;
  Vec used_vec;
  PetscBool iscg, save_rhs = PETSC_TRUE, benign_correction_computed;

  PetscFunctionBegin;
  /* if we are working with CG, one dirichlet solve can be avoided during Krylov iterations */
  if (ksp) {
    PetscCall(PetscObjectTypeCompareAny((PetscObject)ksp, &iscg, KSPCG, KSPGROPPCG, KSPPIPECG, KSPPIPELCG, KSPPIPECGRR, ""));
    if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || !iscg || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));
  }
  /* same check without the CG condition, also covering the ksp == NULL (FETI-DP) path */
  if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || pc->mat != pc->pmat) PetscCall(PCBDDCSetUseExactDirichlet(pc, PETSC_FALSE));

  /* Creates parallel work vectors used in presolve */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (!pcbddc->temp_solution) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->temp_solution));

  pcbddc->temp_solution_used = PETSC_FALSE;
  if (x) {
    PetscCall(PetscObjectReference((PetscObject)x));
    used_vec = x;
  } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */
    PetscCall(PetscObjectReference((PetscObject)pcbddc->temp_solution));
    used_vec = pcbddc->temp_solution;
    PetscCall(VecSet(used_vec, 0.0));
    pcbddc->temp_solution_used = PETSC_TRUE;
    PetscCall(VecCopy(rhs, pcbddc->original_rhs));
    save_rhs = PETSC_FALSE;
    pcbddc->eliminate_dirdofs = PETSC_TRUE;
  }

  /* hack into ksp data structure since PCPreSolve comes earlier than setting to zero the guess in src/ksp/ksp/interface/itfunc.c */
  if (ksp) {
    /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */
    PetscCall(KSPGetInitialGuessNonzero(ksp, &pcbddc->ksp_guess_nonzero));
    if (!pcbddc->ksp_guess_nonzero) PetscCall(VecSet(used_vec, 0.0));
  }

  pcbddc->rhs_change = PETSC_FALSE;
  /* Take into account zeroed rows -> change rhs and store solution removed */
  if (rhs && pcbddc->eliminate_dirdofs) {
    IS dirIS = NULL;

    /* DirichletBoundariesLocal may not be consistent among neighbours; gets a dirichlet dofs IS from graph (may be cached) */
    PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph, &dirIS));
    if (dirIS) {
      Mat_IS *matis = (Mat_IS *)pc->pmat->data;
      PetscInt dirsize, i, *is_indices;
      PetscScalar *array_x;
      const PetscScalar *array_diagonal;

      /* for a zeroed row i (presumably eliminated keeping the diagonal, MatZeroRows-style)
         the solution value is rhs_i/diag_i; set it in used_vec so that it is removed
         from the rhs by the MatMultAdd below — TODO confirm against callers */
      PetscCall(MatGetDiagonal(pc->pmat, pcis->vec1_global));
      PetscCall(VecPointwiseDivide(pcis->vec1_global, rhs, pcis->vec1_global));
      PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_global, pcis->vec2_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterBegin(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(matis->rctx, used_vec, pcis->vec1_N, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(ISGetLocalSize(dirIS, &dirsize));
      PetscCall(VecGetArray(pcis->vec1_N, &array_x));
      PetscCall(VecGetArrayRead(pcis->vec2_N, &array_diagonal));
      PetscCall(ISGetIndices(dirIS, (const PetscInt **)&is_indices));
      for (i = 0; i < dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]];
      PetscCall(ISRestoreIndices(dirIS, (const PetscInt **)&is_indices));
      PetscCall(VecRestoreArrayRead(pcis->vec2_N, &array_diagonal));
      PetscCall(VecRestoreArray(pcis->vec1_N, &array_x));
      PetscCall(VecScatterBegin(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(matis->rctx, pcis->vec1_N, used_vec, INSERT_VALUES, SCATTER_REVERSE));
      pcbddc->rhs_change = PETSC_TRUE;
      PetscCall(ISDestroy(&dirIS));
    }
  }

  /* remove the computed solution or the initial guess from the rhs */
  if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero)) {
    /* save the original rhs */
    if (save_rhs) {
      PetscCall(VecSwap(rhs, pcbddc->original_rhs));
      save_rhs = PETSC_FALSE;
    }
    pcbddc->rhs_change = PETSC_TRUE;
    /* rhs <- original_rhs - A * used_vec (negate, multiply-add, negate back) */
    PetscCall(VecScale(used_vec, -1.0));
    PetscCall(MatMultAdd(pc->mat, used_vec, pcbddc->original_rhs, rhs));
    PetscCall(VecScale(used_vec, -1.0));
    PetscCall(VecCopy(used_vec, pcbddc->temp_solution));
    pcbddc->temp_solution_used = PETSC_TRUE;
    if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_FALSE));
  }
  PetscCall(VecDestroy(&used_vec));

  /* compute initial vector in benign space if needed
     and remove non-benign solution from the rhs */
  benign_correction_computed = PETSC_FALSE;
  if (rhs && pcbddc->benign_compute_correction && (pcbddc->benign_have_null || pcbddc->benign_apply_coarse_only)) {
    /* compute u^*_h using ideas similar to those in Xuemin Tu's PhD thesis (see Section 4.8.1)
       Recursively apply BDDC in the multilevel case */
    if (!pcbddc->benign_vec) PetscCall(VecDuplicate(rhs, &pcbddc->benign_vec));
    /* keep applying coarse solver unless we no longer have benign subdomains */
    pcbddc->benign_apply_coarse_only = pcbddc->benign_have_null ? PETSC_TRUE : PETSC_FALSE;
    if (!pcbddc->benign_skip_correction) {
      PetscCall(PCApply_BDDC(pc, rhs, pcbddc->benign_vec));
      benign_correction_computed = PETSC_TRUE;
      if (pcbddc->temp_solution_used) PetscCall(VecAXPY(pcbddc->temp_solution, 1.0, pcbddc->benign_vec));
      PetscCall(VecScale(pcbddc->benign_vec, -1.0));
      /* store the original rhs if not done earlier */
      if (save_rhs) PetscCall(VecSwap(rhs, pcbddc->original_rhs));
      /* rhs <- rhs - A * benign_vec (benign_vec holds minus the correction at this point) */
      if (pcbddc->rhs_change) {
        PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, rhs, rhs));
      } else {
        PetscCall(MatMultAdd(pc->mat, pcbddc->benign_vec, pcbddc->original_rhs, rhs));
      }
      pcbddc->rhs_change = PETSC_TRUE;
    }
    pcbddc->benign_apply_coarse_only = PETSC_FALSE;
  } else {
    PetscCall(VecDestroy(&pcbddc->benign_vec));
  }

  /* dbg output */
  if (pcbddc->dbg_flag && benign_correction_computed) {
    Vec v;

    PetscCall(VecDuplicate(pcis->vec1_global, &v));
    if (pcbddc->ChangeOfBasisMatrix) {
      PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, v));
    } else {
      PetscCall(VecCopy(rhs, v));
    }
    PetscCall(PCBDDCBenignGetOrSetP0(pc, v, PETSC_TRUE));
    PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer, "LEVEL %" PetscInt_FMT ": is the correction benign?\n", pcbddc->current_level));
    PetscCall(PetscScalarView(pcbddc->benign_n, pcbddc->benign_p0, pcbddc->dbg_viewer));
    PetscCall(PetscViewerFlush(pcbddc->dbg_viewer));
    PetscCall(VecDestroy(&v));
  }

  /* set initial guess if using PCG */
  pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
  if (x && pcbddc->use_exact_dirichlet_trick) {
    PetscCall(VecSet(x, 0.0));
    if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
      if (benign_correction_computed) { /* we have already saved the changed rhs */
        /* releases the read-lock pushed in PCApply_BDDC when the changed rhs was cached in vec1_global */
        PetscCall(VecLockReadPop(pcis->vec1_global));
      } else {
        PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, rhs, pcis->vec1_global));
      }
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_global, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(VecScatterBegin(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->global_to_D, rhs, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    }
    /* solve the interior (Dirichlet) problem once here so the first application of the
       preconditioner can skip it (exact Dirichlet trick) */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
    if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) {
      PetscCall(VecSet(pcis->vec1_global, 0.));
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, pcis->vec1_global, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcis->vec1_global, x));
    } else {
      PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, x, INSERT_VALUES, SCATTER_REVERSE));
    }
    if (ksp) PetscCall(KSPSetInitialGuessNonzero(ksp, PETSC_TRUE));
    pcbddc->exact_dirichlet_trick_app = PETSC_TRUE;
  } else if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior && benign_correction_computed && pcbddc->use_exact_dirichlet_trick) {
    /* trick not applied: still need to release the read-lock taken in PCApply_BDDC */
    PetscCall(VecLockReadPop(pcis->vec1_global));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1375
PCPostSolve_BDDC(PC pc,KSP ksp,Vec rhs,Vec x)1376 static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x)
1377 {
1378 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
1379
1380 PetscFunctionBegin;
1381 /* add solution removed in presolve */
1382 if (x && pcbddc->rhs_change) {
1383 if (pcbddc->temp_solution_used) {
1384 PetscCall(VecAXPY(x, 1.0, pcbddc->temp_solution));
1385 } else if (pcbddc->benign_compute_correction && pcbddc->benign_vec) {
1386 PetscCall(VecAXPY(x, -1.0, pcbddc->benign_vec));
1387 }
1388 /* restore to original state (not for FETI-DP) */
1389 if (ksp) pcbddc->temp_solution_used = PETSC_FALSE;
1390 }
1391
1392 /* restore rhs to its original state (not needed for FETI-DP) */
1393 if (rhs && pcbddc->rhs_change) {
1394 PetscCall(VecSwap(rhs, pcbddc->original_rhs));
1395 pcbddc->rhs_change = PETSC_FALSE;
1396 }
1397 /* restore ksp guess state */
1398 if (ksp) {
1399 PetscCall(KSPSetInitialGuessNonzero(ksp, pcbddc->ksp_guess_nonzero));
1400 /* reset flag for exact dirichlet trick */
1401 pcbddc->exact_dirichlet_trick_app = PETSC_FALSE;
1402 }
1403 PetscFunctionReturn(PETSC_SUCCESS);
1404 }
1405
/*
   PCSetUp_BDDC - builds the BDDC preconditioner from a MATIS operator.

   Major phases (order matters):
     1. decide whether topology must be recomputed (collective decision);
     2. analyze local topology / interface graph;
     3. benign saddle-point detection and change of basis on local pressures;
     4. PCISSetUp on the (possibly changed-basis) local matrix;
     5. sub-Schurs, scaling, local solvers and adaptive constraint selection;
     6. constraints matrix, coarse problem and Neumann solvers.
*/
static PetscErrorCode PCSetUp_BDDC(PC pc)
{
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  PCBDDCSubSchurs sub_schurs;
  Mat_IS *matis;
  MatNullSpace nearnullspace;
  Mat lA;
  IS lP, zerodiag = NULL;
  PetscInt nrows, ncols;
  PetscMPIInt size;
  PetscBool computesubschurs;
  PetscBool computeconstraintsmatrix;
  PetscBool new_nearnullspace_provided, ismatis, rl;
  PetscBool isset, issym, isspd;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat, MATIS, &ismatis));
  PetscCheck(ismatis, PetscObjectComm((PetscObject)pc), PETSC_ERR_ARG_WRONG, "PCBDDC preconditioner requires matrix of type MATIS");
  PetscCall(MatGetSize(pc->pmat, &nrows, &ncols));
  PetscCheck(nrows == ncols, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "PCBDDC preconditioner requires a square matrix for constructing the preconditioner");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc), &size));

  matis = (Mat_IS *)pc->pmat->data;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetUp
     Also, BDDC builds its own KSP for the Dirichlet problem */
  rl = pcbddc->recompute_topography;
  if (!pc->setupcalled || pc->flag == DIFFERENT_NONZERO_PATTERN) rl = PETSC_TRUE;
  /* collective OR: all ranks must agree on whether topology is recomputed */
  PetscCallMPI(MPIU_Allreduce(&rl, &pcbddc->recompute_topography, 1, MPI_C_BOOL, MPI_LOR, PetscObjectComm((PetscObject)pc)));
  if (pcbddc->recompute_topography) {
    pcbddc->graphanalyzed = PETSC_FALSE;
    computeconstraintsmatrix = PETSC_TRUE;
  } else {
    computeconstraintsmatrix = PETSC_FALSE;
  }

  /* check parameters' compatibility */
  if (!pcbddc->use_deluxe_scaling) pcbddc->deluxe_zerorows = PETSC_FALSE;
  pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_threshold[0] != 0.0 || pcbddc->adaptive_threshold[1] != 0.0);
  /* deluxe/adaptive only make sense with more than one subdomain (or repeated entries) */
  pcbddc->use_deluxe_scaling = (PetscBool)(pcbddc->use_deluxe_scaling && (size > 1 || matis->allow_repeated));
  pcbddc->adaptive_selection = (PetscBool)(pcbddc->adaptive_selection && (size > 1 || matis->allow_repeated));
  pcbddc->adaptive_userdefined = (PetscBool)(pcbddc->adaptive_selection && pcbddc->adaptive_userdefined);
  if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE;

  computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling);

  /* activate all connected components if the netflux has been requested */
  if (pcbddc->compute_nonetflux) {
    pcbddc->use_vertices = PETSC_TRUE;
    pcbddc->use_edges = PETSC_TRUE;
    pcbddc->use_faces = PETSC_TRUE;
  }

  /* Get stdout for dbg */
  if (pcbddc->dbg_flag) {
    if (!pcbddc->dbg_viewer) pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc));
    PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer));
    PetscCall(PetscViewerASCIIAddTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
  }

  /* process topology information */
  PetscCall(PetscLogEventBegin(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));
  if (pcbddc->recompute_topography) {
    PetscCall(PCBDDCComputeLocalTopologyInfo(pc));
    /* special support for H(curl) problems when a discrete gradient has been provided */
    if (pcbddc->discretegradient) PetscCall(PCBDDCNedelecSupport(pc));
  }
  if (pcbddc->corner_selected) pcbddc->use_vertices = PETSC_TRUE;

  /* change basis if requested by the user */
  if (pcbddc->user_ChangeOfBasisMatrix) {
    /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */
    pcbddc->use_change_of_basis = PETSC_FALSE;
    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->user_ChangeOfBasisMatrix));
  } else {
    /* no user change of basis: local_mat is simply a reference to the MATIS local matrix */
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /*
     Compute change of basis on local pressures (aka zerodiag dofs) with the benign trick
     This should come earlier than PCISSetUp for extracting the correct subdomain matrices
  */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_TRUE));
  if (pcbddc->benign_saddle_point) {
    PC_IS *pcis = (PC_IS *)pc->data;

    if (pcbddc->user_ChangeOfBasisMatrix || pcbddc->use_change_of_basis || !computesubschurs) pcbddc->benign_change_explicit = PETSC_TRUE;
    /* detect local saddle point and change the basis in pcbddc->local_mat */
    PetscCall(PCBDDCBenignDetectSaddlePoint(pc, (PetscBool)(!pcbddc->recompute_topography), &zerodiag));
    /* pop B0 mat from local mat */
    PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    /* give pcis a hint to not reuse submatrices during PCISCreate */
    if (pc->flag == SAME_NONZERO_PATTERN && pcis->reusesubmatrices == PETSC_TRUE) {
      if (pcbddc->benign_n && (pcbddc->benign_change_explicit || pcbddc->dbg_flag)) {
        pcis->reusesubmatrices = PETSC_FALSE;
      } else {
        pcis->reusesubmatrices = PETSC_TRUE;
      }
    } else {
      pcis->reusesubmatrices = PETSC_FALSE;
    }
  }

  /* propagate relevant information */
  PetscCall(MatIsSymmetricKnown(matis->A, &isset, &issym));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SYMMETRIC, issym));
  PetscCall(MatIsSPDKnown(matis->A, &isset, &isspd));
  if (isset) PetscCall(MatSetOption(pcbddc->local_mat, MAT_SPD, isspd));

  /* Set up all the "iterative substructuring" common block without computing solvers */
  {
    /* temporarily install the (possibly changed-basis) local matrix into the MATIS
       so that PCISSetUp extracts its submatrices from pcbddc->local_mat; the
       original matis->A is restored afterwards */
    Mat temp_mat;

    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    PetscCall(PCISSetUp(pc, PETSC_TRUE, PETSC_FALSE));
    pcbddc->local_mat = matis->A;
    matis->A = temp_mat;
  }

  /* Analyze interface */
  if (!pcbddc->graphanalyzed) {
    PetscCall(PCBDDCAnalyzeInterface(pc));
    computeconstraintsmatrix = PETSC_TRUE;
    PetscCheck(!(pcbddc->adaptive_selection && !pcbddc->use_deluxe_scaling && !pcbddc->mat_graph->twodim), PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Cannot compute the adaptive primal space for a problem with 3D edges without deluxe scaling");
    if (pcbddc->compute_nonetflux) {
      MatNullSpace nnfnnsp;

      PetscCheck(pcbddc->divudotp, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Missing divudotp operator");
      PetscCall(PCBDDCComputeNoNetFlux(pc->pmat, pcbddc->divudotp, pcbddc->divudotp_trans, pcbddc->divudotp_vl2l, pcbddc->mat_graph, &nnfnnsp));
      /* TODO what if a nearnullspace is already attached? */
      if (nnfnnsp) {
        PetscCall(MatSetNearNullSpace(pc->pmat, nnfnnsp));
        PetscCall(MatNullSpaceDestroy(&nnfnnsp));
      }
    }
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_Topology[pcbddc->current_level], pc, 0, 0, 0));

  /* check existence of a divergence free extension, i.e.
     b(v_I,p_0) = 0 for all v_I (raise error if not).
     Also, check that PCBDDCBenignGetOrSetP0 works */
  if (pcbddc->benign_saddle_point && pcbddc->dbg_flag > 1) PetscCall(PCBDDCBenignCheck(pc, zerodiag));
  PetscCall(ISDestroy(&zerodiag));

  /* Setup local dirichlet solver ksp_D and sub_schurs solvers */
  if (computesubschurs && pcbddc->recompute_topography) PetscCall(PCBDDCInitSubSchurs(pc));
  /* SetUp Scaling operator (scaling matrices could be needed in SubSchursSetUp)*/
  if (!pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* finish setup solvers and do adaptive selection of constraints */
  sub_schurs = pcbddc->sub_schurs;
  /* the relative order of SetUpSubSchurs and SetUpLocalSolvers depends on whether
     the Schur complements are computed explicitly */
  if (sub_schurs && sub_schurs->schur_explicit) {
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
  } else {
    PetscCall(PCBDDCSetUpLocalSolvers(pc, PETSC_TRUE, PETSC_FALSE));
    if (computesubschurs) PetscCall(PCBDDCSetUpSubSchurs(pc));
  }
  if (pcbddc->adaptive_selection) {
    PetscCall(PCBDDCAdaptiveSelection(pc));
    computeconstraintsmatrix = PETSC_TRUE;
  }

  /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
  new_nearnullspace_provided = PETSC_FALSE;
  PetscCall(MatGetNearNullSpace(pc->pmat, &nearnullspace));
  if (pcbddc->onearnullspace) { /* already used nearnullspace */
    if (!nearnullspace) { /* near null space attached to mat has been destroyed */
      new_nearnullspace_provided = PETSC_TRUE;
    } else {
      /* determine if the two nullspaces are different (should be lightweight) */
      if (nearnullspace != pcbddc->onearnullspace) {
        new_nearnullspace_provided = PETSC_TRUE;
      } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
        PetscInt i;
        const Vec *nearnullvecs;
        PetscObjectState state;
        PetscInt nnsp_size;
        PetscCall(MatNullSpaceGetVecs(nearnullspace, NULL, &nnsp_size, &nearnullvecs));
        for (i = 0; i < nnsp_size; i++) {
          PetscCall(PetscObjectStateGet((PetscObject)nearnullvecs[i], &state));
          if (pcbddc->onearnullvecs_state[i] != state) {
            new_nearnullspace_provided = PETSC_TRUE;
            break;
          }
        }
      }
    }
  } else {
    if (!nearnullspace) { /* both nearnullspaces are null */
      new_nearnullspace_provided = PETSC_FALSE;
    } else { /* nearnullspace attached later */
      new_nearnullspace_provided = PETSC_TRUE;
    }
  }

  /* Setup constraints and related work vectors */
  /* reset primal space flags */
  PetscCall(PetscLogEventBegin(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));
  pcbddc->new_primal_space = PETSC_FALSE;
  pcbddc->new_primal_space_local = PETSC_FALSE;
  if (computeconstraintsmatrix || new_nearnullspace_provided) {
    /* It also sets the primal space flags */
    PetscCall(PCBDDCConstraintsSetUp(pc));
  }
  /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
  PetscCall(PCBDDCSetUpLocalWorkVectors(pc));

  if (pcbddc->use_change_of_basis) {
    PC_IS *pcis = (PC_IS *)pc->data;

    PetscCall(PCBDDCComputeLocalMatrix(pc, pcbddc->ChangeOfBasisMatrix));
    if (pcbddc->benign_change) {
      PetscCall(MatDestroy(&pcbddc->benign_B0));
      /* pop B0 from pcbddc->local_mat */
      PetscCall(PCBDDCBenignPopOrPushB0(pc, PETSC_TRUE));
    }
    /* get submatrices */
    PetscCall(MatDestroy(&pcis->A_IB));
    PetscCall(MatDestroy(&pcis->A_BI));
    PetscCall(MatDestroy(&pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_I_local, pcis->is_B_local, MAT_INITIAL_MATRIX, &pcis->A_IB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat, pcis->is_B_local, pcis->is_I_local, MAT_INITIAL_MATRIX, &pcis->A_BI));
    /* set flag in pcis to not reuse submatrices during PCISCreate */
    pcis->reusesubmatrices = PETSC_FALSE;
  } else if (!pcbddc->user_ChangeOfBasisMatrix && !pcbddc->benign_change) {
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /* interface pressure block row for B_C */
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lP", (PetscObject *)&lP));
  PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_lA", (PetscObject *)&lA));
  if (lA && lP) {
    PC_IS *pcis = (PC_IS *)pc->data;
    Mat B_BI, B_BB, Bt_BI, Bt_BB;
    PetscBool issym;

    /* for symmetric lA the transposed blocks are virtual (MatCreateTranspose);
       otherwise they are extracted explicitly */
    PetscCall(MatIsSymmetric(lA, PETSC_SMALL, &issym));
    if (issym) {
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateTranspose(B_BI, &Bt_BI));
      PetscCall(MatCreateTranspose(B_BB, &Bt_BB));
    } else {
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_I_local, MAT_INITIAL_MATRIX, &B_BI));
      PetscCall(MatCreateSubMatrix(lA, lP, pcis->is_B_local, MAT_INITIAL_MATRIX, &B_BB));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_I_local, lP, MAT_INITIAL_MATRIX, &Bt_BI));
      PetscCall(MatCreateSubMatrix(lA, pcis->is_B_local, lP, MAT_INITIAL_MATRIX, &Bt_BB));
    }
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BI", (PetscObject)B_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_B_BB", (PetscObject)B_BB));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BI", (PetscObject)Bt_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc, "__KSPFETIDP_Bt_BB", (PetscObject)Bt_BB));
    PetscCall(MatDestroy(&B_BI));
    PetscCall(MatDestroy(&B_BB));
    PetscCall(MatDestroy(&Bt_BI));
    PetscCall(MatDestroy(&Bt_BB));
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_LocalWork[pcbddc->current_level], pc, 0, 0, 0));

  /* SetUp coarse and local Neumann solvers */
  PetscCall(PCBDDCSetUpSolvers(pc));
  /* SetUp Scaling operator */
  if (pcbddc->use_deluxe_scaling) PetscCall(PCBDDCScalingSetUp(pc));

  /* mark topography as done */
  pcbddc->recompute_topography = PETSC_FALSE;

  /* wrap pcis->A_IB and pcis->A_BI if we did not change explicitly the variables on the pressures */
  PetscCall(PCBDDCBenignShellMat(pc, PETSC_FALSE));

  if (pcbddc->dbg_flag) {
    PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer, 2 * pcbddc->current_level));
    PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer));
  }

  { /* Dump customization */
    PetscBool flg;
    char save[PETSC_MAX_PATH_LEN] = {'\0'};

    PetscCall(PetscOptionsGetString(NULL, ((PetscObject)pc)->prefix, "-pc_bddc_save", save, sizeof(save), &flg));
    if (flg) {
      size_t len;

      PetscCall(PetscStrlen(save, &len));
      PetscCall(PCBDDCLoadOrViewCustomization(pc, PETSC_FALSE, len ? save : NULL));
    }
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1701
/*
   PCApply_BDDC - applies the BDDC preconditioner, z = M^{-1} r.

   Steps:
     1. optional change of basis on the residual;
     2. static condensation of the residual onto the interface (first Dirichlet solve),
        skipped when the exact Dirichlet trick was applied in PCPreSolve_BDDC;
     3. application of the interface preconditioner (pcis->vec1_B / pcis->vec1_D in/out);
     4. harmonic extension back to the interior (second Dirichlet solve) and assembly of z.
   When pcbddc->switch_static is true, the M_3 variant of the reference article is used
   and the interior dofs also flow through the (possibly changed-basis) local operator lA.
*/
static PetscErrorCode PCApply_BDDC(PC pc, Vec r, Vec z)
{
  PC_IS *pcis = (PC_IS *)pc->data;
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  Mat lA = NULL;
  PetscInt n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero = 0.0;
  /* This code is similar to that provided in nn.c for PCNN
     NN interface preconditioner changed to BDDC
     Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? pc->mat : pc->pmat, &lA));

  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* swap the local pointer r with work_change: from here on, r holds the
       transformed residual; the original pointers are swapped back at the end */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
    swap = pcbddc->work_change;
    pcbddc->work_change = r;
    r = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    if (pcbddc->benign_apply_coarse_only && pcbddc->use_exact_dirichlet_trick && pcbddc->change_interior) {
      /* the matching VecLockReadPop is in PCPreSolve_BDDC */
      PetscCall(VecCopy(r, pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET && !pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r, z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    /*
       Assembling right-hand side for BDDC operator
       - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
       - pcis->vec1_B the interface part of the global vector z
    */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
    if (n_D) {
      PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D, m_one));
      if (pcbddc->switch_static) {
        /* apply the full local operator (through the static-switch change of basis if present) */
        PetscCall(VecSet(pcis->vec1_N, 0.));
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      } else {
        PetscCall(MatMult(pcis->A_BI, pcis->vec2_D, pcis->vec1_B));
      }
    } else {
      /* no interior dofs on this subdomain */
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(VecSet(pcis->vec1_B, zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
  } else {
    if (!pcbddc->benign_apply_coarse_only) PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    PetscCheck(pcbddc->switch_static, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You forgot to pass -pc_bddc_switch_static");
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_FALSE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    /* lumped variant: no second Dirichlet solve needed */
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscFunctionReturn(PETSC_SUCCESS);
  }
  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        PetscCall(MatMult(lA, pcis->vec2_N, pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMult(lA, pcis->vec1_D, pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMult(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));

  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    /* combine the interior corrections from the first solve, interface application and second solve */
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D, m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    if (pcbddc->benign_apply_coarse_only) PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
    PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
  }

  if (pcbddc->ChangeOfBasisMatrix) {
    /* restore the work_change pointer (r currently aliases the original work_change)
       and map z back to the original basis */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z, pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
1860
/* Applies the transpose of the BDDC preconditioner: z = M^-T r.
   Transpose analogue of PCApply_BDDC: the local Dirichlet solves use KSPSolveTranspose
   and the couplings (A_IB, A_BI, the local matrix lA) are applied via MatMultTranspose. */
static PetscErrorCode PCApplyTranspose_BDDC(PC pc, Vec r, Vec z)
{
  PC_IS *pcis = (PC_IS *)pc->data;
  PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
  Mat lA = NULL;
  PetscInt n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero = 0.0;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation, &cited));
  /* the local (unassembled) matrix is only needed when static condensation is switched off */
  if (pcbddc->switch_static) PetscCall(MatISGetLocalMat(pc->useAmat ? pc->mat : pc->pmat, &lA));
  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* map the residual into the basis the preconditioner works in; swap r with the
       work vector so the remainder of the routine can use r transparently */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, r, pcbddc->work_change));
    swap = pcbddc->work_change;
    pcbddc->work_change = r;
    r = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    if (pcbddc->benign_apply_coarse_only && pcbddc->exact_dirichlet_trick_app && pcbddc->change_interior) {
      PetscCall(VecCopy(r, pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, r, PETSC_TRUE));
  }
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r, z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D, r, pcis->vec1_D, INSERT_VALUES, SCATTER_FORWARD));
    /*
      Assembling right-hand side for BDDC operator
      - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
      - pcis->vec1_B the interface part of the global vector z
    */
    if (n_D) {
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec1_D, pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D, m_one));
      if (pcbddc->switch_static) {
        /* no static condensation: apply the transpose of the local matrix lA,
           optionally conjugated by the (transposed) switch_static change of basis */
        PetscCall(VecSet(pcis->vec1_N, 0.));
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
          PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec1_D, ADD_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec2_N, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
      } else {
        /* static condensation: vec1_B = -A_IB^T K_II^-T r_I (the minus sign comes from
           the VecScale above) */
        PetscCall(MatMultTranspose(pcis->A_IB, pcis->vec2_D, pcis->vec1_B));
      }
    } else {
      /* no interior dofs on this process */
      PetscCall(VecSet(pcis->vec1_B, zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, z, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc, z, pcis->vec1_B));
  } else {
    /* exact Dirichlet trick / coarse-only application: restrict r directly to the interface */
    PetscCall(PCBDDCScalingRestriction(pc, r, pcis->vec1_B));
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc, PETSC_TRUE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc, pcis->vec1_B, z));

  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, z, pcis->vec1_B, INSERT_VALUES, SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      /* gather interior and interface corrections in the local (N) numbering and apply lA^T */
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec1_D, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B, pcis->vec1_B, pcis->vec1_N, INSERT_VALUES, SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
        PetscCall(MatMultTranspose(lA, pcis->vec2_N, pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec1_N, pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D, pcis->vec2_N, pcis->vec3_D, INSERT_VALUES, SCATTER_FORWARD));
    } else {
      /* static condensation: vec3_D = A_BI^T u_B feeds the second interior solve */
      PetscCall(MatMultTranspose(pcis->A_BI, pcis->vec1_B, pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMultTranspose(lA, pcis->vec1_D, pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change, pcis->vec1_D, pcis->vec1_N));
      PetscCall(MatMultTranspose(lA, pcis->vec1_N, pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change, pcis->vec2_N, pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPSolveTranspose(pcbddc->ksp_D, pcis->vec3_D, pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], pc, 0, 0, 0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D, pc, pcis->vec4_D));
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    /* combine first solve, interface correction and second solve into the interior part of z */
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D, m_one, one, m_one, pcis->vec4_D, pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D, m_one, m_one, pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec2_D, z, INSERT_VALUES, SCATTER_REVERSE));
  } else {
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D, one, m_one, pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D, m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec4_D, z, INSERT_VALUES, SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    /* NOTE(review): unlike PCApply_BDDC, benign_p0 is not zeroed here when
       benign_apply_coarse_only is true -- confirm this asymmetry is intended */
    PetscCall(PCBDDCBenignGetOrSetP0(pc, z, PETSC_FALSE));
  }
  if (pcbddc->ChangeOfBasisMatrix) {
    /* map the result back to the user's basis; restore work_change (it currently
       aliases the user's r because of the swap above) */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z, pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, pcbddc->work_change, z));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2002
PCReset_BDDC(PC pc)2003 static PetscErrorCode PCReset_BDDC(PC pc)
2004 {
2005 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
2006 PC_IS *pcis = (PC_IS *)pc->data;
2007 KSP kspD, kspR, kspC;
2008
2009 PetscFunctionBegin;
2010 /* free BDDC custom data */
2011 PetscCall(PCBDDCResetCustomization(pc));
2012 /* destroy objects related to topography */
2013 PetscCall(PCBDDCResetTopography(pc));
2014 /* destroy objects for scaling operator */
2015 PetscCall(PCBDDCScalingDestroy(pc));
2016 /* free solvers stuff */
2017 PetscCall(PCBDDCResetSolvers(pc));
2018 /* free global vectors needed in presolve */
2019 PetscCall(VecDestroy(&pcbddc->temp_solution));
2020 PetscCall(VecDestroy(&pcbddc->original_rhs));
2021 /* free data created by PCIS */
2022 PetscCall(PCISReset(pc));
2023
2024 /* restore defaults */
2025 kspD = pcbddc->ksp_D;
2026 kspR = pcbddc->ksp_R;
2027 kspC = pcbddc->coarse_ksp;
2028 PetscCall(PetscMemzero(pc->data, sizeof(*pcbddc)));
2029 pcis->n_neigh = -1;
2030 pcis->scaling_factor = 1.0;
2031 pcis->reusesubmatrices = PETSC_TRUE;
2032 pcbddc->use_local_adj = PETSC_TRUE;
2033 pcbddc->use_vertices = PETSC_TRUE;
2034 pcbddc->use_edges = PETSC_TRUE;
2035 pcbddc->symmetric_primal = PETSC_TRUE;
2036 pcbddc->vertex_size = 1;
2037 pcbddc->recompute_topography = PETSC_TRUE;
2038 pcbddc->coarse_size = -1;
2039 pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
2040 pcbddc->coarsening_ratio = 8;
2041 pcbddc->coarse_eqs_per_proc = 1;
2042 pcbddc->benign_compute_correction = PETSC_TRUE;
2043 pcbddc->nedfield = -1;
2044 pcbddc->nedglobal = PETSC_TRUE;
2045 pcbddc->graphmaxcount = PETSC_INT_MAX;
2046 pcbddc->sub_schurs_layers = -1;
2047 pcbddc->ksp_D = kspD;
2048 pcbddc->ksp_R = kspR;
2049 pcbddc->coarse_ksp = kspC;
2050 PetscFunctionReturn(PETSC_SUCCESS);
2051 }
2052
PCDestroy_BDDC(PC pc)2053 static PetscErrorCode PCDestroy_BDDC(PC pc)
2054 {
2055 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
2056
2057 PetscFunctionBegin;
2058 PetscCall(PCReset_BDDC(pc));
2059 PetscCall(KSPDestroy(&pcbddc->ksp_D));
2060 PetscCall(KSPDestroy(&pcbddc->ksp_R));
2061 PetscCall(KSPDestroy(&pcbddc->coarse_ksp));
2062 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", NULL));
2063 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", NULL));
2064 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", NULL));
2065 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", NULL));
2066 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", NULL));
2067 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", NULL));
2068 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", NULL));
2069 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", NULL));
2070 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", NULL));
2071 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", NULL));
2072 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", NULL));
2073 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", NULL));
2074 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", NULL));
2075 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", NULL));
2076 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", NULL));
2077 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", NULL));
2078 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", NULL));
2079 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", NULL));
2080 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", NULL));
2081 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", NULL));
2082 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", NULL));
2083 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", NULL));
2084 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", NULL));
2085 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", NULL));
2086 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", NULL));
2087 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", NULL));
2088 PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", NULL));
2089 PetscCall(PetscFree(pc->data));
2090 PetscFunctionReturn(PETSC_SUCCESS);
2091 }
2092
PCSetCoordinates_BDDC(PC pc,PetscInt dim,PetscInt nloc,PetscReal * coords)2093 static PetscErrorCode PCSetCoordinates_BDDC(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords)
2094 {
2095 PC_BDDC *pcbddc = (PC_BDDC *)pc->data;
2096 PCBDDCGraph mat_graph = pcbddc->mat_graph;
2097
2098 PetscFunctionBegin;
2099 PetscCall(PetscFree(mat_graph->coords));
2100 PetscCall(PetscMalloc1(nloc * dim, &mat_graph->coords));
2101 PetscCall(PetscArraycpy(mat_graph->coords, coords, nloc * dim));
2102 mat_graph->cnloc = nloc;
2103 mat_graph->cdim = dim;
2104 mat_graph->cloc = PETSC_FALSE;
2105 /* flg setup */
2106 pcbddc->recompute_topography = PETSC_TRUE;
2107 pcbddc->corner_selected = PETSC_FALSE;
2108 PetscFunctionReturn(PETSC_SUCCESS);
2109 }
2110
PCPreSolveChangeRHS_BDDC(PC pc,PetscBool * change)2111 static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool *change)
2112 {
2113 PetscFunctionBegin;
2114 *change = PETSC_TRUE;
2115 PetscFunctionReturn(PETSC_SUCCESS);
2116 }
2117
/* Builds the FETI-DP flux right-hand side from the physical right-hand side.
   Implementation behind PCBDDCMatFETIDPGetRHS(); fetidp_mat is the shell matrix
   created by PCBDDCCreateFETIDPOperators_BDDC(). */
static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx mat_ctx;
  Vec work;
  PC_IS *pcis;
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  pcis = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  PetscCall(VecSet(fetidp_flux_rhs, 0.0));
  /* copy rhs since we may change it during PCPreSolve_BDDC */
  if (!pcbddc->original_rhs) PetscCall(VecDuplicate(pcis->vec1_global, &pcbddc->original_rhs));
  if (mat_ctx->rhs_flip) {
    /* entrywise sign flip of the rhs (saddle-point case) -- rhs_flip presumably holds +-1 entries; TODO confirm */
    PetscCall(VecPointwiseMult(pcbddc->original_rhs, standard_rhs, mat_ctx->rhs_flip));
  } else {
    PetscCall(VecCopy(standard_rhs, pcbddc->original_rhs));
  }
  if (mat_ctx->g2g_p) {
    /* interface pressure rhs */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_rhs, pcbddc->original_rhs, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, standard_rhs, fetidp_flux_rhs, INSERT_VALUES, SCATTER_FORWARD));
    if (!mat_ctx->rhs_flip) PetscCall(VecScale(fetidp_flux_rhs, -1.));
  }
  /*
     change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries
  */
  PetscCall(PCPreSolve_BDDC(mat_ctx->pc, NULL, pcbddc->original_rhs, NULL));
  if (pcbddc->ChangeOfBasisMatrix) {
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix, pcbddc->original_rhs, pcbddc->work_change));
    work = pcbddc->work_change;
  } else {
    work = pcbddc->original_rhs;
  }
  /* store vectors for computation of fetidp final solution */
  PetscCall(VecScatterBegin(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_D, work, mat_ctx->temp_solution_D, INSERT_VALUES, SCATTER_FORWARD));
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
  /* Apply partition of unity */
  PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
  if (!pcbddc->switch_static) {
    /* compute partially subassembled Schur complement right-hand side */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, mat_ctx->temp_solution_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
    PetscCall(MatMult(pcis->A_BI, pcis->vec1_D, pcis->vec1_B));
    PetscCall(VecAXPY(mat_ctx->temp_solution_B, -1.0, pcis->vec1_B));
    /* reassemble the condensed interface rhs globally, then scale it again */
    PetscCall(VecSet(work, 0.0));
    PetscCall(VecScatterBegin(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B, mat_ctx->temp_solution_B, work, ADD_VALUES, SCATTER_REVERSE));
    /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
    PetscCall(VecScatterBegin(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_B, work, mat_ctx->temp_solution_B, INSERT_VALUES, SCATTER_FORWARD));
    PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B, pcis->D, mat_ctx->temp_solution_B));
  }
  /* BDDC rhs */
  PetscCall(VecCopy(mat_ctx->temp_solution_B, pcis->vec1_B));
  if (pcbddc->switch_static) PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
  /* apply BDDC */
  /* benign_p0 is zeroed before and after so this application leaves no stale p0 state;
     NOTE(review): presumably intentional -- confirm against PCBDDCApplyInterfacePreconditioner */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));

  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  PetscCall(MatMult(mat_ctx->B_delta, pcis->vec1_B, mat_ctx->lambda_local));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, mat_ctx->lambda_local, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  /* Add contribution to interface pressures */
  if (mat_ctx->l2g_p) {
    PetscCall(VecISSet(pcis->vec1_B, mat_ctx->lP_B, 0));
    PetscCall(MatMult(mat_ctx->B_BB, pcis->vec1_B, mat_ctx->vP));
    if (pcbddc->switch_static) {
      PetscCall(VecISSet(pcis->vec1_D, mat_ctx->lP_I, 0));
      PetscCall(MatMultAdd(mat_ctx->B_BI, pcis->vec1_D, mat_ctx->vP, mat_ctx->vP));
    }
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, mat_ctx->vP, fetidp_flux_rhs, ADD_VALUES, SCATTER_FORWARD));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2208
2209 /*@
2210 PCBDDCMatFETIDPGetRHS - Compute the right-hand side for a FETI-DP linear system using the physical right-hand side
2211
2212 Collective
2213
2214 Input Parameters:
2215 + fetidp_mat - the FETI-DP matrix object obtained by a call to `PCBDDCCreateFETIDPOperators()`
2216 - standard_rhs - the right-hand side of the original linear system
2217
2218 Output Parameter:
2219 . fetidp_flux_rhs - the right-hand side for the FETI-DP linear system
2220
2221 Level: developer
2222
2223 Note:
2224 Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`.
2225
2226 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetSolution()`
2227 @*/
PCBDDCMatFETIDPGetRHS(Mat fetidp_mat,Vec standard_rhs,Vec fetidp_flux_rhs)2228 PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
2229 {
2230 FETIDPMat_ctx mat_ctx;
2231
2232 PetscFunctionBegin;
2233 PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
2234 PetscValidHeaderSpecific(standard_rhs, VEC_CLASSID, 2);
2235 PetscValidHeaderSpecific(fetidp_flux_rhs, VEC_CLASSID, 3);
2236 PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
2237 PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetRHS_C", (Mat, Vec, Vec), (fetidp_mat, standard_rhs, fetidp_flux_rhs));
2238 PetscFunctionReturn(PETSC_SUCCESS);
2239 }
2240
/* Recovers the physical solution from the FETI-DP flux solution.
   Implementation behind PCBDDCMatFETIDPGetSolution(); reuses the temp_solution_B/D
   vectors stored by PCBDDCMatFETIDPGetRHS_BDDC(). */
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx mat_ctx;
  PC_IS *pcis;
  PC_BDDC *pcbddc;
  Vec work;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
  pcis = (PC_IS *)mat_ctx->pc->data;
  pcbddc = (PC_BDDC *)mat_ctx->pc->data;

  /* apply B_delta^T */
  PetscCall(VecSet(pcis->vec1_B, 0.));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda, fetidp_flux_sol, mat_ctx->lambda_local, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(MatMultTranspose(mat_ctx->B_delta, mat_ctx->lambda_local, pcis->vec1_B));
  if (mat_ctx->l2g_p) { /* saddle-point case: add pressure contribution on the interface */
    PetscCall(VecScatterBegin(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p, fetidp_flux_sol, mat_ctx->vP, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(MatMultAdd(mat_ctx->Bt_BB, mat_ctx->vP, pcis->vec1_B, pcis->vec1_B));
  }

  /* compute rhs for BDDC application */
  PetscCall(VecAYPX(pcis->vec1_B, -1.0, mat_ctx->temp_solution_B));
  if (pcbddc->switch_static) {
    PetscCall(VecCopy(mat_ctx->temp_solution_D, pcis->vec1_D));
    if (mat_ctx->l2g_p) {
      PetscCall(VecScale(mat_ctx->vP, -1.));
      PetscCall(MatMultAdd(mat_ctx->Bt_BI, mat_ctx->vP, pcis->vec1_D, pcis->vec1_D));
    }
  }

  /* apply BDDC */
  PetscCall(PetscArrayzero(pcbddc->benign_p0, pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc, PETSC_FALSE));

  /* put values into global vector */
  /* if a change of basis is active, assemble in the work vector and map back afterwards */
  if (pcbddc->ChangeOfBasisMatrix) work = pcbddc->work_change;
  else work = standard_sol;
  PetscCall(VecScatterBegin(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_B, pcis->vec1_B, work, INSERT_VALUES, SCATTER_REVERSE));
  if (!pcbddc->switch_static) {
    /* compute values into the interior if solved for the partially subassembled Schur complement */
    PetscCall(MatMult(pcis->A_IB, pcis->vec1_B, pcis->vec1_D));
    PetscCall(VecAYPX(pcis->vec1_D, -1.0, mat_ctx->temp_solution_D));
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    PetscCall(KSPSolve(pcbddc->ksp_D, pcis->vec1_D, pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0], mat_ctx->pc, 0, 0, 0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
  }

  PetscCall(VecScatterBegin(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  PetscCall(VecScatterEnd(pcis->global_to_D, pcis->vec1_D, work, INSERT_VALUES, SCATTER_REVERSE));
  /* add p0 solution to final solution */
  PetscCall(PCBDDCBenignGetOrSetP0(mat_ctx->pc, work, PETSC_FALSE));
  if (pcbddc->ChangeOfBasisMatrix) PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix, work, standard_sol));
  PetscCall(PCPostSolve_BDDC(mat_ctx->pc, NULL, NULL, standard_sol));
  if (mat_ctx->g2g_p) {
    /* overwrite pressure dofs of the physical solution with the FETI-DP pressure values */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p, fetidp_flux_sol, standard_sol, INSERT_VALUES, SCATTER_REVERSE));
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}
2305
PCView_BDDCIPC(PC pc,PetscViewer viewer)2306 static PetscErrorCode PCView_BDDCIPC(PC pc, PetscViewer viewer)
2307 {
2308 BDDCIPC_ctx bddcipc_ctx;
2309 PetscBool isascii;
2310
2311 PetscFunctionBegin;
2312 PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2313 PetscCall(PetscObjectTypeCompare((PetscObject)viewer, PETSCVIEWERASCII, &isascii));
2314 if (isascii) PetscCall(PetscViewerASCIIPrintf(viewer, "BDDC interface preconditioner\n"));
2315 PetscCall(PetscViewerASCIIPushTab(viewer));
2316 PetscCall(PCView(bddcipc_ctx->bddc, viewer));
2317 PetscCall(PetscViewerASCIIPopTab(viewer));
2318 PetscFunctionReturn(PETSC_SUCCESS);
2319 }
2320
PCSetUp_BDDCIPC(PC pc)2321 static PetscErrorCode PCSetUp_BDDCIPC(PC pc)
2322 {
2323 BDDCIPC_ctx bddcipc_ctx;
2324 PetscBool isbddc;
2325 Vec vv;
2326 IS is;
2327 PC_IS *pcis;
2328
2329 PetscFunctionBegin;
2330 PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2331 PetscCall(PetscObjectTypeCompare((PetscObject)bddcipc_ctx->bddc, PCBDDC, &isbddc));
2332 PetscCheck(isbddc, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "Invalid type %s. Must be of type bddc", ((PetscObject)bddcipc_ctx->bddc)->type_name);
2333 PetscCall(PCSetUp(bddcipc_ctx->bddc));
2334
2335 /* create interface scatter */
2336 pcis = (PC_IS *)bddcipc_ctx->bddc->data;
2337 PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
2338 PetscCall(MatCreateVecs(pc->pmat, &vv, NULL));
2339 PetscCall(ISRenumber(pcis->is_B_global, NULL, NULL, &is));
2340 PetscCall(VecScatterCreate(vv, is, pcis->vec1_B, NULL, &bddcipc_ctx->g2l));
2341 PetscCall(ISDestroy(&is));
2342 PetscCall(VecDestroy(&vv));
2343 PetscFunctionReturn(PETSC_SUCCESS);
2344 }
2345
PCApply_BDDCIPC(PC pc,Vec r,Vec x)2346 static PetscErrorCode PCApply_BDDCIPC(PC pc, Vec r, Vec x)
2347 {
2348 BDDCIPC_ctx bddcipc_ctx;
2349 PC_IS *pcis;
2350 VecScatter tmps;
2351
2352 PetscFunctionBegin;
2353 PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2354 pcis = (PC_IS *)bddcipc_ctx->bddc->data;
2355 tmps = pcis->global_to_B;
2356 pcis->global_to_B = bddcipc_ctx->g2l;
2357 PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
2358 PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_FALSE));
2359 PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
2360 pcis->global_to_B = tmps;
2361 PetscFunctionReturn(PETSC_SUCCESS);
2362 }
2363
PCApplyTranspose_BDDCIPC(PC pc,Vec r,Vec x)2364 static PetscErrorCode PCApplyTranspose_BDDCIPC(PC pc, Vec r, Vec x)
2365 {
2366 BDDCIPC_ctx bddcipc_ctx;
2367 PC_IS *pcis;
2368 VecScatter tmps;
2369
2370 PetscFunctionBegin;
2371 PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2372 pcis = (PC_IS *)bddcipc_ctx->bddc->data;
2373 tmps = pcis->global_to_B;
2374 pcis->global_to_B = bddcipc_ctx->g2l;
2375 PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc, r, pcis->vec1_B));
2376 PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc, PETSC_TRUE));
2377 PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc, pcis->vec1_B, x));
2378 pcis->global_to_B = tmps;
2379 PetscFunctionReturn(PETSC_SUCCESS);
2380 }
2381
PCDestroy_BDDCIPC(PC pc)2382 static PetscErrorCode PCDestroy_BDDCIPC(PC pc)
2383 {
2384 BDDCIPC_ctx bddcipc_ctx;
2385
2386 PetscFunctionBegin;
2387 PetscCall(PCShellGetContext(pc, &bddcipc_ctx));
2388 PetscCall(PCDestroy(&bddcipc_ctx->bddc));
2389 PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
2390 PetscCall(PetscFree(bddcipc_ctx));
2391 PetscFunctionReturn(PETSC_SUCCESS);
2392 }
2393
2394 /*@
2395 PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system
2396
2397 Collective
2398
2399 Input Parameters:
2400 + fetidp_mat - the FETI-DP matrix obtained by a call to `PCBDDCCreateFETIDPOperators()`
- fetidp_flux_sol - the solution of the FETI-DP linear system
2402
2403 Output Parameter:
2404 . standard_sol - the solution defined on the physical domain
2405
2406 Level: developer
2407
2408 Note:
2409 Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`.
2410
2411 .seealso: [](ch_ksp), `PCBDDC`, `PCBDDCCreateFETIDPOperators()`, `PCBDDCMatFETIDPGetRHS()`
2412 @*/
PCBDDCMatFETIDPGetSolution(Mat fetidp_mat,Vec fetidp_flux_sol,Vec standard_sol)2413 PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
2414 {
2415 FETIDPMat_ctx mat_ctx;
2416
2417 PetscFunctionBegin;
2418 PetscValidHeaderSpecific(fetidp_mat, MAT_CLASSID, 1);
2419 PetscValidHeaderSpecific(fetidp_flux_sol, VEC_CLASSID, 2);
2420 PetscValidHeaderSpecific(standard_sol, VEC_CLASSID, 3);
2421 PetscCall(MatShellGetContext(fetidp_mat, &mat_ctx));
2422 PetscUseMethod(mat_ctx->pc, "PCBDDCMatFETIDPGetSolution_C", (Mat, Vec, Vec), (fetidp_mat, fetidp_flux_sol, standard_sol));
2423 PetscFunctionReturn(PETSC_SUCCESS);
2424 }
2425
MatISSubMatrixEmbedLocalIS(Mat A,IS oldis,IS * newis)2426 static PetscErrorCode MatISSubMatrixEmbedLocalIS(Mat A, IS oldis, IS *newis)
2427 {
2428 Mat_IS *matis = (Mat_IS *)A->data;
2429 ISLocalToGlobalMapping ltog;
2430 IS is;
2431
2432 PetscFunctionBegin;
2433 PetscCheck(matis->getsub_ris, PetscObjectComm((PetscObject)A), PETSC_ERR_PLIB, "Missing getsub IS");
2434 PetscCall(ISLocalToGlobalMappingCreateIS(matis->getsub_ris, <og));
2435 PetscCall(ISGlobalToLocalMappingApplyIS(ltog, IS_GTOLM_DROP, oldis, &is));
2436 PetscCall(ISOnComm(is, PetscObjectComm((PetscObject)A), PETSC_COPY_VALUES, newis));
2437 PetscCall(ISLocalToGlobalMappingDestroy(<og));
2438 PetscCall(ISDestroy(&is));
2439 PetscFunctionReturn(PETSC_SUCCESS);
2440 }
2441
PCBDDCCreateFETIDPOperators_BDDC(PC pc,PetscBool fully_redundant,const char * prefix,Mat * fetidp_mat,PC * fetidp_pc)2442 static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc)
2443 {
2444 FETIDPMat_ctx fetidpmat_ctx;
2445 Mat newmat;
2446 FETIDPPC_ctx fetidppc_ctx;
2447 PC newpc;
2448 MPI_Comm comm;
2449
2450 PetscFunctionBegin;
2451 PetscCall(PetscObjectGetComm((PetscObject)pc, &comm));
2452 /* FETI-DP matrix */
2453 PetscCall(PCBDDCCreateFETIDPMatContext(pc, &fetidpmat_ctx));
2454 fetidpmat_ctx->fully_redundant = fully_redundant;
2455 PetscCall(PCBDDCSetupFETIDPMatContext(fetidpmat_ctx));
2456 PetscCall(MatCreateShell(comm, fetidpmat_ctx->n, fetidpmat_ctx->n, fetidpmat_ctx->N, fetidpmat_ctx->N, fetidpmat_ctx, &newmat));
2457 PetscCall(PetscObjectSetName((PetscObject)newmat, !fetidpmat_ctx->l2g_lambda_only ? "F" : "G"));
2458 PetscCall(MatShellSetOperation(newmat, MATOP_MULT, (PetscErrorCodeFn *)FETIDPMatMult));
2459 PetscCall(MatShellSetOperation(newmat, MATOP_MULT_TRANSPOSE, (PetscErrorCodeFn *)FETIDPMatMultTranspose));
2460 PetscCall(MatShellSetOperation(newmat, MATOP_DESTROY, (PetscErrorCodeFn *)PCBDDCDestroyFETIDPMat));
2461 /* propagate MatOptions */
2462 {
2463 PC_BDDC *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data;
2464 PetscBool isset, issym;
2465
2466 PetscCall(MatIsSymmetricKnown(pc->mat, &isset, &issym));
2467 if ((isset && issym) || pcbddc->symmetric_primal) PetscCall(MatSetOption(newmat, MAT_SYMMETRIC, PETSC_TRUE));
2468 }
2469 PetscCall(MatSetOptionsPrefix(newmat, prefix));
2470 PetscCall(MatAppendOptionsPrefix(newmat, "fetidp_"));
2471 PetscCall(MatSetUp(newmat));
2472 /* FETI-DP preconditioner */
2473 PetscCall(PCBDDCCreateFETIDPPCContext(pc, &fetidppc_ctx));
2474 PetscCall(PCBDDCSetupFETIDPPCContext(newmat, fetidppc_ctx));
2475 PetscCall(PCCreate(comm, &newpc));
2476 PetscCall(PCSetOperators(newpc, newmat, newmat));
2477 PetscCall(PCSetOptionsPrefix(newpc, prefix));
2478 PetscCall(PCAppendOptionsPrefix(newpc, "fetidp_"));
2479 PetscCall(PCSetErrorIfFailure(newpc, pc->erroriffailure));
2480 if (!fetidpmat_ctx->l2g_lambda_only) { /* standard FETI-DP */
2481 PetscCall(PCSetType(newpc, PCSHELL));
2482 PetscCall(PCShellSetName(newpc, "FETI-DP multipliers"));
2483 PetscCall(PCShellSetContext(newpc, fetidppc_ctx));
2484 PetscCall(PCShellSetApply(newpc, FETIDPPCApply));
2485 PetscCall(PCShellSetApplyTranspose(newpc, FETIDPPCApplyTranspose));
2486 PetscCall(PCShellSetView(newpc, FETIDPPCView));
2487 PetscCall(PCShellSetDestroy(newpc, PCBDDCDestroyFETIDPPC));
2488 } else { /* saddle-point FETI-DP */
2489 Mat M;
2490 PetscInt psize;
2491 PetscBool fake = PETSC_FALSE, isfieldsplit;
2492
2493 PetscCall(ISViewFromOptions(fetidpmat_ctx->lagrange, NULL, "-lag_view"));
2494 PetscCall(ISViewFromOptions(fetidpmat_ctx->pressure, NULL, "-press_view"));
2495 PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_PPmat", (PetscObject *)&M));
2496 PetscCall(PCSetType(newpc, PCFIELDSPLIT));
2497 PetscCall(PCFieldSplitSetIS(newpc, "lag", fetidpmat_ctx->lagrange));
2498 PetscCall(PCFieldSplitSetIS(newpc, "p", fetidpmat_ctx->pressure));
2499 PetscCall(PCFieldSplitSetType(newpc, PC_COMPOSITE_SCHUR));
2500 PetscCall(PCFieldSplitSetSchurFactType(newpc, PC_FIELDSPLIT_SCHUR_FACT_DIAG));
2501 PetscCall(ISGetSize(fetidpmat_ctx->pressure, &psize));
2502 if (psize != M->rmap->N) {
2503 Mat M2;
2504 PetscInt lpsize;
2505
2506 fake = PETSC_TRUE;
2507 PetscCall(ISGetLocalSize(fetidpmat_ctx->pressure, &lpsize));
2508 PetscCall(MatCreate(comm, &M2));
2509 PetscCall(MatSetType(M2, MATAIJ));
2510 PetscCall(MatSetSizes(M2, lpsize, lpsize, psize, psize));
2511 PetscCall(MatSetUp(M2));
2512 PetscCall(MatAssemblyBegin(M2, MAT_FINAL_ASSEMBLY));
2513 PetscCall(MatAssemblyEnd(M2, MAT_FINAL_ASSEMBLY));
2514 PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M2));
2515 PetscCall(MatDestroy(&M2));
2516 } else {
2517 PetscCall(PCFieldSplitSetSchurPre(newpc, PC_FIELDSPLIT_SCHUR_PRE_USER, M));
2518 }
2519 PetscCall(PCFieldSplitSetSchurScale(newpc, 1.0));
2520
2521 /* we need to setfromoptions and setup here to access the blocks */
2522 PetscCall(PCSetFromOptions(newpc));
2523 PetscCall(PCSetUp(newpc));
2524
2525 /* user may have changed the type (e.g. -fetidp_pc_type none) */
2526 PetscCall(PetscObjectTypeCompare((PetscObject)newpc, PCFIELDSPLIT, &isfieldsplit));
2527 if (isfieldsplit) {
2528 KSP *ksps;
2529 PC ppc, lagpc;
2530 PetscInt nn;
2531 PetscBool ismatis, matisok = PETSC_FALSE, check = PETSC_FALSE;
2532
2533 /* set the solver for the (0,0) block */
2534 PetscCall(PCFieldSplitSchurGetSubKSP(newpc, &nn, &ksps));
2535 if (!nn) { /* not of type PC_COMPOSITE_SCHUR */
2536 PetscCall(PCFieldSplitGetSubKSP(newpc, &nn, &ksps));
2537 if (!fake) { /* pass pmat to the pressure solver */
2538 Mat F;
2539
2540 PetscCall(KSPGetOperators(ksps[1], &F, NULL));
2541 PetscCall(KSPSetOperators(ksps[1], F, M));
2542 }
2543 } else {
2544 PetscBool issym, isset;
2545 Mat S;
2546
2547 PetscCall(PCFieldSplitSchurGetS(newpc, &S));
2548 PetscCall(MatIsSymmetricKnown(newmat, &isset, &issym));
2549 if (isset) PetscCall(MatSetOption(S, MAT_SYMMETRIC, issym));
2550 }
2551 PetscCall(KSPGetPC(ksps[0], &lagpc));
2552 PetscCall(PCSetType(lagpc, PCSHELL));
2553 PetscCall(PCShellSetName(lagpc, "FETI-DP multipliers"));
2554 PetscCall(PCShellSetContext(lagpc, fetidppc_ctx));
2555 PetscCall(PCShellSetApply(lagpc, FETIDPPCApply));
2556 PetscCall(PCShellSetApplyTranspose(lagpc, FETIDPPCApplyTranspose));
2557 PetscCall(PCShellSetView(lagpc, FETIDPPCView));
2558 PetscCall(PCShellSetDestroy(lagpc, PCBDDCDestroyFETIDPPC));
2559
2560 /* Olof's idea: interface Schur complement preconditioner for the mass matrix */
2561 PetscCall(KSPGetPC(ksps[1], &ppc));
2562 if (fake) {
2563 PC_BDDC *pcbddc = (PC_BDDC *)fetidpmat_ctx->pc->data;
2564 BDDCIPC_ctx bddcipc_ctx;
2565 PetscContainer c;
2566
2567 matisok = PETSC_TRUE;
2568
2569 /* create inner BDDC solver */
2570 PetscCall(PetscNew(&bddcipc_ctx));
2571 PetscCall(PCCreate(comm, &bddcipc_ctx->bddc));
2572 PetscCall(PCSetType(bddcipc_ctx->bddc, PCBDDC));
2573 PetscCall(PCSetOperators(bddcipc_ctx->bddc, M, M));
2574 PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
2575 PetscCheck(ismatis, comm, PETSC_ERR_PLIB, "Matrix type %s not of type MATIS", ((PetscObject)M)->type_name);
2576 /* the inner bddc for FETI-DP is already setup, we have local info available */
2577 if (pcbddc->user_primal_vertices_local || pcbddc->n_ISForDofsLocal > 2) {
2578 if (pcbddc->user_primal_vertices_local) {
2579 IS primals;
2580
2581 PetscCall(MatISSubMatrixEmbedLocalIS(M, pcbddc->user_primal_vertices_local, &primals));
2582 PetscCall(PCBDDCSetPrimalVerticesLocalIS(bddcipc_ctx->bddc, primals));
2583 PetscCall(ISDestroy(&primals));
2584 }
2585 if (pcbddc->n_ISForDofsLocal > 2) { /* no need to propagate info if nfields < 3 */
2586 IS *split;
2587 PetscInt i, nf;
2588
2589 PetscCall(PetscCalloc1(pcbddc->n_ISForDofsLocal, &split));
2590 for (i = 0, nf = 0; i < pcbddc->n_ISForDofsLocal; i++) {
2591 PetscInt ns;
2592
2593 PetscCall(MatISSubMatrixEmbedLocalIS(M, pcbddc->ISForDofsLocal[i], &split[nf]));
2594 PetscCall(ISGetSize(split[nf], &ns));
2595 if (!ns) PetscCall(ISDestroy(&split[nf]));
2596 else nf++;
2597 }
2598 PetscCall(PCBDDCSetDofsSplittingLocal(bddcipc_ctx->bddc, nf, split));
2599 for (i = 0; i < nf; i++) PetscCall(ISDestroy(&split[i]));
2600 PetscCall(PetscFree(split));
2601 }
2602 }
2603 PetscCall(PetscObjectQuery((PetscObject)pc, "__KSPFETIDP_pCSR", (PetscObject *)&c));
2604 PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
2605 if (c && ismatis) {
2606 Mat lM;
2607 PetscInt *csr, n;
2608
2609 PetscCall(MatISGetLocalMat(M, &lM));
2610 PetscCall(MatGetSize(lM, &n, NULL));
2611 PetscCall(PetscContainerGetPointer(c, &csr));
2612 PetscCall(PCBDDCSetLocalAdjacencyGraph(bddcipc_ctx->bddc, n, csr, csr + (n + 1), PETSC_COPY_VALUES));
2613 PetscCall(MatISRestoreLocalMat(M, &lM));
2614 }
2615 PetscCall(PCSetOptionsPrefix(bddcipc_ctx->bddc, ((PetscObject)ksps[1])->prefix));
2616 PetscCall(PCSetErrorIfFailure(bddcipc_ctx->bddc, pc->erroriffailure));
2617 PetscCall(PCSetFromOptions(bddcipc_ctx->bddc));
2618
2619 /* wrap the interface application */
2620 PetscCall(PCSetType(ppc, PCSHELL));
2621 PetscCall(PCShellSetName(ppc, "FETI-DP pressure"));
2622 PetscCall(PCShellSetContext(ppc, bddcipc_ctx));
2623 PetscCall(PCShellSetSetUp(ppc, PCSetUp_BDDCIPC));
2624 PetscCall(PCShellSetApply(ppc, PCApply_BDDCIPC));
2625 PetscCall(PCShellSetApplyTranspose(ppc, PCApplyTranspose_BDDCIPC));
2626 PetscCall(PCShellSetView(ppc, PCView_BDDCIPC));
2627 PetscCall(PCShellSetDestroy(ppc, PCDestroy_BDDCIPC));
2628 }
2629
2630 /* determine if we need to assemble M to construct a preconditioner */
2631 if (!matisok) {
2632 PetscCall(PetscObjectTypeCompare((PetscObject)M, MATIS, &ismatis));
2633 PetscCall(PetscObjectTypeCompareAny((PetscObject)ppc, &matisok, PCBDDC, PCJACOBI, PCNONE, PCMG, ""));
2634 if (ismatis && !matisok) PetscCall(MatConvert(M, MATAIJ, MAT_INPLACE_MATRIX, &M));
2635 }
2636
2637 /* run the subproblems to check convergence */
2638 PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)newmat)->prefix, "-check_saddlepoint", &check, NULL));
2639 if (check) {
2640 PetscInt i;
2641
2642 for (i = 0; i < nn; i++) {
2643 KSP kspC;
2644 PC npc;
2645 Mat F, pF;
2646 Vec x, y;
2647 PetscBool isschur, prec = PETSC_TRUE;
2648
2649 PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksps[i]), &kspC));
2650 PetscCall(KSPSetNestLevel(kspC, pc->kspnestlevel));
2651 PetscCall(KSPSetOptionsPrefix(kspC, ((PetscObject)ksps[i])->prefix));
2652 PetscCall(KSPAppendOptionsPrefix(kspC, "check_"));
2653 PetscCall(KSPGetOperators(ksps[i], &F, &pF));
2654 PetscCall(PetscObjectTypeCompare((PetscObject)F, MATSCHURCOMPLEMENT, &isschur));
2655 if (isschur) {
2656 KSP kspS, kspS2;
2657 Mat A00, pA00, A10, A01, A11;
2658 char prefix[256];
2659
2660 PetscCall(MatSchurComplementGetKSP(F, &kspS));
2661 PetscCall(MatSchurComplementGetSubMatrices(F, &A00, &pA00, &A01, &A10, &A11));
2662 PetscCall(MatCreateSchurComplement(A00, pA00, A01, A10, A11, &F));
2663 PetscCall(MatSchurComplementGetKSP(F, &kspS2));
2664 PetscCall(PetscSNPrintf(prefix, sizeof(prefix), "%sschur_", ((PetscObject)kspC)->prefix));
2665 PetscCall(KSPSetOptionsPrefix(kspS2, prefix));
2666 PetscCall(KSPGetPC(kspS2, &npc));
2667 PetscCall(PCSetType(npc, PCKSP));
2668 PetscCall(PCKSPSetKSP(npc, kspS));
2669 PetscCall(KSPSetFromOptions(kspS2));
2670 PetscCall(KSPGetPC(kspS2, &npc));
2671 PetscCall(PCSetUseAmat(npc, PETSC_TRUE));
2672 } else {
2673 PetscCall(PetscObjectReference((PetscObject)F));
2674 }
2675 PetscCall(KSPSetFromOptions(kspC));
2676 PetscCall(PetscOptionsGetBool(NULL, ((PetscObject)kspC)->prefix, "-preconditioned", &prec, NULL));
2677 if (prec) {
2678 PetscCall(KSPGetPC(ksps[i], &npc));
2679 PetscCall(KSPSetPC(kspC, npc));
2680 }
2681 PetscCall(KSPSetOperators(kspC, F, pF));
2682 PetscCall(MatCreateVecs(F, &x, &y));
2683 PetscCall(VecSetRandom(x, NULL));
2684 PetscCall(MatMult(F, x, y));
2685 PetscCall(KSPSolve(kspC, y, x));
2686 PetscCall(KSPCheckSolve(kspC, npc, x));
2687 PetscCall(KSPDestroy(&kspC));
2688 PetscCall(MatDestroy(&F));
2689 PetscCall(VecDestroy(&x));
2690 PetscCall(VecDestroy(&y));
2691 }
2692 }
2693 PetscCall(PetscFree(ksps));
2694 }
2695 }
2696 /* return pointers for objects created */
2697 *fetidp_mat = newmat;
2698 *fetidp_pc = newpc;
2699 PetscFunctionReturn(PETSC_SUCCESS);
2700 }
2701
2702 /*@C
2703 PCBDDCCreateFETIDPOperators - Create FETI-DP operators
2704
2705 Collective
2706
2707 Input Parameters:
2708 + pc - the `PCBDDC` preconditioning context (setup should have been called before)
2709 . fully_redundant - true for a fully redundant set of Lagrange multipliers
2710 - prefix - optional options database prefix for the objects to be created (can be `NULL`)
2711
2712 Output Parameters:
2713 + fetidp_mat - shell FETI-DP matrix object
2714 - fetidp_pc - shell Dirichlet preconditioner for FETI-DP matrix
2715
2716 Level: developer
2717
2718 Notes:
2719 Most users should employ the `KSP` interface for linear solvers and create a solver of type `KSPFETIDP`.
2720 Currently the only operations provided for the FETI-DP matrix are `MatMult()` and `MatMultTranspose()`
2721
2722 .seealso: [](ch_ksp), `KSPFETIDP`, `PCBDDC`, `PCBDDCMatFETIDPGetRHS()`, `PCBDDCMatFETIDPGetSolution()`
2723 @*/
PCBDDCCreateFETIDPOperators(PC pc,PetscBool fully_redundant,const char * prefix,Mat * fetidp_mat,PC * fetidp_pc)2724 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc)
2725 {
2726 PetscFunctionBegin;
2727 PetscValidHeaderSpecific(pc, PC_CLASSID, 1);
2728 PetscCheck(pc->setupcalled, PetscObjectComm((PetscObject)pc), PETSC_ERR_SUP, "You must call PCSetup_BDDC() first");
2729 PetscUseMethod(pc, "PCBDDCCreateFETIDPOperators_C", (PC, PetscBool, const char *, Mat *, PC *), (pc, fully_redundant, prefix, fetidp_mat, fetidp_pc));
2730 PetscFunctionReturn(PETSC_SUCCESS);
2731 }
2732
2733 /*MC
2734 PCBDDC - Balancing Domain Decomposition by Constraints preconditioners, {cite}`dohrmann2007approximate`, {cite}`klawonn2006dual`, {cite}`mandel2008multispace`
2735
2736 Requires `MATIS` matrices (Pmat) with local matrices (inside the `MATIS`) of type `MATSEQAIJ`, `MATSEQBAIJ` or `MATSEQSBAIJ`
2737
2738 Works with unsymmetric and indefinite problems.
2739
2740 Unlike 'conventional' interface preconditioners, `PCBDDC` iterates over all degrees of freedom, not just those on the interface. This allows the use
2741 of approximate solvers on the subdomains.
2742
   Approximate local solvers are automatically adapted (see {cite}`dohrmann2007approximate`) if the user has attached a nullspace object to the subdomain matrices, and has informed
   `PCBDDC` that approximate solvers are being used (via the command line).
2745
2746 Boundary nodes are split into vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes.
2747 The latter can be customized by using `PCBDDCSetLocalAdjacencyGraph()`
2748
2749 Additional information on dofs can be provided by using `PCBDDCSetDofsSplitting()`, `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, and
2750 `PCBDDCSetPrimalVerticesIS()` and their local counterparts.
2751
2752 Constraints can be customized by attaching a `MatNullSpace` object to the `MATIS` matrix via `MatSetNearNullSpace()`. Non-singular modes are retained via SVD.
2753
2754 Change of basis is performed similarly to {cite}`klawonn2006dual` when requested. When more than one constraint is present on a single connected component
2755 (i.e. an edge or a face), a robust method based on local QR factorizations is used.
2756 User defined change of basis can be passed to `PCBDDC` with `PCBDDCSetChangeOfBasisMat()`
2757
2758 The PETSc implementation also supports multilevel `PCBDDC` {cite}`mandel2008multispace`. Coarse grids are partitioned using a `MatPartitioning` object.
2759
2760 Adaptive selection of primal constraints is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present.
2761
2762 Options Database Keys:
2763 + -pc_bddc_use_vertices <true> - use or not vertices in primal space
2764 . -pc_bddc_use_edges <true> - use or not edges in primal space
2765 . -pc_bddc_use_faces <false> - use or not faces in primal space
2766 . -pc_bddc_symmetric <true> - symmetric computation of primal basis functions. Specify false for unsymmetric problems
2767 . -pc_bddc_use_change_of_basis <false> - use change of basis approach (on edges only)
2768 . -pc_bddc_use_change_on_faces <false> - use change of basis approach on faces if change of basis has been requested
2769 . -pc_bddc_switch_static <false> - switches from M_2 (default) to M_3 operator (see reference article [1])
2770 . -pc_bddc_levels <0> - maximum number of levels for multilevel
.   -pc_bddc_coarsening_ratio <8> - number of subdomains which will be aggregated together at the coarser level (e.g. H/h ratio at the coarser level, significant only in the multilevel case)
2772 . -pc_bddc_coarse_redistribute <0> - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
2773 . -pc_bddc_use_deluxe_scaling <false> - use deluxe scaling
2774 . -pc_bddc_schur_layers <\-1> - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
2775 . -pc_bddc_adaptive_threshold <0.0> - when a value different than zero is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS or MKL_PARDISO installed)
2776 - -pc_bddc_check_level <0> - set verbosity level of debugging output
2777
2778 Options for Dirichlet, Neumann or coarse solver can be set using the appropriate options prefix
2779 .vb
2780 -pc_bddc_dirichlet_
2781 -pc_bddc_neumann_
2782 -pc_bddc_coarse_
2783 .ve
2784 e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. `PCBDDC` uses by default `KSPPREONLY` and `PCLU`.
2785
2786 When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified using the options prefix
2787 .vb
2788 -pc_bddc_dirichlet_lN_
2789 -pc_bddc_neumann_lN_
2790 -pc_bddc_coarse_lN_
2791 .ve
2792 Note that level number ranges from the finest (0) to the coarsest (N).
2793 In order to specify options for the `PCBDDC` operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_l
2794 to the option, e.g.
2795 .vb
2796 -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3
2797 .ve
2798 will use a threshold of 5 for constraints' selection at the first coarse level and will redistribute the coarse problem of the first coarse level on 3 processors
2799
2800 Level: intermediate
2801
2802 .seealso: [](ch_ksp), `PCCreate()`, `PCSetType()`, `PCType`, `PC`, `MATIS`, `KSPFETIDP`, `PCLU`, `PCGAMG`, `PC`, `PCBDDCSetLocalAdjacencyGraph()`, `PCBDDCSetDofsSplitting()`,
2803 `PCBDDCSetDirichletBoundaries()`, `PCBDDCSetNeumannBoundaries()`, `PCBDDCSetPrimalVerticesIS()`, `MatNullSpace`, `MatSetNearNullSpace()`,
2804 `PCBDDCSetChangeOfBasisMat()`, `PCBDDCCreateFETIDPOperators()`, `PCNN`
2805 M*/
2806
/*
  PCCreate_BDDC - Constructor for the PCBDDC preconditioner: allocates the
  PC_BDDC context, initializes the shared PCIS part, sets the nonzero
  defaults, installs the PC operation table, and composes the PCBDDCXXX_C
  methods that back the public PCBDDC API (dispatched via PetscUseMethod).
*/
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  PetscCall(PetscNew(&pcbddc));
  pc->data = pcbddc;

  /* initialize the common iterative-substructuring (PCIS) portion of the context */
  PetscCall(PCISInitialize(pc));

  /* create local graph structure */
  PetscCall(PCBDDCGraphCreate(&pcbddc->mat_graph));

  /* BDDC nonzero defaults (zero-valued fields are already zeroed by PetscNew) */
  pcbddc->use_nnsp                  = PETSC_TRUE;
  pcbddc->use_local_adj             = PETSC_TRUE;
  pcbddc->use_vertices              = PETSC_TRUE; /* matches -pc_bddc_use_vertices <true> */
  pcbddc->use_edges                 = PETSC_TRUE; /* matches -pc_bddc_use_edges <true> */
  pcbddc->symmetric_primal          = PETSC_TRUE; /* matches -pc_bddc_symmetric <true> */
  pcbddc->vertex_size               = 1;
  pcbddc->recompute_topography      = PETSC_TRUE;
  pcbddc->coarse_size               = -1;
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio          = 8; /* matches -pc_bddc_coarsening_ratio <8> */
  pcbddc->coarse_eqs_per_proc       = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield                  = -1;
  pcbddc->nedglobal                 = PETSC_TRUE;
  pcbddc->graphmaxcount             = PETSC_INT_MAX;
  pcbddc->sub_schurs_layers         = -1; /* -1: full (non-economic) deluxe scaling, see -pc_bddc_schur_layers */
  /* 0.0 disables adaptive selection of constraints, see -pc_bddc_adaptive_threshold */
  pcbddc->adaptive_threshold[0] = 0.0;
  pcbddc->adaptive_threshold[1] = 0.0;

  /* function pointers */
  pc->ops->apply               = PCApply_BDDC;
  pc->ops->applytranspose      = PCApplyTranspose_BDDC;
  pc->ops->setup               = PCSetUp_BDDC;
  pc->ops->destroy             = PCDestroy_BDDC;
  pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
  pc->ops->view                = PCView_BDDC;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->ops->presolve            = PCPreSolve_BDDC;
  pc->ops->postsolve           = PCPostSolve_BDDC;
  pc->ops->reset               = PCReset_BDDC;

  /* composing function */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDiscreteGradient_C", PCBDDCSetDiscreteGradient_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDivergenceMat_C", PCBDDCSetDivergenceMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetChangeOfBasisMat_C", PCBDDCSetChangeOfBasisMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesLocalIS_C", PCBDDCSetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetPrimalVerticesIS_C", PCBDDCSetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesLocalIS_C", PCBDDCGetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetPrimalVerticesIS_C", PCBDDCGetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetCoarseningRatio_C", PCBDDCSetCoarseningRatio_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevel_C", PCBDDCSetLevel_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetUseExactDirichlet_C", PCBDDCSetUseExactDirichlet_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLevels_C", PCBDDCSetLevels_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundaries_C", PCBDDCSetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDirichletBoundariesLocal_C", PCBDDCSetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundaries_C", PCBDDCSetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetNeumannBoundariesLocal_C", PCBDDCSetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundaries_C", PCBDDCGetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetDirichletBoundariesLocal_C", PCBDDCGetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundaries_C", PCBDDCGetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCGetNeumannBoundariesLocal_C", PCBDDCGetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplitting_C", PCBDDCSetDofsSplitting_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetDofsSplittingLocal_C", PCBDDCSetDofsSplittingLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCSetLocalAdjacencyGraph_C", PCBDDCSetLocalAdjacencyGraph_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCCreateFETIDPOperators_C", PCBDDCCreateFETIDPOperators_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetRHS_C", PCBDDCMatFETIDPGetRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCBDDCMatFETIDPGetSolution_C", PCBDDCMatFETIDPGetSolution_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCPreSolveChangeRHS_C", PCPreSolveChangeRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc, "PCSetCoordinates_C", PCSetCoordinates_BDDC));
  PetscFunctionReturn(PETSC_SUCCESS);
}
2884
2885 /*@C
2886 PCBDDCInitializePackage - This function initializes everything in the `PCBDDC` package. It is called
2887 from `PCInitializePackage()`.
2888
2889 Level: developer
2890
2891 .seealso: [](ch_ksp), `PetscInitialize()`, `PCBDDCFinalizePackage()`
2892 @*/
PCBDDCInitializePackage(void)2893 PetscErrorCode PCBDDCInitializePackage(void)
2894 {
2895 int i;
2896
2897 PetscFunctionBegin;
2898 if (PCBDDCPackageInitialized) PetscFunctionReturn(PETSC_SUCCESS);
2899 PCBDDCPackageInitialized = PETSC_TRUE;
2900 PetscCall(PetscRegisterFinalize(PCBDDCFinalizePackage));
2901
2902 /* general events */
2903 PetscCall(PetscLogEventRegister("PCBDDCTopo", PC_CLASSID, &PC_BDDC_Topology[0]));
2904 PetscCall(PetscLogEventRegister("PCBDDCLKSP", PC_CLASSID, &PC_BDDC_LocalSolvers[0]));
2905 PetscCall(PetscLogEventRegister("PCBDDCLWor", PC_CLASSID, &PC_BDDC_LocalWork[0]));
2906 PetscCall(PetscLogEventRegister("PCBDDCCorr", PC_CLASSID, &PC_BDDC_CorrectionSetUp[0]));
2907 PetscCall(PetscLogEventRegister("PCBDDCASet", PC_CLASSID, &PC_BDDC_ApproxSetUp[0]));
2908 PetscCall(PetscLogEventRegister("PCBDDCAApp", PC_CLASSID, &PC_BDDC_ApproxApply[0]));
2909 PetscCall(PetscLogEventRegister("PCBDDCCSet", PC_CLASSID, &PC_BDDC_CoarseSetUp[0]));
2910 PetscCall(PetscLogEventRegister("PCBDDCCKSP", PC_CLASSID, &PC_BDDC_CoarseSolver[0]));
2911 PetscCall(PetscLogEventRegister("PCBDDCAdap", PC_CLASSID, &PC_BDDC_AdaptiveSetUp[0]));
2912 PetscCall(PetscLogEventRegister("PCBDDCScal", PC_CLASSID, &PC_BDDC_Scaling[0]));
2913 PetscCall(PetscLogEventRegister("PCBDDCSchr", PC_CLASSID, &PC_BDDC_Schurs[0]));
2914 PetscCall(PetscLogEventRegister("PCBDDCDirS", PC_CLASSID, &PC_BDDC_Solves[0][0]));
2915 PetscCall(PetscLogEventRegister("PCBDDCNeuS", PC_CLASSID, &PC_BDDC_Solves[0][1]));
2916 PetscCall(PetscLogEventRegister("PCBDDCCoaS", PC_CLASSID, &PC_BDDC_Solves[0][2]));
2917 for (i = 1; i < PETSC_PCBDDC_MAXLEVELS; i++) {
2918 char ename[32];
2919
2920 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCTopo l%02d", i));
2921 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Topology[i]));
2922 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLKSP l%02d", i));
2923 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalSolvers[i]));
2924 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCLWor l%02d", i));
2925 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_LocalWork[i]));
2926 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCorr l%02d", i));
2927 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CorrectionSetUp[i]));
2928 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCASet l%02d", i));
2929 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxSetUp[i]));
2930 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAApp l%02d", i));
2931 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_ApproxApply[i]));
2932 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCSet l%02d", i));
2933 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSetUp[i]));
2934 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCKSP l%02d", i));
2935 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_CoarseSolver[i]));
2936 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCAdap l%02d", i));
2937 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_AdaptiveSetUp[i]));
2938 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCScal l%02d", i));
2939 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Scaling[i]));
2940 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCSchr l%02d", i));
2941 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Schurs[i]));
2942 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCDirS l%02d", i));
2943 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][0]));
2944 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCNeuS l%02d", i));
2945 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][1]));
2946 PetscCall(PetscSNPrintf(ename, sizeof(ename), "PCBDDCCoaS l%02d", i));
2947 PetscCall(PetscLogEventRegister(ename, PC_CLASSID, &PC_BDDC_Solves[i][2]));
2948 }
2949 PetscFunctionReturn(PETSC_SUCCESS);
2950 }
2951
2952 /*@C
2953 PCBDDCFinalizePackage - This function frees everything from the `PCBDDC` package. It is
2954 called from `PetscFinalize()` automatically.
2955
2956 Level: developer
2957
2958 .seealso: [](ch_ksp), `PetscFinalize()`, `PCBDDCInitializePackage()`
2959 @*/
PetscErrorCode PCBDDCFinalizePackage(void)
{
  PetscFunctionBegin;
  /* Resetting the flag allows PCBDDCInitializePackage() to run again (and
     re-register its log events) if PETSc is re-initialized after PetscFinalize() */
  PCBDDCPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(PETSC_SUCCESS);
}
2966