1 /* TODOLIST 2 3 Solvers 4 - Add support for cholesky for coarse solver (similar to local solvers) 5 - Propagate ksp prefixes for solvers to mat objects? 6 7 User interface 8 - ** DM attached to pc? 9 10 Debugging output 11 - * Better management of verbosity levels of debugging output 12 13 Extra 14 - *** Is it possible to work with PCBDDCGraph on boundary indices only (less memory consumed)? 15 - BDDC with MG framework? 16 17 MATIS related operations contained in BDDC code 18 - Provide general case for subassembling 19 20 */ 21 22 #include <../src/ksp/pc/impls/bddc/bddc.h> /*I "petscpc.h" I*/ /* includes for fortran wrappers */ 23 #include <../src/ksp/pc/impls/bddc/bddcprivate.h> 24 #include <petscblaslapack.h> 25 26 static PetscBool PCBDDCPackageInitialized = PETSC_FALSE; 27 28 static PetscBool cited = PETSC_FALSE; 29 static const char citation[] = 30 "@article{ZampiniPCBDDC,\n" 31 "author = {Stefano Zampini},\n" 32 "title = {{PCBDDC}: A Class of Robust Dual-Primal Methods in {PETS}c},\n" 33 "journal = {SIAM Journal on Scientific Computing},\n" 34 "volume = {38},\n" 35 "number = {5},\n" 36 "pages = {S282-S306},\n" 37 "year = {2016},\n" 38 "doi = {10.1137/15M1025785},\n" 39 "URL = {http://dx.doi.org/10.1137/15M1025785},\n" 40 "eprint = {http://dx.doi.org/10.1137/15M1025785}\n" 41 "}\n"; 42 43 PetscLogEvent PC_BDDC_Topology[PETSC_PCBDDC_MAXLEVELS]; 44 PetscLogEvent PC_BDDC_LocalSolvers[PETSC_PCBDDC_MAXLEVELS]; 45 PetscLogEvent PC_BDDC_LocalWork[PETSC_PCBDDC_MAXLEVELS]; 46 PetscLogEvent PC_BDDC_CorrectionSetUp[PETSC_PCBDDC_MAXLEVELS]; 47 PetscLogEvent PC_BDDC_ApproxSetUp[PETSC_PCBDDC_MAXLEVELS]; 48 PetscLogEvent PC_BDDC_ApproxApply[PETSC_PCBDDC_MAXLEVELS]; 49 PetscLogEvent PC_BDDC_CoarseSetUp[PETSC_PCBDDC_MAXLEVELS]; 50 PetscLogEvent PC_BDDC_CoarseSolver[PETSC_PCBDDC_MAXLEVELS]; 51 PetscLogEvent PC_BDDC_AdaptiveSetUp[PETSC_PCBDDC_MAXLEVELS]; 52 PetscLogEvent PC_BDDC_Scaling[PETSC_PCBDDC_MAXLEVELS]; 53 PetscLogEvent PC_BDDC_Schurs[PETSC_PCBDDC_MAXLEVELS]; 54 
PetscLogEvent PC_BDDC_Solves[PETSC_PCBDDC_MAXLEVELS][3]; 55 56 const char *const PCBDDCInterfaceExtTypes[] = {"DIRICHLET","LUMP","PCBDDCInterfaceExtType","PC_BDDC_INTERFACE_EXT_",NULL}; 57 58 PetscErrorCode PCApply_BDDC(PC,Vec,Vec); 59 60 PetscErrorCode PCSetFromOptions_BDDC(PetscOptionItems *PetscOptionsObject,PC pc) 61 { 62 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 63 PetscInt nt,i; 64 65 PetscFunctionBegin; 66 PetscCall(PetscOptionsHead(PetscOptionsObject,"BDDC options")); 67 /* Verbose debugging */ 68 PetscCall(PetscOptionsInt("-pc_bddc_check_level","Verbose output for PCBDDC (intended for debug)","none",pcbddc->dbg_flag,&pcbddc->dbg_flag,NULL)); 69 /* Approximate solvers */ 70 PetscCall(PetscOptionsEnum("-pc_bddc_interface_ext_type","Use DIRICHLET or LUMP to extend interface corrections to interior","PCBDDCSetInterfaceExtType",PCBDDCInterfaceExtTypes,(PetscEnum)pcbddc->interface_extension,(PetscEnum*)&pcbddc->interface_extension,NULL)); 71 if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET) { 72 PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate","Inform PCBDDC that we are using approximate Dirichlet solvers","none",pcbddc->NullSpace_corr[0],&pcbddc->NullSpace_corr[0],NULL)); 73 PetscCall(PetscOptionsBool("-pc_bddc_dirichlet_approximate_scale","Inform PCBDDC that we need to scale the Dirichlet solve","none",pcbddc->NullSpace_corr[1],&pcbddc->NullSpace_corr[1],NULL)); 74 } else { 75 /* This flag is needed/implied by lumping */ 76 pcbddc->switch_static = PETSC_TRUE; 77 } 78 PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate","Inform PCBDDC that we are using approximate Neumann solvers","none",pcbddc->NullSpace_corr[2],&pcbddc->NullSpace_corr[2],NULL)); 79 PetscCall(PetscOptionsBool("-pc_bddc_neumann_approximate_scale","Inform PCBDDC that we need to scale the Neumann solve","none",pcbddc->NullSpace_corr[3],&pcbddc->NullSpace_corr[3],NULL)); 80 /* Primal space customization */ 81 
PetscCall(PetscOptionsBool("-pc_bddc_use_local_mat_graph","Use or not adjacency graph of local mat for interface analysis","none",pcbddc->use_local_adj,&pcbddc->use_local_adj,NULL)); 82 PetscCall(PetscOptionsInt("-pc_bddc_graph_maxcount","Maximum number of shared subdomains for a connected component","none",pcbddc->graphmaxcount,&pcbddc->graphmaxcount,NULL)); 83 PetscCall(PetscOptionsBool("-pc_bddc_corner_selection","Activates face-based corner selection","none",pcbddc->corner_selection,&pcbddc->corner_selection,NULL)); 84 PetscCall(PetscOptionsBool("-pc_bddc_use_vertices","Use or not corner dofs in coarse space","none",pcbddc->use_vertices,&pcbddc->use_vertices,NULL)); 85 PetscCall(PetscOptionsBool("-pc_bddc_use_edges","Use or not edge constraints in coarse space","none",pcbddc->use_edges,&pcbddc->use_edges,NULL)); 86 PetscCall(PetscOptionsBool("-pc_bddc_use_faces","Use or not face constraints in coarse space","none",pcbddc->use_faces,&pcbddc->use_faces,NULL)); 87 PetscCall(PetscOptionsInt("-pc_bddc_vertex_size","Connected components smaller or equal to vertex size will be considered as primal vertices","none",pcbddc->vertex_size,&pcbddc->vertex_size,NULL)); 88 PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp","Use near null space attached to the matrix to compute constraints","none",pcbddc->use_nnsp,&pcbddc->use_nnsp,NULL)); 89 PetscCall(PetscOptionsBool("-pc_bddc_use_nnsp_true","Use near null space attached to the matrix to compute constraints as is","none",pcbddc->use_nnsp_true,&pcbddc->use_nnsp_true,NULL)); 90 PetscCall(PetscOptionsBool("-pc_bddc_use_qr_single","Use QR factorization for single constraints on cc (QR is always used when multiple constraints are present)","none",pcbddc->use_qr_single,&pcbddc->use_qr_single,NULL)); 91 /* Change of basis */ 92 PetscCall(PetscOptionsBool("-pc_bddc_use_change_of_basis","Use or not internal change of basis on local edge nodes","none",pcbddc->use_change_of_basis,&pcbddc->use_change_of_basis,NULL)); 93 
PetscCall(PetscOptionsBool("-pc_bddc_use_change_on_faces","Use or not internal change of basis on local face nodes","none",pcbddc->use_change_on_faces,&pcbddc->use_change_on_faces,NULL)); 94 if (!pcbddc->use_change_of_basis) { 95 pcbddc->use_change_on_faces = PETSC_FALSE; 96 } 97 /* Switch between M_2 (default) and M_3 preconditioners (as defined by C. Dohrmann in the ref. article) */ 98 PetscCall(PetscOptionsBool("-pc_bddc_switch_static","Switch on static condensation ops around the interface preconditioner","none",pcbddc->switch_static,&pcbddc->switch_static,NULL)); 99 PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_per_proc","Target number of equations per process for coarse problem redistribution (significant only at the coarsest level)","none",pcbddc->coarse_eqs_per_proc,&pcbddc->coarse_eqs_per_proc,NULL)); 100 i = pcbddc->coarsening_ratio; 101 PetscCall(PetscOptionsInt("-pc_bddc_coarsening_ratio","Set coarsening ratio used in multilevel coarsening","PCBDDCSetCoarseningRatio",i,&i,NULL)); 102 PetscCall(PCBDDCSetCoarseningRatio(pc,i)); 103 i = pcbddc->max_levels; 104 PetscCall(PetscOptionsInt("-pc_bddc_levels","Set maximum number of levels for multilevel","PCBDDCSetLevels",i,&i,NULL)); 105 PetscCall(PCBDDCSetLevels(pc,i)); 106 PetscCall(PetscOptionsInt("-pc_bddc_coarse_eqs_limit","Set maximum number of equations on coarsest grid to aim for","none",pcbddc->coarse_eqs_limit,&pcbddc->coarse_eqs_limit,NULL)); 107 PetscCall(PetscOptionsBool("-pc_bddc_use_coarse_estimates","Use estimated eigenvalues for coarse problem","none",pcbddc->use_coarse_estimates,&pcbddc->use_coarse_estimates,NULL)); 108 PetscCall(PetscOptionsBool("-pc_bddc_use_deluxe_scaling","Use deluxe scaling for BDDC","none",pcbddc->use_deluxe_scaling,&pcbddc->use_deluxe_scaling,NULL)); 109 PetscCall(PetscOptionsBool("-pc_bddc_schur_rebuild","Whether or not the interface graph for Schur principal minors has to be rebuilt (i.e. 
define the interface without any adjacency)","none",pcbddc->sub_schurs_rebuild,&pcbddc->sub_schurs_rebuild,NULL)); 110 PetscCall(PetscOptionsInt("-pc_bddc_schur_layers","Number of dofs' layers for the computation of principal minors (i.e. -1 uses all dofs)","none",pcbddc->sub_schurs_layers,&pcbddc->sub_schurs_layers,NULL)); 111 PetscCall(PetscOptionsBool("-pc_bddc_schur_use_useradj","Whether or not the CSR graph specified by the user should be used for computing successive layers (default is to use adj of local mat)","none",pcbddc->sub_schurs_use_useradj,&pcbddc->sub_schurs_use_useradj,NULL)); 112 PetscCall(PetscOptionsBool("-pc_bddc_schur_exact","Whether or not to use the exact Schur complement instead of the reduced one (which excludes size 1 cc)","none",pcbddc->sub_schurs_exact_schur,&pcbddc->sub_schurs_exact_schur,NULL)); 113 PetscCall(PetscOptionsBool("-pc_bddc_deluxe_zerorows","Zero rows and columns of deluxe operators associated with primal dofs","none",pcbddc->deluxe_zerorows,&pcbddc->deluxe_zerorows,NULL)); 114 PetscCall(PetscOptionsBool("-pc_bddc_deluxe_singlemat","Collapse deluxe operators","none",pcbddc->deluxe_singlemat,&pcbddc->deluxe_singlemat,NULL)); 115 PetscCall(PetscOptionsBool("-pc_bddc_adaptive_userdefined","Use user-defined constraints (should be attached via MatSetNearNullSpace to pmat) in addition to those adaptively generated","none",pcbddc->adaptive_userdefined,&pcbddc->adaptive_userdefined,NULL)); 116 nt = 2; 117 PetscCall(PetscOptionsRealArray("-pc_bddc_adaptive_threshold","Thresholds to be used for adaptive selection of constraints","none",pcbddc->adaptive_threshold,&nt,NULL)); 118 if (nt == 1) pcbddc->adaptive_threshold[1] = pcbddc->adaptive_threshold[0]; 119 PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmin","Minimum number of constraints per connected components","none",pcbddc->adaptive_nmin,&pcbddc->adaptive_nmin,NULL)); 120 PetscCall(PetscOptionsInt("-pc_bddc_adaptive_nmax","Maximum number of constraints per connected 
components","none",pcbddc->adaptive_nmax,&pcbddc->adaptive_nmax,NULL)); 121 PetscCall(PetscOptionsBool("-pc_bddc_symmetric","Symmetric computation of primal basis functions","none",pcbddc->symmetric_primal,&pcbddc->symmetric_primal,NULL)); 122 PetscCall(PetscOptionsInt("-pc_bddc_coarse_adj","Number of processors where to map the coarse adjacency list","none",pcbddc->coarse_adj_red,&pcbddc->coarse_adj_red,NULL)); 123 PetscCall(PetscOptionsBool("-pc_bddc_benign_trick","Apply the benign subspace trick to saddle point problems with discontinuous pressures","none",pcbddc->benign_saddle_point,&pcbddc->benign_saddle_point,NULL)); 124 PetscCall(PetscOptionsBool("-pc_bddc_benign_change","Compute the pressure change of basis explicitly","none",pcbddc->benign_change_explicit,&pcbddc->benign_change_explicit,NULL)); 125 PetscCall(PetscOptionsBool("-pc_bddc_benign_compute_correction","Compute the benign correction during PreSolve","none",pcbddc->benign_compute_correction,&pcbddc->benign_compute_correction,NULL)); 126 PetscCall(PetscOptionsBool("-pc_bddc_nonetflux","Automatic computation of no-net-flux quadrature weights","none",pcbddc->compute_nonetflux,&pcbddc->compute_nonetflux,NULL)); 127 PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected","Detects disconnected subdomains","none",pcbddc->detect_disconnected,&pcbddc->detect_disconnected,NULL)); 128 PetscCall(PetscOptionsBool("-pc_bddc_detect_disconnected_filter","Filters out small entries in the local matrix when detecting disconnected subdomains","none",pcbddc->detect_disconnected_filter,&pcbddc->detect_disconnected_filter,NULL)); 129 PetscCall(PetscOptionsBool("-pc_bddc_eliminate_dirichlet","Whether or not we want to eliminate dirichlet dofs during presolve","none",pcbddc->eliminate_dirdofs,&pcbddc->eliminate_dirdofs,NULL)); 130 PetscCall(PetscOptionsTail()); 131 PetscFunctionReturn(0); 132 } 133 134 static PetscErrorCode PCView_BDDC(PC pc,PetscViewer viewer) 135 { 136 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 137 PC_IS 
*pcis = (PC_IS*)pc->data; 138 PetscBool isascii; 139 PetscSubcomm subcomm; 140 PetscViewer subviewer; 141 142 PetscFunctionBegin; 143 PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii)); 144 /* ASCII viewer */ 145 if (isascii) { 146 PetscMPIInt color,rank,size; 147 PetscInt64 loc[7],gsum[6],gmax[6],gmin[6],totbenign; 148 PetscScalar interface_size; 149 PetscReal ratio1=0.,ratio2=0.; 150 Vec counter; 151 152 if (!pc->setupcalled) { 153 PetscCall(PetscViewerASCIIPrintf(viewer," Partial information available: preconditioner has not been setup yet\n")); 154 } 155 PetscCall(PetscViewerASCIIPrintf(viewer," Use verbose output: %D\n",pcbddc->dbg_flag)); 156 PetscCall(PetscViewerASCIIPrintf(viewer," Use user-defined CSR: %d\n",!!pcbddc->mat_graph->nvtxs_csr)); 157 PetscCall(PetscViewerASCIIPrintf(viewer," Use local mat graph: %d\n",pcbddc->use_local_adj && !pcbddc->mat_graph->nvtxs_csr)); 158 if (pcbddc->mat_graph->twodim) { 159 PetscCall(PetscViewerASCIIPrintf(viewer," Connectivity graph topological dimension: 2\n")); 160 } else { 161 PetscCall(PetscViewerASCIIPrintf(viewer," Connectivity graph topological dimension: 3\n")); 162 } 163 if (pcbddc->graphmaxcount != PETSC_MAX_INT) { 164 PetscCall(PetscViewerASCIIPrintf(viewer," Graph max count: %D\n",pcbddc->graphmaxcount)); 165 } 166 PetscCall(PetscViewerASCIIPrintf(viewer," Corner selection: %d (selected %d)\n",pcbddc->corner_selection,pcbddc->corner_selected)); 167 PetscCall(PetscViewerASCIIPrintf(viewer," Use vertices: %d (vertex size %D)\n",pcbddc->use_vertices,pcbddc->vertex_size)); 168 PetscCall(PetscViewerASCIIPrintf(viewer," Use edges: %d\n",pcbddc->use_edges)); 169 PetscCall(PetscViewerASCIIPrintf(viewer," Use faces: %d\n",pcbddc->use_faces)); 170 PetscCall(PetscViewerASCIIPrintf(viewer," Use true near null space: %d\n",pcbddc->use_nnsp_true)); 171 PetscCall(PetscViewerASCIIPrintf(viewer," Use QR for single constraints on cc: %d\n",pcbddc->use_qr_single)); 172 
PetscCall(PetscViewerASCIIPrintf(viewer," Use change of basis on local edge nodes: %d\n",pcbddc->use_change_of_basis)); 173 PetscCall(PetscViewerASCIIPrintf(viewer," Use change of basis on local face nodes: %d\n",pcbddc->use_change_on_faces)); 174 PetscCall(PetscViewerASCIIPrintf(viewer," User defined change of basis matrix: %d\n",!!pcbddc->user_ChangeOfBasisMatrix)); 175 PetscCall(PetscViewerASCIIPrintf(viewer," Has change of basis matrix: %d\n",!!pcbddc->ChangeOfBasisMatrix)); 176 PetscCall(PetscViewerASCIIPrintf(viewer," Eliminate dirichlet boundary dofs: %d\n",pcbddc->eliminate_dirdofs)); 177 PetscCall(PetscViewerASCIIPrintf(viewer," Switch on static condensation ops around the interface preconditioner: %d\n",pcbddc->switch_static)); 178 PetscCall(PetscViewerASCIIPrintf(viewer," Use exact dirichlet trick: %d\n",pcbddc->use_exact_dirichlet_trick)); 179 PetscCall(PetscViewerASCIIPrintf(viewer," Interface extension: %s\n",PCBDDCInterfaceExtTypes[pcbddc->interface_extension])); 180 PetscCall(PetscViewerASCIIPrintf(viewer," Multilevel max levels: %D\n",pcbddc->max_levels)); 181 PetscCall(PetscViewerASCIIPrintf(viewer," Multilevel coarsening ratio: %D\n",pcbddc->coarsening_ratio)); 182 PetscCall(PetscViewerASCIIPrintf(viewer," Use estimated eigs for coarse problem: %d\n",pcbddc->use_coarse_estimates)); 183 PetscCall(PetscViewerASCIIPrintf(viewer," Use deluxe scaling: %d\n",pcbddc->use_deluxe_scaling)); 184 PetscCall(PetscViewerASCIIPrintf(viewer," Use deluxe zerorows: %d\n",pcbddc->deluxe_zerorows)); 185 PetscCall(PetscViewerASCIIPrintf(viewer," Use deluxe singlemat: %d\n",pcbddc->deluxe_singlemat)); 186 PetscCall(PetscViewerASCIIPrintf(viewer," Rebuild interface graph for Schur principal minors: %d\n",pcbddc->sub_schurs_rebuild)); 187 PetscCall(PetscViewerASCIIPrintf(viewer," Number of dofs' layers for the computation of principal minors: %D\n",pcbddc->sub_schurs_layers)); 188 PetscCall(PetscViewerASCIIPrintf(viewer," Use user CSR graph to compute successive layers: 
%d\n",pcbddc->sub_schurs_use_useradj)); 189 if (pcbddc->adaptive_threshold[1] != pcbddc->adaptive_threshold[0]) { 190 PetscCall(PetscViewerASCIIPrintf(viewer," Adaptive constraint selection thresholds (active %d, userdefined %d): %g,%g\n",pcbddc->adaptive_selection,pcbddc->adaptive_userdefined,pcbddc->adaptive_threshold[0],pcbddc->adaptive_threshold[1])); 191 } else { 192 PetscCall(PetscViewerASCIIPrintf(viewer," Adaptive constraint selection threshold (active %d, userdefined %d): %g\n",pcbddc->adaptive_selection,pcbddc->adaptive_userdefined,pcbddc->adaptive_threshold[0])); 193 } 194 PetscCall(PetscViewerASCIIPrintf(viewer," Min constraints / connected component: %D\n",pcbddc->adaptive_nmin)); 195 PetscCall(PetscViewerASCIIPrintf(viewer," Max constraints / connected component: %D\n",pcbddc->adaptive_nmax)); 196 PetscCall(PetscViewerASCIIPrintf(viewer," Invert exact Schur complement for adaptive selection: %d\n",pcbddc->sub_schurs_exact_schur)); 197 PetscCall(PetscViewerASCIIPrintf(viewer," Symmetric computation of primal basis functions: %d\n",pcbddc->symmetric_primal)); 198 PetscCall(PetscViewerASCIIPrintf(viewer," Num. Procs. 
to map coarse adjacency list: %D\n",pcbddc->coarse_adj_red)); 199 PetscCall(PetscViewerASCIIPrintf(viewer," Coarse eqs per proc (significant at the coarsest level): %D\n",pcbddc->coarse_eqs_per_proc)); 200 PetscCall(PetscViewerASCIIPrintf(viewer," Detect disconnected: %d (filter %d)\n",pcbddc->detect_disconnected,pcbddc->detect_disconnected_filter)); 201 PetscCall(PetscViewerASCIIPrintf(viewer," Benign subspace trick: %d (change explicit %d)\n",pcbddc->benign_saddle_point,pcbddc->benign_change_explicit)); 202 PetscCall(PetscViewerASCIIPrintf(viewer," Benign subspace trick is active: %d\n",pcbddc->benign_have_null)); 203 PetscCall(PetscViewerASCIIPrintf(viewer," Algebraic computation of no-net-flux: %d\n",pcbddc->compute_nonetflux)); 204 if (!pc->setupcalled) PetscFunctionReturn(0); 205 206 /* compute interface size */ 207 PetscCall(VecSet(pcis->vec1_B,1.0)); 208 PetscCall(MatCreateVecs(pc->pmat,&counter,NULL)); 209 PetscCall(VecSet(counter,0.0)); 210 PetscCall(VecScatterBegin(pcis->global_to_B,pcis->vec1_B,counter,INSERT_VALUES,SCATTER_REVERSE)); 211 PetscCall(VecScatterEnd(pcis->global_to_B,pcis->vec1_B,counter,INSERT_VALUES,SCATTER_REVERSE)); 212 PetscCall(VecSum(counter,&interface_size)); 213 PetscCall(VecDestroy(&counter)); 214 215 /* compute some statistics on the domain decomposition */ 216 gsum[0] = 1; 217 gsum[1] = gsum[2] = gsum[3] = gsum[4] = gsum[5] = 0; 218 loc[0] = !!pcis->n; 219 loc[1] = pcis->n - pcis->n_B; 220 loc[2] = pcis->n_B; 221 loc[3] = pcbddc->local_primal_size; 222 loc[4] = pcis->n; 223 loc[5] = pcbddc->n_local_subs > 0 ? pcbddc->n_local_subs : (pcis->n ? 
1 : 0); 224 loc[6] = pcbddc->benign_n; 225 PetscCallMPI(MPI_Reduce(loc,gsum,6,MPIU_INT64,MPI_SUM,0,PetscObjectComm((PetscObject)pc))); 226 if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = -1; 227 PetscCallMPI(MPI_Reduce(loc,gmax,6,MPIU_INT64,MPI_MAX,0,PetscObjectComm((PetscObject)pc))); 228 if (!loc[0]) loc[1] = loc[2] = loc[3] = loc[4] = loc[5] = PETSC_MAX_INT; 229 PetscCallMPI(MPI_Reduce(loc,gmin,6,MPIU_INT64,MPI_MIN,0,PetscObjectComm((PetscObject)pc))); 230 PetscCallMPI(MPI_Reduce(&loc[6],&totbenign,1,MPIU_INT64,MPI_SUM,0,PetscObjectComm((PetscObject)pc))); 231 if (pcbddc->coarse_size) { 232 ratio1 = pc->pmat->rmap->N/(1.*pcbddc->coarse_size); 233 ratio2 = PetscRealPart(interface_size)/pcbddc->coarse_size; 234 } 235 PetscCall(PetscViewerASCIIPrintf(viewer,"********************************** STATISTICS AT LEVEL %d **********************************\n",pcbddc->current_level)); 236 PetscCall(PetscViewerASCIIPrintf(viewer," Global dofs sizes: all %D interface %D coarse %D\n",pc->pmat->rmap->N,(PetscInt)PetscRealPart(interface_size),pcbddc->coarse_size)); 237 PetscCall(PetscViewerASCIIPrintf(viewer," Coarsening ratios: all/coarse %D interface/coarse %D\n",(PetscInt)ratio1,(PetscInt)ratio2)); 238 PetscCall(PetscViewerASCIIPrintf(viewer," Active processes : %D\n",(PetscInt)gsum[0])); 239 PetscCall(PetscViewerASCIIPrintf(viewer," Total subdomains : %D\n",(PetscInt)gsum[5])); 240 if (pcbddc->benign_have_null) { 241 PetscCall(PetscViewerASCIIPrintf(viewer," Benign subs : %D\n",(PetscInt)totbenign)); 242 } 243 PetscCall(PetscViewerASCIIPrintf(viewer," Dofs type :\tMIN\tMAX\tMEAN\n")); 244 PetscCall(PetscViewerASCIIPrintf(viewer," Interior dofs :\t%D\t%D\t%D\n",(PetscInt)gmin[1],(PetscInt)gmax[1],(PetscInt)(gsum[1]/gsum[0]))); 245 PetscCall(PetscViewerASCIIPrintf(viewer," Interface dofs :\t%D\t%D\t%D\n",(PetscInt)gmin[2],(PetscInt)gmax[2],(PetscInt)(gsum[2]/gsum[0]))); 246 PetscCall(PetscViewerASCIIPrintf(viewer," Primal dofs 
:\t%D\t%D\t%D\n",(PetscInt)gmin[3],(PetscInt)gmax[3],(PetscInt)(gsum[3]/gsum[0]))); 247 PetscCall(PetscViewerASCIIPrintf(viewer," Local dofs :\t%D\t%D\t%D\n",(PetscInt)gmin[4],(PetscInt)gmax[4],(PetscInt)(gsum[4]/gsum[0]))); 248 PetscCall(PetscViewerASCIIPrintf(viewer," Local subs :\t%D\t%D\n" ,(PetscInt)gmin[5],(PetscInt)gmax[5])); 249 PetscCall(PetscViewerFlush(viewer)); 250 251 PetscCallMPI(MPI_Comm_rank(PetscObjectComm((PetscObject)pc),&rank)); 252 253 /* local solvers */ 254 PetscCall(PetscViewerGetSubViewer(viewer,PetscObjectComm((PetscObject)pcbddc->ksp_D),&subviewer)); 255 if (rank == 0) { 256 PetscCall(PetscViewerASCIIPrintf(subviewer,"--- Interior solver (rank 0)\n")); 257 PetscCall(PetscViewerASCIIPushTab(subviewer)); 258 PetscCall(KSPView(pcbddc->ksp_D,subviewer)); 259 PetscCall(PetscViewerASCIIPopTab(subviewer)); 260 PetscCall(PetscViewerASCIIPrintf(subviewer,"--- Correction solver (rank 0)\n")); 261 PetscCall(PetscViewerASCIIPushTab(subviewer)); 262 PetscCall(KSPView(pcbddc->ksp_R,subviewer)); 263 PetscCall(PetscViewerASCIIPopTab(subviewer)); 264 PetscCall(PetscViewerFlush(subviewer)); 265 } 266 PetscCall(PetscViewerRestoreSubViewer(viewer,PetscObjectComm((PetscObject)pcbddc->ksp_D),&subviewer)); 267 PetscCall(PetscViewerFlush(viewer)); 268 269 /* the coarse problem can be handled by a different communicator */ 270 if (pcbddc->coarse_ksp) color = 1; 271 else color = 0; 272 PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size)); 273 PetscCall(PetscSubcommCreate(PetscObjectComm((PetscObject)pc),&subcomm)); 274 PetscCall(PetscSubcommSetNumber(subcomm,PetscMin(size,2))); 275 PetscCall(PetscSubcommSetTypeGeneral(subcomm,color,rank)); 276 PetscCall(PetscViewerGetSubViewer(viewer,PetscSubcommChild(subcomm),&subviewer)); 277 if (color == 1) { 278 PetscCall(PetscViewerASCIIPrintf(subviewer,"--- Coarse solver\n")); 279 PetscCall(PetscViewerASCIIPushTab(subviewer)); 280 PetscCall(KSPView(pcbddc->coarse_ksp,subviewer)); 281 
PetscCall(PetscViewerASCIIPopTab(subviewer)); 282 PetscCall(PetscViewerFlush(subviewer)); 283 } 284 PetscCall(PetscViewerRestoreSubViewer(viewer,PetscSubcommChild(subcomm),&subviewer)); 285 PetscCall(PetscSubcommDestroy(&subcomm)); 286 PetscCall(PetscViewerFlush(viewer)); 287 } 288 PetscFunctionReturn(0); 289 } 290 291 static PetscErrorCode PCBDDCSetDiscreteGradient_BDDC(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming) 292 { 293 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 294 295 PetscFunctionBegin; 296 PetscCall(PetscObjectReference((PetscObject)G)); 297 PetscCall(MatDestroy(&pcbddc->discretegradient)); 298 pcbddc->discretegradient = G; 299 pcbddc->nedorder = order > 0 ? order : -order; 300 pcbddc->nedfield = field; 301 pcbddc->nedglobal = global; 302 pcbddc->conforming = conforming; 303 PetscFunctionReturn(0); 304 } 305 306 /*@ 307 PCBDDCSetDiscreteGradient - Sets the discrete gradient 308 309 Collective on PC 310 311 Input Parameters: 312 + pc - the preconditioning context 313 . G - the discrete gradient matrix (should be in AIJ format) 314 . order - the order of the Nedelec space (1 for the lowest order) 315 . field - the field id of the Nedelec dofs (not used if the fields have not been specified) 316 . global - the type of global ordering for the rows of G 317 - conforming - whether the mesh is conforming or not 318 319 Level: advanced 320 321 Notes: 322 The discrete gradient matrix G is used to analyze the subdomain edges, and it should not contain any zero entry. 323 For variable order spaces, the order should be set to zero. 324 If global is true, the rows of G should be given in global ordering for the whole dofs; 325 if false, the ordering should be global for the Nedelec field. 326 In the latter case, it should hold gid[i] < gid[j] iff geid[i] < geid[j], with gid the global orderding for all the dofs 327 and geid the one for the Nedelec field. 
328 329 .seealso: PCBDDC,PCBDDCSetDofsSplitting(),PCBDDCSetDofsSplittingLocal() 330 @*/ 331 PetscErrorCode PCBDDCSetDiscreteGradient(PC pc, Mat G, PetscInt order, PetscInt field, PetscBool global, PetscBool conforming) 332 { 333 PetscFunctionBegin; 334 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 335 PetscValidHeaderSpecific(G,MAT_CLASSID,2); 336 PetscValidLogicalCollectiveInt(pc,order,3); 337 PetscValidLogicalCollectiveInt(pc,field,4); 338 PetscValidLogicalCollectiveBool(pc,global,5); 339 PetscValidLogicalCollectiveBool(pc,conforming,6); 340 PetscCheckSameComm(pc,1,G,2); 341 PetscTryMethod(pc,"PCBDDCSetDiscreteGradient_C",(PC,Mat,PetscInt,PetscInt,PetscBool,PetscBool),(pc,G,order,field,global,conforming)); 342 PetscFunctionReturn(0); 343 } 344 345 static PetscErrorCode PCBDDCSetDivergenceMat_BDDC(PC pc, Mat divudotp, PetscBool trans, IS vl2l) 346 { 347 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 348 349 PetscFunctionBegin; 350 PetscCall(PetscObjectReference((PetscObject)divudotp)); 351 PetscCall(MatDestroy(&pcbddc->divudotp)); 352 pcbddc->divudotp = divudotp; 353 pcbddc->divudotp_trans = trans; 354 pcbddc->compute_nonetflux = PETSC_TRUE; 355 if (vl2l) { 356 PetscCall(PetscObjectReference((PetscObject)vl2l)); 357 PetscCall(ISDestroy(&pcbddc->divudotp_vl2l)); 358 pcbddc->divudotp_vl2l = vl2l; 359 } 360 PetscFunctionReturn(0); 361 } 362 363 /*@ 364 PCBDDCSetDivergenceMat - Sets the linear operator representing \int_\Omega \div {\bf u} \cdot p dx 365 366 Collective on PC 367 368 Input Parameters: 369 + pc - the preconditioning context 370 . divudotp - the matrix (must be of type MATIS) 371 . trans - if trans if false (resp. true), then pressures are in the test (trial) space and velocities are in the trial (test) space. 
372 - vl2l - optional index set describing the local (wrt the local matrix in divudotp) to local (wrt the local matrix in the preconditioning matrix) map for the velocities 373 374 Level: advanced 375 376 Notes: 377 This auxiliary matrix is used to compute quadrature weights representing the net-flux across subdomain boundaries 378 If vl2l is NULL, the local ordering for velocities in divudotp should match that of the preconditioning matrix 379 380 .seealso: PCBDDC 381 @*/ 382 PetscErrorCode PCBDDCSetDivergenceMat(PC pc, Mat divudotp, PetscBool trans, IS vl2l) 383 { 384 PetscBool ismatis; 385 386 PetscFunctionBegin; 387 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 388 PetscValidHeaderSpecific(divudotp,MAT_CLASSID,2); 389 PetscCheckSameComm(pc,1,divudotp,2); 390 PetscValidLogicalCollectiveBool(pc,trans,3); 391 if (vl2l) PetscValidHeaderSpecific(vl2l,IS_CLASSID,4); 392 PetscCall(PetscObjectTypeCompare((PetscObject)divudotp,MATIS,&ismatis)); 393 PetscCheck(ismatis,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"Divergence matrix needs to be of type MATIS"); 394 PetscTryMethod(pc,"PCBDDCSetDivergenceMat_C",(PC,Mat,PetscBool,IS),(pc,divudotp,trans,vl2l)); 395 PetscFunctionReturn(0); 396 } 397 398 static PetscErrorCode PCBDDCSetChangeOfBasisMat_BDDC(PC pc, Mat change, PetscBool interior) 399 { 400 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 401 402 PetscFunctionBegin; 403 PetscCall(PetscObjectReference((PetscObject)change)); 404 PetscCall(MatDestroy(&pcbddc->user_ChangeOfBasisMatrix)); 405 pcbddc->user_ChangeOfBasisMatrix = change; 406 pcbddc->change_interior = interior; 407 PetscFunctionReturn(0); 408 } 409 /*@ 410 PCBDDCSetChangeOfBasisMat - Set user defined change of basis for dofs 411 412 Collective on PC 413 414 Input Parameters: 415 + pc - the preconditioning context 416 . 
change - the change of basis matrix 417 - interior - whether or not the change of basis modifies interior dofs 418 419 Level: intermediate 420 421 Notes: 422 423 .seealso: PCBDDC 424 @*/ 425 PetscErrorCode PCBDDCSetChangeOfBasisMat(PC pc, Mat change, PetscBool interior) 426 { 427 PetscFunctionBegin; 428 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 429 PetscValidHeaderSpecific(change,MAT_CLASSID,2); 430 PetscCheckSameComm(pc,1,change,2); 431 if (pc->mat) { 432 PetscInt rows_c,cols_c,rows,cols; 433 PetscCall(MatGetSize(pc->mat,&rows,&cols)); 434 PetscCall(MatGetSize(change,&rows_c,&cols_c)); 435 PetscCheck(rows_c == rows,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of rows for change of basis matrix! %D != %D",rows_c,rows); 436 PetscCheck(cols_c == cols,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of columns for change of basis matrix! %D != %D",cols_c,cols); 437 PetscCall(MatGetLocalSize(pc->mat,&rows,&cols)); 438 PetscCall(MatGetLocalSize(change,&rows_c,&cols_c)); 439 PetscCheck(rows_c == rows,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of local rows for change of basis matrix! %D != %D",rows_c,rows); 440 PetscCheck(cols_c == cols,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid number of local columns for change of basis matrix! 
%D != %D",cols_c,cols); 441 } 442 PetscTryMethod(pc,"PCBDDCSetChangeOfBasisMat_C",(PC,Mat,PetscBool),(pc,change,interior)); 443 PetscFunctionReturn(0); 444 } 445 446 static PetscErrorCode PCBDDCSetPrimalVerticesIS_BDDC(PC pc, IS PrimalVertices) 447 { 448 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 449 PetscBool isequal = PETSC_FALSE; 450 451 PetscFunctionBegin; 452 PetscCall(PetscObjectReference((PetscObject)PrimalVertices)); 453 if (pcbddc->user_primal_vertices) { 454 PetscCall(ISEqual(PrimalVertices,pcbddc->user_primal_vertices,&isequal)); 455 } 456 PetscCall(ISDestroy(&pcbddc->user_primal_vertices)); 457 PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local)); 458 pcbddc->user_primal_vertices = PrimalVertices; 459 if (!isequal) pcbddc->recompute_topography = PETSC_TRUE; 460 PetscFunctionReturn(0); 461 } 462 463 /*@ 464 PCBDDCSetPrimalVerticesIS - Set additional user defined primal vertices in PCBDDC 465 466 Collective 467 468 Input Parameters: 469 + pc - the preconditioning context 470 - PrimalVertices - index set of primal vertices in global numbering (can be empty) 471 472 Level: intermediate 473 474 Notes: 475 Any process can list any global node 476 477 .seealso: PCBDDC, PCBDDCGetPrimalVerticesIS(), PCBDDCSetPrimalVerticesLocalIS(), PCBDDCGetPrimalVerticesLocalIS() 478 @*/ 479 PetscErrorCode PCBDDCSetPrimalVerticesIS(PC pc, IS PrimalVertices) 480 { 481 PetscFunctionBegin; 482 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 483 PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2); 484 PetscCheckSameComm(pc,1,PrimalVertices,2); 485 PetscTryMethod(pc,"PCBDDCSetPrimalVerticesIS_C",(PC,IS),(pc,PrimalVertices)); 486 PetscFunctionReturn(0); 487 } 488 489 static PetscErrorCode PCBDDCGetPrimalVerticesIS_BDDC(PC pc, IS *is) 490 { 491 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 492 493 PetscFunctionBegin; 494 *is = pcbddc->user_primal_vertices; 495 PetscFunctionReturn(0); 496 } 497 498 /*@ 499 PCBDDCGetPrimalVerticesIS - Get user defined primal vertices set with 
PCBDDCSetPrimalVerticesIS()

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  is - index set of primal vertices in global numbering (NULL if not set)

   Level: intermediate

   Notes:
   The returned IS is a borrowed reference (no new reference is taken): do not destroy it.

.seealso: PCBDDC, PCBDDCSetPrimalVerticesIS(), PCBDDCSetPrimalVerticesLocalIS(), PCBDDCGetPrimalVerticesLocalIS()
@*/
PetscErrorCode PCBDDCGetPrimalVerticesIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(is,2);
  PetscUseMethod(pc,"PCBDDCGetPrimalVerticesIS_C",(PC,IS*),(pc,is));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetPrimalVerticesLocalIS_BDDC(PC pc, IS PrimalVertices)
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* take the new reference before destroying the old ones (safe for self-assignment) */
  PetscCall(PetscObjectReference((PetscObject)PrimalVertices));
  if (pcbddc->user_primal_vertices_local) {
    PetscCall(ISEqual(PrimalVertices,pcbddc->user_primal_vertices_local,&isequal));
  }
  /* last user setting takes precedence -> drop both the global and the local customization */
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices));
  PetscCall(ISDestroy(&pcbddc->user_primal_vertices_local));
  pcbddc->user_primal_vertices_local = PrimalVertices;
  /* topology needs to be recomputed only if the primal set actually changed */
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetPrimalVerticesLocalIS - Set additional user defined primal vertices in PCBDDC

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  PrimalVertices - index set of primal vertices in local numbering (can be empty)

   Level: intermediate

   Notes:
   The last call among the global (PCBDDCSetPrimalVerticesIS) and local variants takes precedence.

.seealso: PCBDDC, PCBDDCSetPrimalVerticesIS(), PCBDDCGetPrimalVerticesIS(), PCBDDCGetPrimalVerticesLocalIS()
@*/
PetscErrorCode PCBDDCSetPrimalVerticesLocalIS(PC pc, IS PrimalVertices)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(PrimalVertices,IS_CLASSID,2);
  PetscCheckSameComm(pc,1,PrimalVertices,2);
  PetscTryMethod(pc,"PCBDDCSetPrimalVerticesLocalIS_C",(PC,IS),(pc,PrimalVertices));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCGetPrimalVerticesLocalIS_BDDC(PC pc, IS *is)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  /* plain pointer copy: caller must not destroy the returned IS */
  *is = pcbddc->user_primal_vertices_local;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetPrimalVerticesLocalIS - Get user defined primal vertices set with PCBDDCSetPrimalVerticesLocalIS()

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  is - index set of primal vertices in local numbering (NULL if not set)

   Level: intermediate

   Notes:
   The returned IS is a borrowed reference (no new reference is taken): do not destroy it.

.seealso: PCBDDC, PCBDDCSetPrimalVerticesIS(), PCBDDCGetPrimalVerticesIS(), PCBDDCSetPrimalVerticesLocalIS()
@*/
PetscErrorCode PCBDDCGetPrimalVerticesLocalIS(PC pc, IS *is)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(is,2);
  PetscUseMethod(pc,"PCBDDCGetPrimalVerticesLocalIS_C",(PC,IS*),(pc,is));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetCoarseningRatio_BDDC(PC pc,PetscInt k)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->coarsening_ratio = k;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetCoarseningRatio - Set coarsening ratio used in multilevel

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  k - coarsening ratio (H/h at the coarser level)

   Options Database Keys:
.
-pc_bddc_coarsening_ratio <int> - Set coarsening ratio used in multilevel coarsening

   Level: intermediate

   Notes:
   Approximatively k subdomains at the finer level will be aggregated into a single subdomain at the coarser level

.seealso: PCBDDC, PCBDDCSetLevels()
@*/
PetscErrorCode PCBDDCSetCoarseningRatio(PC pc,PetscInt k)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,k,2);
  PetscTryMethod(pc,"PCBDDCSetCoarseningRatio_C",(PC,PetscInt),(pc,k));
  PetscFunctionReturn(0);
}

/* The following functions (PCBDDCSetUseExactDirichlet PCBDDCSetLevel) are not public */
static PetscErrorCode PCBDDCSetUseExactDirichlet_BDDC(PC pc,PetscBool flg)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->use_exact_dirichlet_trick = flg;
  PetscFunctionReturn(0);
}

/* Enable/disable the exact Dirichlet trick (skip one Dirichlet solve per CG iteration); internal API */
PetscErrorCode PCBDDCSetUseExactDirichlet(PC pc,PetscBool flg)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveBool(pc,flg,2);
  PetscTryMethod(pc,"PCBDDCSetUseExactDirichlet_C",(PC,PetscBool),(pc,flg));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetLevel_BDDC(PC pc,PetscInt level)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  pcbddc->current_level = level;
  PetscFunctionReturn(0);
}

/* Set the current level in the multilevel hierarchy; internal API */
PetscErrorCode PCBDDCSetLevel(PC pc,PetscInt level)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,level,2);
  PetscTryMethod(pc,"PCBDDCSetLevel_C",(PC,PetscInt),(pc,level));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetLevels_BDDC(PC pc,PetscInt levels)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  /* the logging arrays (PC_BDDC_*) are statically sized by PETSC_PCBDDC_MAXLEVELS */
  PetscCheck(levels < PETSC_PCBDDC_MAXLEVELS,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Maximum number of additional levels for BDDC is %d",PETSC_PCBDDC_MAXLEVELS-1);
  pcbddc->max_levels = levels;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetLevels - Sets the maximum number of additional levels allowed for multilevel BDDC

   Logically collective on PC

   Input Parameters:
+  pc - the preconditioning context
-  levels - the maximum number of levels

   Options Database Keys:
.  -pc_bddc_levels <int> - Set maximum number of levels for multilevel

   Level: intermediate

   Notes:
   The default value is 0, that gives the classical two-levels BDDC

.seealso: PCBDDC, PCBDDCSetCoarseningRatio()
@*/
PetscErrorCode PCBDDCSetLevels(PC pc,PetscInt levels)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,levels,2);
  PetscTryMethod(pc,"PCBDDCSetLevels_C",(PC,PetscInt),(pc,levels));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetDirichletBoundaries_BDDC(PC pc,IS DirichletBoundaries)
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* take the new reference before destroying the old ones (safe for self-assignment) */
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundaries) {
    PetscCall(ISEqual(DirichletBoundaries,pcbddc->DirichletBoundaries,&isequal));
  }
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundaries = DirichletBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDirichletBoundaries - Set IS defining Dirichlet boundaries for the global problem.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  DirichletBoundaries - parallel IS defining the Dirichlet boundaries

   Level: intermediate

   Notes:
   Provide the information if you used MatZeroRows/Columns routines. Any process can list any global node

.seealso: PCBDDC, PCBDDCSetDirichletBoundariesLocal(), MatZeroRows(), MatZeroRowsColumns()
@*/
PetscErrorCode PCBDDCSetDirichletBoundaries(PC pc,IS DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2);
  PetscCheckSameComm(pc,1,DirichletBoundaries,2);
  PetscTryMethod(pc,"PCBDDCSetDirichletBoundaries_C",(PC,IS),(pc,DirichletBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetDirichletBoundariesLocal_BDDC(PC pc,IS DirichletBoundaries)
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* take the new reference before destroying the old ones (safe for self-assignment) */
  PetscCall(PetscObjectReference((PetscObject)DirichletBoundaries));
  if (pcbddc->DirichletBoundariesLocal) {
    PetscCall(ISEqual(DirichletBoundaries,pcbddc->DirichletBoundariesLocal,&isequal));
  }
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->DirichletBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->DirichletBoundaries));
  pcbddc->DirichletBoundariesLocal = DirichletBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDirichletBoundariesLocal - Set IS defining Dirichlet boundaries for the global problem in local ordering.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  DirichletBoundaries - parallel IS defining the Dirichlet boundaries (in local ordering)

   Level: intermediate

   Notes:
   The last call among the global (PCBDDCSetDirichletBoundaries) and local variants takes precedence.

.seealso: PCBDDC, PCBDDCSetDirichletBoundaries(), MatZeroRows(), MatZeroRowsColumns()
@*/
PetscErrorCode PCBDDCSetDirichletBoundariesLocal(PC pc,IS DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(DirichletBoundaries,IS_CLASSID,2);
  PetscCheckSameComm(pc,1,DirichletBoundaries,2);
  PetscTryMethod(pc,"PCBDDCSetDirichletBoundariesLocal_C",(PC,IS),(pc,DirichletBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetNeumannBoundaries_BDDC(PC pc,IS NeumannBoundaries)
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* take the new reference before destroying the old ones (safe for self-assignment) */
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundaries) {
    PetscCall(ISEqual(NeumannBoundaries,pcbddc->NeumannBoundaries,&isequal));
  }
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundaries = NeumannBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetNeumannBoundaries - Set IS defining Neumann boundaries for the global problem.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  NeumannBoundaries - parallel IS defining the Neumann boundaries

   Level: intermediate

   Notes:
   Any process can list any global node

.seealso: PCBDDC, PCBDDCSetNeumannBoundariesLocal()
@*/
PetscErrorCode PCBDDCSetNeumannBoundaries(PC pc,IS NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2);
  PetscCheckSameComm(pc,1,NeumannBoundaries,2);
  PetscTryMethod(pc,"PCBDDCSetNeumannBoundaries_C",(PC,IS),(pc,NeumannBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetNeumannBoundariesLocal_BDDC(PC pc,IS NeumannBoundaries)
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* take the new reference before destroying the old ones (safe for self-assignment) */
  PetscCall(PetscObjectReference((PetscObject)NeumannBoundaries));
  if (pcbddc->NeumannBoundariesLocal) {
    PetscCall(ISEqual(NeumannBoundaries,pcbddc->NeumannBoundariesLocal,&isequal));
  }
  /* last user setting takes precedence -> destroy any other customization */
  PetscCall(ISDestroy(&pcbddc->NeumannBoundariesLocal));
  PetscCall(ISDestroy(&pcbddc->NeumannBoundaries));
  pcbddc->NeumannBoundariesLocal = NeumannBoundaries;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetNeumannBoundariesLocal - Set IS defining Neumann boundaries for the global problem in local ordering.

   Collective

   Input Parameters:
+  pc - the preconditioning context
-  NeumannBoundaries - parallel IS defining the subdomain part of Neumann boundaries (in local ordering)

   Level: intermediate

   Notes:
   The last call among the global (PCBDDCSetNeumannBoundaries) and local variants takes precedence.

.seealso: PCBDDC, PCBDDCSetNeumannBoundaries()
@*/
PetscErrorCode PCBDDCSetNeumannBoundariesLocal(PC pc,IS NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidHeaderSpecific(NeumannBoundaries,IS_CLASSID,2);
  PetscCheckSameComm(pc,1,NeumannBoundaries,2);
  PetscTryMethod(pc,"PCBDDCSetNeumannBoundariesLocal_C",(PC,IS),(pc,NeumannBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCGetDirichletBoundaries_BDDC(PC pc,IS *DirichletBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  /* plain pointer copy: caller must not destroy the returned IS */
  *DirichletBoundaries = pcbddc->DirichletBoundaries;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetDirichletBoundaries - Get parallel IS for Dirichlet boundaries

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.
DirichletBoundaries - index set defining the Dirichlet boundaries

   Level: intermediate

   Notes:
   The IS returned (if any) is the same passed in earlier by the user with PCBDDCSetDirichletBoundaries

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetDirichletBoundaries(PC pc,IS *DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCBDDCGetDirichletBoundaries_C",(PC,IS*),(pc,DirichletBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCGetDirichletBoundariesLocal_BDDC(PC pc,IS *DirichletBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  /* plain pointer copy: caller must not destroy the returned IS */
  *DirichletBoundaries = pcbddc->DirichletBoundariesLocal;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetDirichletBoundariesLocal - Get parallel IS for Dirichlet boundaries (in local ordering)

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  DirichletBoundaries - index set defining the subdomain part of Dirichlet boundaries

   Level: intermediate

   Notes:
   The IS returned could be the same passed in earlier by the user (if provided with PCBDDCSetDirichletBoundariesLocal) or a global-to-local map of the global IS (if provided with PCBDDCSetDirichletBoundaries).
   In the latter case, the IS will be available after PCSetUp.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetDirichletBoundariesLocal(PC pc,IS *DirichletBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCBDDCGetDirichletBoundariesLocal_C",(PC,IS*),(pc,DirichletBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCGetNeumannBoundaries_BDDC(PC pc,IS *NeumannBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  /* plain pointer copy: caller must not destroy the returned IS */
  *NeumannBoundaries = pcbddc->NeumannBoundaries;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetNeumannBoundaries - Get parallel IS for Neumann boundaries

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.  NeumannBoundaries - index set defining the Neumann boundaries

   Level: intermediate

   Notes:
   The IS returned (if any) is the same passed in earlier by the user with PCBDDCSetNeumannBoundaries

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetNeumannBoundaries(PC pc,IS *NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCBDDCGetNeumannBoundaries_C",(PC,IS*),(pc,NeumannBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCGetNeumannBoundariesLocal_BDDC(PC pc,IS *NeumannBoundaries)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;

  PetscFunctionBegin;
  /* plain pointer copy: caller must not destroy the returned IS */
  *NeumannBoundaries = pcbddc->NeumannBoundariesLocal;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCGetNeumannBoundariesLocal - Get parallel IS for Neumann boundaries (in local ordering)

   Collective

   Input Parameters:
.  pc - the preconditioning context

   Output Parameters:
.
NeumannBoundaries - index set defining the subdomain part of Neumann boundaries

   Level: intermediate

   Notes:
   The IS returned could be the same passed in earlier by the user (if provided with PCBDDCSetNeumannBoundariesLocal) or a global-to-local map of the global IS (if provided with PCBDDCSetNeumannBoundaries).
   In the latter case, the IS will be available after PCSetUp.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCGetNeumannBoundariesLocal(PC pc,IS *NeumannBoundaries)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscUseMethod(pc,"PCBDDCGetNeumannBoundariesLocal_C",(PC,IS*),(pc,NeumannBoundaries));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetLocalAdjacencyGraph_BDDC(PC pc, PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
{
  PC_BDDC     *pcbddc    = (PC_BDDC*)pc->data;
  PCBDDCGraph mat_graph  = pcbddc->mat_graph;
  PetscBool   same_data  = PETSC_FALSE;

  PetscFunctionBegin;
  /* empty graph: honor the ownership contract (free for PETSC_OWN_POINTER) and reset any stored CSR */
  if (!nvtxs) {
    if (copymode == PETSC_OWN_POINTER) {
      PetscCall(PetscFree(xadj));
      PetscCall(PetscFree(adjncy));
    }
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    PetscFunctionReturn(0);
  }
  /* detect a no-op resubmission of the same CSR to avoid triggering a topology recomputation */
  if (mat_graph->nvtxs == nvtxs && mat_graph->freecsr) { /* we own the data */
    if (mat_graph->xadj == xadj && mat_graph->adjncy == adjncy) same_data = PETSC_TRUE;
    if (!same_data && mat_graph->xadj[nvtxs] == xadj[nvtxs]) {
      PetscCall(PetscArraycmp(xadj,mat_graph->xadj,nvtxs+1,&same_data));
      if (same_data) {
        PetscCall(PetscArraycmp(adjncy,mat_graph->adjncy,xadj[nvtxs],&same_data));
      }
    }
  }
  if (!same_data) {
    /* free old CSR */
    PetscCall(PCBDDCGraphResetCSR(mat_graph));
    /* get CSR into graph structure; freecsr records whether the graph owns the arrays */
    if (copymode == PETSC_COPY_VALUES) {
      PetscCall(PetscMalloc1(nvtxs+1,&mat_graph->xadj));
      PetscCall(PetscMalloc1(xadj[nvtxs],&mat_graph->adjncy));
      PetscCall(PetscArraycpy(mat_graph->xadj,xadj,nvtxs+1));
      PetscCall(PetscArraycpy(mat_graph->adjncy,adjncy,xadj[nvtxs]));
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_OWN_POINTER) {
      mat_graph->xadj    = (PetscInt*)xadj;
      mat_graph->adjncy  = (PetscInt*)adjncy;
      mat_graph->freecsr = PETSC_TRUE;
    } else if (copymode == PETSC_USE_POINTER) {
      mat_graph->xadj    = (PetscInt*)xadj;
      mat_graph->adjncy  = (PetscInt*)adjncy;
      mat_graph->freecsr = PETSC_FALSE;
    } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Unsupported copy mode %D",copymode);
    mat_graph->nvtxs_csr = nvtxs;
    pcbddc->recompute_topography = PETSC_TRUE;
  }
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetLocalAdjacencyGraph - Set adjacency structure (CSR graph) of the local degrees of freedom.

   Not collective

   Input Parameters:
+  pc - the preconditioning context.
.  nvtxs - number of local vertices of the graph (i.e., the number of local dofs).
.  xadj, adjncy - the connectivity of the dofs in CSR format.
-  copymode - supported modes are PETSC_COPY_VALUES, PETSC_USE_POINTER or PETSC_OWN_POINTER.

   Level: intermediate

   Notes:
   A dof is considered connected with all local dofs if xadj[dof+1]-xadj[dof] == 1 and adjncy[xadj[dof]] is negative.

.seealso: PCBDDC,PetscCopyMode
@*/
PetscErrorCode PCBDDCSetLocalAdjacencyGraph(PC pc,PetscInt nvtxs,const PetscInt xadj[],const PetscInt adjncy[], PetscCopyMode copymode)
{
  void (*f)(void) = NULL;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  if (nvtxs) {
    PetscValidIntPointer(xadj,3);
    if (xadj[nvtxs]) PetscValidIntPointer(adjncy,4);
  }
  PetscTryMethod(pc,"PCBDDCSetLocalAdjacencyGraph_C",(PC,PetscInt,const PetscInt[],const PetscInt[],PetscCopyMode),(pc,nvtxs,xadj,adjncy,copymode));
  /* free arrays if PCBDDC is not the PC type (PetscTryMethod was a no-op but ownership was transferred) */
  PetscCall(PetscObjectQueryFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",&f));
  if (!f && copymode == PETSC_OWN_POINTER) {
    PetscCall(PetscFree(xadj));
    PetscCall(PetscFree(adjncy));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetDofsSplittingLocal_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscInt  i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* check whether the new splitting coincides with the stored one (skip topology recomputation if so) */
  if (pcbddc->n_ISForDofsLocal == n_is) {
    for (i=0;i<n_is;i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i],pcbddc->ISForDofsLocal[i],&isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference the new ISes before destroying the old ones (safe for overlapping arrays) */
  for (i=0;i<n_is;i++) {
    PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  }
  /* Destroy ISes if they were already set */
  for (i=0;i<pcbddc->n_ISForDofsLocal;i++) {
    PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  }
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  /* last user setting takes precedence -> destroy any other customization */
  for (i=0;i<pcbddc->n_ISForDofs;i++) {
    PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  }
  PetscCall(PetscFree(pcbddc->ISForDofs));
  pcbddc->n_ISForDofs = 0;
  /* allocate space then set */
  if (n_is) {
    PetscCall(PetscMalloc1(n_is,&pcbddc->ISForDofsLocal));
  }
  for (i=0;i<n_is;i++) {
    pcbddc->ISForDofsLocal[i] = ISForDofs[i];
  }
  pcbddc->n_ISForDofsLocal = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDofsSplittingLocal - Set index sets defining fields of the local subdomain matrix

   Collective

   Input Parameters:
+  pc - the preconditioning context
.  n_is - number of index sets defining the fields
-  ISForDofs - array of IS describing the fields in local ordering

   Level: intermediate

   Notes:
   n_is should be the same among processes. Not all nodes need to be listed: unlisted nodes will belong to the complement field.

.seealso: PCBDDC
@*/
PetscErrorCode PCBDDCSetDofsSplittingLocal(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PetscInt i;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidLogicalCollectiveInt(pc,n_is,2);
  for (i=0;i<n_is;i++) {
    PetscCheckSameComm(pc,1,ISForDofs[i],3);
    PetscValidHeaderSpecific(ISForDofs[i],IS_CLASSID,3);
  }
  PetscTryMethod(pc,"PCBDDCSetDofsSplittingLocal_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs));
  PetscFunctionReturn(0);
}

static PetscErrorCode PCBDDCSetDofsSplitting_BDDC(PC pc,PetscInt n_is, IS ISForDofs[])
{
  PC_BDDC   *pcbddc  = (PC_BDDC*)pc->data;
  PetscInt  i;
  PetscBool isequal = PETSC_FALSE;

  PetscFunctionBegin;
  /* check whether the new splitting coincides with the stored one (skip topology recomputation if so) */
  if (pcbddc->n_ISForDofs == n_is) {
    for (i=0;i<n_is;i++) {
      PetscBool isequalt;
      PetscCall(ISEqual(ISForDofs[i],pcbddc->ISForDofs[i],&isequalt));
      if (!isequalt) break;
    }
    if (i == n_is) isequal = PETSC_TRUE;
  }
  /* reference the new ISes before destroying the old ones (safe for overlapping arrays) */
  for (i=0;i<n_is;i++) {
    PetscCall(PetscObjectReference((PetscObject)ISForDofs[i]));
  }
  /* Destroy ISes if they were already set */
  for (i=0;i<pcbddc->n_ISForDofs;i++) {
    PetscCall(ISDestroy(&pcbddc->ISForDofs[i]));
  }
  PetscCall(PetscFree(pcbddc->ISForDofs));
  /* last user setting takes precedence -> destroy any other customization */
  for (i=0;i<pcbddc->n_ISForDofsLocal;i++) {
    PetscCall(ISDestroy(&pcbddc->ISForDofsLocal[i]));
  }
  PetscCall(PetscFree(pcbddc->ISForDofsLocal));
  pcbddc->n_ISForDofsLocal = 0;
  /* allocate space then set */
  if (n_is) {
    PetscCall(PetscMalloc1(n_is,&pcbddc->ISForDofs));
  }
  for (i=0;i<n_is;i++) {
    pcbddc->ISForDofs[i] = ISForDofs[i];
  }
  pcbddc->n_ISForDofs = n_is;
  if (n_is) pcbddc->user_provided_isfordofs = PETSC_TRUE;
  if (!isequal) pcbddc->recompute_topography = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/*@
 PCBDDCSetDofsSplitting - Set index sets defining fields of the global matrix

   Collective

   Input Parameters:
+  pc - the preconditioning context
.  n_is - number of index sets defining the fields
-  ISForDofs - array of IS describing the fields in global ordering

   Level: intermediate

   Notes:
   Any process can list any global node. Not all nodes need to be listed: unlisted nodes will belong to the complement field.
1246 1247 .seealso: PCBDDC 1248 @*/ 1249 PetscErrorCode PCBDDCSetDofsSplitting(PC pc,PetscInt n_is, IS ISForDofs[]) 1250 { 1251 PetscInt i; 1252 1253 PetscFunctionBegin; 1254 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 1255 PetscValidLogicalCollectiveInt(pc,n_is,2); 1256 for (i=0;i<n_is;i++) { 1257 PetscValidHeaderSpecific(ISForDofs[i],IS_CLASSID,3); 1258 PetscCheckSameComm(pc,1,ISForDofs[i],3); 1259 } 1260 PetscTryMethod(pc,"PCBDDCSetDofsSplitting_C",(PC,PetscInt,IS[]),(pc,n_is,ISForDofs)); 1261 PetscFunctionReturn(0); 1262 } 1263 1264 /* 1265 PCPreSolve_BDDC - Changes the right hand side and (if necessary) the initial 1266 guess if a transformation of basis approach has been selected. 1267 1268 Input Parameter: 1269 + pc - the preconditioner context 1270 1271 Application Interface Routine: PCPreSolve() 1272 1273 Notes: 1274 The interface routine PCPreSolve() is not usually called directly by 1275 the user, but instead is called by KSPSolve(). 1276 */ 1277 static PetscErrorCode PCPreSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x) 1278 { 1279 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 1280 PC_IS *pcis = (PC_IS*)(pc->data); 1281 Vec used_vec; 1282 PetscBool iscg = PETSC_FALSE, save_rhs = PETSC_TRUE, benign_correction_computed; 1283 1284 PetscFunctionBegin; 1285 /* if we are working with CG, one dirichlet solve can be avoided during Krylov iterations */ 1286 if (ksp) { 1287 PetscBool isgroppcg, ispipecg, ispipelcg, ispipecgrr; 1288 1289 PetscCall(PetscObjectTypeCompare((PetscObject)ksp,KSPCG,&iscg)); 1290 PetscCall(PetscObjectTypeCompare((PetscObject)ksp,KSPGROPPCG,&isgroppcg)); 1291 PetscCall(PetscObjectTypeCompare((PetscObject)ksp,KSPPIPECG,&ispipecg)); 1292 PetscCall(PetscObjectTypeCompare((PetscObject)ksp,KSPPIPECG,&ispipelcg)); 1293 PetscCall(PetscObjectTypeCompare((PetscObject)ksp,KSPPIPECGRR,&ispipecgrr)); 1294 iscg = (PetscBool)(iscg || isgroppcg || ispipecg || ispipelcg || ispipecgrr); 1295 if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || !iscg || 
pc->mat != pc->pmat) { 1296 PetscCall(PCBDDCSetUseExactDirichlet(pc,PETSC_FALSE)); 1297 } 1298 } 1299 if (pcbddc->benign_apply_coarse_only || pcbddc->switch_static || pc->mat != pc->pmat) { 1300 PetscCall(PCBDDCSetUseExactDirichlet(pc,PETSC_FALSE)); 1301 } 1302 1303 /* Creates parallel work vectors used in presolve */ 1304 if (!pcbddc->original_rhs) { 1305 PetscCall(VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs)); 1306 } 1307 if (!pcbddc->temp_solution) { 1308 PetscCall(VecDuplicate(pcis->vec1_global,&pcbddc->temp_solution)); 1309 } 1310 1311 pcbddc->temp_solution_used = PETSC_FALSE; 1312 if (x) { 1313 PetscCall(PetscObjectReference((PetscObject)x)); 1314 used_vec = x; 1315 } else { /* it can only happen when calling PCBDDCMatFETIDPGetRHS */ 1316 PetscCall(PetscObjectReference((PetscObject)pcbddc->temp_solution)); 1317 used_vec = pcbddc->temp_solution; 1318 PetscCall(VecSet(used_vec,0.0)); 1319 pcbddc->temp_solution_used = PETSC_TRUE; 1320 PetscCall(VecCopy(rhs,pcbddc->original_rhs)); 1321 save_rhs = PETSC_FALSE; 1322 pcbddc->eliminate_dirdofs = PETSC_TRUE; 1323 } 1324 1325 /* hack into ksp data structure since PCPreSolve comes earlier than setting to zero the guess in src/ksp/ksp/interface/itfunc.c */ 1326 if (ksp) { 1327 /* store the flag for the initial guess since it will be restored back during PCPostSolve_BDDC */ 1328 PetscCall(KSPGetInitialGuessNonzero(ksp,&pcbddc->ksp_guess_nonzero)); 1329 if (!pcbddc->ksp_guess_nonzero) { 1330 PetscCall(VecSet(used_vec,0.0)); 1331 } 1332 } 1333 1334 pcbddc->rhs_change = PETSC_FALSE; 1335 /* Take into account zeroed rows -> change rhs and store solution removed */ 1336 if (rhs && pcbddc->eliminate_dirdofs) { 1337 IS dirIS = NULL; 1338 1339 /* DirichletBoundariesLocal may not be consistent among neighbours; gets a dirichlet dofs IS from graph (may be cached) */ 1340 PetscCall(PCBDDCGraphGetDirichletDofs(pcbddc->mat_graph,&dirIS)); 1341 if (dirIS) { 1342 Mat_IS *matis = (Mat_IS*)pc->pmat->data; 1343 PetscInt 
dirsize,i,*is_indices; 1344 PetscScalar *array_x; 1345 const PetscScalar *array_diagonal; 1346 1347 PetscCall(MatGetDiagonal(pc->pmat,pcis->vec1_global)); 1348 PetscCall(VecPointwiseDivide(pcis->vec1_global,rhs,pcis->vec1_global)); 1349 PetscCall(VecScatterBegin(matis->rctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD)); 1350 PetscCall(VecScatterEnd(matis->rctx,pcis->vec1_global,pcis->vec2_N,INSERT_VALUES,SCATTER_FORWARD)); 1351 PetscCall(VecScatterBegin(matis->rctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD)); 1352 PetscCall(VecScatterEnd(matis->rctx,used_vec,pcis->vec1_N,INSERT_VALUES,SCATTER_FORWARD)); 1353 PetscCall(ISGetLocalSize(dirIS,&dirsize)); 1354 PetscCall(VecGetArray(pcis->vec1_N,&array_x)); 1355 PetscCall(VecGetArrayRead(pcis->vec2_N,&array_diagonal)); 1356 PetscCall(ISGetIndices(dirIS,(const PetscInt**)&is_indices)); 1357 for (i=0; i<dirsize; i++) array_x[is_indices[i]] = array_diagonal[is_indices[i]]; 1358 PetscCall(ISRestoreIndices(dirIS,(const PetscInt**)&is_indices)); 1359 PetscCall(VecRestoreArrayRead(pcis->vec2_N,&array_diagonal)); 1360 PetscCall(VecRestoreArray(pcis->vec1_N,&array_x)); 1361 PetscCall(VecScatterBegin(matis->rctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE)); 1362 PetscCall(VecScatterEnd(matis->rctx,pcis->vec1_N,used_vec,INSERT_VALUES,SCATTER_REVERSE)); 1363 pcbddc->rhs_change = PETSC_TRUE; 1364 PetscCall(ISDestroy(&dirIS)); 1365 } 1366 } 1367 1368 /* remove the computed solution or the initial guess from the rhs */ 1369 if (pcbddc->rhs_change || (ksp && pcbddc->ksp_guess_nonzero)) { 1370 /* save the original rhs */ 1371 if (save_rhs) { 1372 PetscCall(VecSwap(rhs,pcbddc->original_rhs)); 1373 save_rhs = PETSC_FALSE; 1374 } 1375 pcbddc->rhs_change = PETSC_TRUE; 1376 PetscCall(VecScale(used_vec,-1.0)); 1377 PetscCall(MatMultAdd(pc->mat,used_vec,pcbddc->original_rhs,rhs)); 1378 PetscCall(VecScale(used_vec,-1.0)); 1379 PetscCall(VecCopy(used_vec,pcbddc->temp_solution)); 1380 pcbddc->temp_solution_used 
= PETSC_TRUE; 1381 if (ksp) { 1382 PetscCall(KSPSetInitialGuessNonzero(ksp,PETSC_FALSE)); 1383 } 1384 } 1385 PetscCall(VecDestroy(&used_vec)); 1386 1387 /* compute initial vector in benign space if needed 1388 and remove non-benign solution from the rhs */ 1389 benign_correction_computed = PETSC_FALSE; 1390 if (rhs && pcbddc->benign_compute_correction && (pcbddc->benign_have_null || pcbddc->benign_apply_coarse_only)) { 1391 /* compute u^*_h using ideas similar to those in Xuemin Tu's PhD thesis (see Section 4.8.1) 1392 Recursively apply BDDC in the multilevel case */ 1393 if (!pcbddc->benign_vec) { 1394 PetscCall(VecDuplicate(rhs,&pcbddc->benign_vec)); 1395 } 1396 /* keep applying coarse solver unless we no longer have benign subdomains */ 1397 pcbddc->benign_apply_coarse_only = pcbddc->benign_have_null ? PETSC_TRUE : PETSC_FALSE; 1398 if (!pcbddc->benign_skip_correction) { 1399 PetscCall(PCApply_BDDC(pc,rhs,pcbddc->benign_vec)); 1400 benign_correction_computed = PETSC_TRUE; 1401 if (pcbddc->temp_solution_used) { 1402 PetscCall(VecAXPY(pcbddc->temp_solution,1.0,pcbddc->benign_vec)); 1403 } 1404 PetscCall(VecScale(pcbddc->benign_vec,-1.0)); 1405 /* store the original rhs if not done earlier */ 1406 if (save_rhs) { 1407 PetscCall(VecSwap(rhs,pcbddc->original_rhs)); 1408 } 1409 if (pcbddc->rhs_change) { 1410 PetscCall(MatMultAdd(pc->mat,pcbddc->benign_vec,rhs,rhs)); 1411 } else { 1412 PetscCall(MatMultAdd(pc->mat,pcbddc->benign_vec,pcbddc->original_rhs,rhs)); 1413 } 1414 pcbddc->rhs_change = PETSC_TRUE; 1415 } 1416 pcbddc->benign_apply_coarse_only = PETSC_FALSE; 1417 } else { 1418 PetscCall(VecDestroy(&pcbddc->benign_vec)); 1419 } 1420 1421 /* dbg output */ 1422 if (pcbddc->dbg_flag && benign_correction_computed) { 1423 Vec v; 1424 1425 PetscCall(VecDuplicate(pcis->vec1_global,&v)); 1426 if (pcbddc->ChangeOfBasisMatrix) { 1427 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix,rhs,v)); 1428 } else { 1429 PetscCall(VecCopy(rhs,v)); 1430 } 1431 
PetscCall(PCBDDCBenignGetOrSetP0(pc,v,PETSC_TRUE)); 1432 PetscCall(PetscViewerASCIIPrintf(pcbddc->dbg_viewer,"LEVEL %D: is the correction benign?\n",pcbddc->current_level)); 1433 PetscCall(PetscScalarView(pcbddc->benign_n,pcbddc->benign_p0,pcbddc->dbg_viewer)); 1434 PetscCall(PetscViewerFlush(pcbddc->dbg_viewer)); 1435 PetscCall(VecDestroy(&v)); 1436 } 1437 1438 /* set initial guess if using PCG */ 1439 pcbddc->exact_dirichlet_trick_app = PETSC_FALSE; 1440 if (x && pcbddc->use_exact_dirichlet_trick) { 1441 PetscCall(VecSet(x,0.0)); 1442 if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) { 1443 if (benign_correction_computed) { /* we have already saved the changed rhs */ 1444 PetscCall(VecLockReadPop(pcis->vec1_global)); 1445 } else { 1446 PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix,rhs,pcis->vec1_global)); 1447 } 1448 PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec1_global,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD)); 1449 PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec1_global,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD)); 1450 } else { 1451 PetscCall(VecScatterBegin(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD)); 1452 PetscCall(VecScatterEnd(pcis->global_to_D,rhs,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD)); 1453 } 1454 PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0)); 1455 PetscCall(KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D)); 1456 PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0)); 1457 PetscCall(KSPCheckSolve(pcbddc->ksp_D,pc,pcis->vec2_D)); 1458 if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior) { 1459 PetscCall(VecSet(pcis->vec1_global,0.)); 1460 PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec2_D,pcis->vec1_global,INSERT_VALUES,SCATTER_REVERSE)); 1461 PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec2_D,pcis->vec1_global,INSERT_VALUES,SCATTER_REVERSE)); 1462 PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix,pcis->vec1_global,x)); 
1463 } else { 1464 PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec2_D,x,INSERT_VALUES,SCATTER_REVERSE)); 1465 PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec2_D,x,INSERT_VALUES,SCATTER_REVERSE)); 1466 } 1467 if (ksp) { 1468 PetscCall(KSPSetInitialGuessNonzero(ksp,PETSC_TRUE)); 1469 } 1470 pcbddc->exact_dirichlet_trick_app = PETSC_TRUE; 1471 } else if (pcbddc->ChangeOfBasisMatrix && pcbddc->change_interior && benign_correction_computed && pcbddc->use_exact_dirichlet_trick) { 1472 PetscCall(VecLockReadPop(pcis->vec1_global)); 1473 } 1474 PetscFunctionReturn(0); 1475 } 1476 1477 /* 1478 PCPostSolve_BDDC - Changes the computed solution if a transformation of basis 1479 approach has been selected. Also, restores rhs to its original state. 1480 1481 Input Parameter: 1482 + pc - the preconditioner context 1483 1484 Application Interface Routine: PCPostSolve() 1485 1486 Notes: 1487 The interface routine PCPostSolve() is not usually called directly by 1488 the user, but instead is called by KSPSolve(). 
1489 */ 1490 static PetscErrorCode PCPostSolve_BDDC(PC pc, KSP ksp, Vec rhs, Vec x) 1491 { 1492 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 1493 1494 PetscFunctionBegin; 1495 /* add solution removed in presolve */ 1496 if (x && pcbddc->rhs_change) { 1497 if (pcbddc->temp_solution_used) { 1498 PetscCall(VecAXPY(x,1.0,pcbddc->temp_solution)); 1499 } else if (pcbddc->benign_compute_correction && pcbddc->benign_vec) { 1500 PetscCall(VecAXPY(x,-1.0,pcbddc->benign_vec)); 1501 } 1502 /* restore to original state (not for FETI-DP) */ 1503 if (ksp) pcbddc->temp_solution_used = PETSC_FALSE; 1504 } 1505 1506 /* restore rhs to its original state (not needed for FETI-DP) */ 1507 if (rhs && pcbddc->rhs_change) { 1508 PetscCall(VecSwap(rhs,pcbddc->original_rhs)); 1509 pcbddc->rhs_change = PETSC_FALSE; 1510 } 1511 /* restore ksp guess state */ 1512 if (ksp) { 1513 PetscCall(KSPSetInitialGuessNonzero(ksp,pcbddc->ksp_guess_nonzero)); 1514 /* reset flag for exact dirichlet trick */ 1515 pcbddc->exact_dirichlet_trick_app = PETSC_FALSE; 1516 } 1517 PetscFunctionReturn(0); 1518 } 1519 1520 /* 1521 PCSetUp_BDDC - Prepares for the use of the BDDC preconditioner 1522 by setting data structures and options. 1523 1524 Input Parameter: 1525 + pc - the preconditioner context 1526 1527 Application Interface Routine: PCSetUp() 1528 1529 Notes: 1530 The interface routine PCSetUp() is not usually called directly by 1531 the user, but instead is called by PCApply() if necessary. 
*/
PetscErrorCode PCSetUp_BDDC(PC pc)
{
  PC_BDDC*        pcbddc = (PC_BDDC*)pc->data;
  PCBDDCSubSchurs sub_schurs;
  Mat_IS*         matis;
  MatNullSpace    nearnullspace;
  Mat             lA;
  IS              lP,zerodiag = NULL;
  PetscInt        nrows,ncols;
  PetscMPIInt     size;
  PetscBool       computesubschurs;
  PetscBool       computeconstraintsmatrix;
  PetscBool       new_nearnullspace_provided,ismatis,rl;

  PetscFunctionBegin;
  /* BDDC needs the unassembled (MATIS) format: the local subdomain matrices are accessed directly below */
  PetscCall(PetscObjectTypeCompare((PetscObject)pc->pmat,MATIS,&ismatis));
  PetscCheck(ismatis,PetscObjectComm((PetscObject)pc),PETSC_ERR_ARG_WRONG,"PCBDDC preconditioner requires matrix of type MATIS");
  PetscCall(MatGetSize(pc->pmat,&nrows,&ncols));
  PetscCheck(nrows == ncols,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"PCBDDC preconditioner requires a square preconditioning matrix");
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)pc),&size));

  matis = (Mat_IS*)pc->pmat->data;
  /* the following lines of code should be replaced by a better logic between PCIS, PCNN, PCBDDC and other future nonoverlapping preconditioners */
  /* For BDDC we need to define a local "Neumann" problem different to that defined in PCISSetup
     Also, BDDC builds its own KSP for the Dirichlet problem */
  /* topology must be recomputed on ALL ranks if any rank requests it (or on first setup / pattern change) */
  rl = pcbddc->recompute_topography;
  if (!pc->setupcalled || pc->flag == DIFFERENT_NONZERO_PATTERN) rl = PETSC_TRUE;
  PetscCall(MPIU_Allreduce(&rl,&pcbddc->recompute_topography,1,MPIU_BOOL,MPI_LOR,PetscObjectComm((PetscObject)pc)));
  if (pcbddc->recompute_topography) {
    pcbddc->graphanalyzed    = PETSC_FALSE;
    computeconstraintsmatrix = PETSC_TRUE;
  } else {
    computeconstraintsmatrix = PETSC_FALSE;
  }

  /* check parameters' compatibility: deluxe scaling and adaptive selection are meaningless on a single rank */
  if (!pcbddc->use_deluxe_scaling) pcbddc->deluxe_zerorows = PETSC_FALSE;
  pcbddc->adaptive_selection   = (PetscBool)(pcbddc->adaptive_threshold[0] != 0.0 || pcbddc->adaptive_threshold[1] != 0.0);
  pcbddc->use_deluxe_scaling   = (PetscBool)(pcbddc->use_deluxe_scaling && size > 1);
  pcbddc->adaptive_selection   = (PetscBool)(pcbddc->adaptive_selection && size > 1);
  pcbddc->adaptive_userdefined = (PetscBool)(pcbddc->adaptive_selection && pcbddc->adaptive_userdefined);
  if (pcbddc->adaptive_selection) pcbddc->use_faces = PETSC_TRUE;

  computesubschurs = (PetscBool)(pcbddc->adaptive_selection || pcbddc->use_deluxe_scaling);

  /* activate all connected components if the netflux has been requested */
  if (pcbddc->compute_nonetflux) {
    pcbddc->use_vertices = PETSC_TRUE;
    pcbddc->use_edges    = PETSC_TRUE;
    pcbddc->use_faces    = PETSC_TRUE;
  }

  /* Get stdout for dbg */
  if (pcbddc->dbg_flag) {
    if (!pcbddc->dbg_viewer) {
      pcbddc->dbg_viewer = PETSC_VIEWER_STDOUT_(PetscObjectComm((PetscObject)pc));
    }
    PetscCall(PetscViewerASCIIPushSynchronized(pcbddc->dbg_viewer));
    PetscCall(PetscViewerASCIIAddTab(pcbddc->dbg_viewer,2*pcbddc->current_level));
  }

  /* process topology information */
  PetscCall(PetscLogEventBegin(PC_BDDC_Topology[pcbddc->current_level],pc,0,0,0));
  if (pcbddc->recompute_topography) {
    PetscCall(PCBDDCComputeLocalTopologyInfo(pc));
    if (pcbddc->discretegradient) {
      PetscCall(PCBDDCNedelecSupport(pc));
    }
  }
  if (pcbddc->corner_selected) pcbddc->use_vertices = PETSC_TRUE;

  /* change basis if requested by the user */
  if (pcbddc->user_ChangeOfBasisMatrix) {
    /* use_change_of_basis flag is used to automatically compute a change of basis from constraints */
    pcbddc->use_change_of_basis = PETSC_FALSE;
    PetscCall(PCBDDCComputeLocalMatrix(pc,pcbddc->user_ChangeOfBasisMatrix));
  } else {
    /* no user change of basis: work directly on (a reference to) the local MATIS matrix */
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /*
     Compute change of basis on local pressures (aka zerodiag dofs) with the benign trick
     This should come earlier than PCISSetUp for extracting the correct subdomain matrices
  */
  PetscCall(PCBDDCBenignShellMat(pc,PETSC_TRUE));
  if (pcbddc->benign_saddle_point) {
    PC_IS* pcis = (PC_IS*)pc->data;

    if (pcbddc->user_ChangeOfBasisMatrix || pcbddc->use_change_of_basis || !computesubschurs) pcbddc->benign_change_explicit = PETSC_TRUE;
    /* detect local saddle point and change the basis in pcbddc->local_mat */
    PetscCall(PCBDDCBenignDetectSaddlePoint(pc,(PetscBool)(!pcbddc->recompute_topography),&zerodiag));
    /* pop B0 mat from local mat */
    PetscCall(PCBDDCBenignPopOrPushB0(pc,PETSC_TRUE));
    /* give pcis a hint to not reuse submatrices during PCISCreate */
    if (pc->flag == SAME_NONZERO_PATTERN && pcis->reusesubmatrices == PETSC_TRUE) {
      if (pcbddc->benign_n && (pcbddc->benign_change_explicit || pcbddc->dbg_flag)) {
        pcis->reusesubmatrices = PETSC_FALSE;
      } else {
        pcis->reusesubmatrices = PETSC_TRUE;
      }
    } else {
      pcis->reusesubmatrices = PETSC_FALSE;
    }
  }

  /* propagate relevant information (symmetry/SPD flags) from the user matrix to the working local matrix */
  if (matis->A->symmetric_set) {
    PetscCall(MatSetOption(pcbddc->local_mat,MAT_SYMMETRIC,matis->A->symmetric));
  }
  if (matis->A->spd_set) {
    PetscCall(MatSetOption(pcbddc->local_mat,MAT_SPD,matis->A->spd));
  }

  /* Set up all the "iterative substructuring" common block without computing solvers */
  {
    Mat temp_mat;

    /* temporarily swap in the (possibly changed-of-basis) local matrix so PCISSetUp extracts submatrices from it */
    temp_mat = matis->A;
    matis->A = pcbddc->local_mat;
    PetscCall(PCISSetUp(pc,PETSC_TRUE,PETSC_FALSE));
    pcbddc->local_mat = matis->A;
    matis->A = temp_mat;
  }

  /* Analyze interface */
  if (!pcbddc->graphanalyzed) {
    PetscCall(PCBDDCAnalyzeInterface(pc));
    computeconstraintsmatrix = PETSC_TRUE;
    if (pcbddc->adaptive_selection && !pcbddc->use_deluxe_scaling && !pcbddc->mat_graph->twodim) {
      SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Cannot compute the adaptive primal space for a problem with 3D edges without deluxe scaling");
    }
    if (pcbddc->compute_nonetflux) {
      MatNullSpace nnfnnsp;

      PetscCheck(pcbddc->divudotp,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Missing divudotp operator");
      PetscCall(PCBDDCComputeNoNetFlux(pc->pmat,pcbddc->divudotp,pcbddc->divudotp_trans,pcbddc->divudotp_vl2l,pcbddc->mat_graph,&nnfnnsp));
      /* TODO what if a nearnullspace is already attached? */
      if (nnfnnsp) {
        PetscCall(MatSetNearNullSpace(pc->pmat,nnfnnsp));
        PetscCall(MatNullSpaceDestroy(&nnfnnsp));
      }
    }
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_Topology[pcbddc->current_level],pc,0,0,0));

  /* check existence of a divergence free extension, i.e.
     b(v_I,p_0) = 0 for all v_I (raise error if not).
     Also, check that PCBDDCBenignGetOrSetP0 works */
  if (pcbddc->benign_saddle_point && pcbddc->dbg_flag > 1) {
    PetscCall(PCBDDCBenignCheck(pc,zerodiag));
  }
  PetscCall(ISDestroy(&zerodiag));

  /* Setup local dirichlet solver ksp_D and sub_schurs solvers */
  if (computesubschurs && pcbddc->recompute_topography) {
    PetscCall(PCBDDCInitSubSchurs(pc));
  }
  /* SetUp Scaling operator (scaling matrices could be needed in SubSchursSetUp) */
  if (!pcbddc->use_deluxe_scaling) {
    PetscCall(PCBDDCScalingSetUp(pc));
  }

  /* finish setup solvers and do adaptive selection of constraints;
     the relative order of the two calls depends on whether the Schur complements are formed explicitly */
  sub_schurs = pcbddc->sub_schurs;
  if (sub_schurs && sub_schurs->schur_explicit) {
    if (computesubschurs) {
      PetscCall(PCBDDCSetUpSubSchurs(pc));
    }
    PetscCall(PCBDDCSetUpLocalSolvers(pc,PETSC_TRUE,PETSC_FALSE));
  } else {
    PetscCall(PCBDDCSetUpLocalSolvers(pc,PETSC_TRUE,PETSC_FALSE));
    if (computesubschurs) {
      PetscCall(PCBDDCSetUpSubSchurs(pc));
    }
  }
  if (pcbddc->adaptive_selection) {
    PetscCall(PCBDDCAdaptiveSelection(pc));
    computeconstraintsmatrix = PETSC_TRUE;
  }

  /* infer if NullSpace object attached to Mat via MatSetNearNullSpace has changed */
  new_nearnullspace_provided = PETSC_FALSE;
  PetscCall(MatGetNearNullSpace(pc->pmat,&nearnullspace));
  if (pcbddc->onearnullspace) { /* already used nearnullspace */
    if (!nearnullspace) { /* near null space attached to mat has been destroyed */
      new_nearnullspace_provided = PETSC_TRUE;
    } else {
      /* determine if the two nullspaces are different (should be lightweight) */
      if (nearnullspace != pcbddc->onearnullspace) {
        new_nearnullspace_provided = PETSC_TRUE;
      } else { /* maybe the user has changed the content of the nearnullspace so check vectors ObjectStateId */
        PetscInt         i;
        const Vec        *nearnullvecs;
        PetscObjectState state;
        PetscInt         nnsp_size;
        PetscCall(MatNullSpaceGetVecs(nearnullspace,NULL,&nnsp_size,&nearnullvecs));
        for (i=0;i<nnsp_size;i++) {
          PetscCall(PetscObjectStateGet((PetscObject)nearnullvecs[i],&state));
          if (pcbddc->onearnullvecs_state[i] != state) {
            new_nearnullspace_provided = PETSC_TRUE;
            break;
          }
        }
      }
    }
  } else {
    if (!nearnullspace) { /* both nearnullspaces are null */
      new_nearnullspace_provided = PETSC_FALSE;
    } else { /* nearnullspace attached later */
      new_nearnullspace_provided = PETSC_TRUE;
    }
  }

  /* Setup constraints and related work vectors */
  /* reset primal space flags */
  PetscCall(PetscLogEventBegin(PC_BDDC_LocalWork[pcbddc->current_level],pc,0,0,0));
  pcbddc->new_primal_space       = PETSC_FALSE;
  pcbddc->new_primal_space_local = PETSC_FALSE;
  if (computeconstraintsmatrix || new_nearnullspace_provided) {
    /* It also sets the primal space flags */
    PetscCall(PCBDDCConstraintsSetUp(pc));
  }
  /* Allocate needed local vectors (which depends on quantities defined during ConstraintsSetUp) */
  PetscCall(PCBDDCSetUpLocalWorkVectors(pc));

  if (pcbddc->use_change_of_basis) {
    PC_IS *pcis = (PC_IS*)(pc->data);

    PetscCall(PCBDDCComputeLocalMatrix(pc,pcbddc->ChangeOfBasisMatrix));
    if (pcbddc->benign_change) {
      PetscCall(MatDestroy(&pcbddc->benign_B0));
      /* pop B0 from pcbddc->local_mat */
      PetscCall(PCBDDCBenignPopOrPushB0(pc,PETSC_TRUE));
    }
    /* get submatrices: the PCIS blocks must be re-extracted from the changed-basis local matrix */
    PetscCall(MatDestroy(&pcis->A_IB));
    PetscCall(MatDestroy(&pcis->A_BI));
    PetscCall(MatDestroy(&pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_BB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat,pcis->is_I_local,pcis->is_B_local,MAT_INITIAL_MATRIX,&pcis->A_IB));
    PetscCall(MatCreateSubMatrix(pcbddc->local_mat,pcis->is_B_local,pcis->is_I_local,MAT_INITIAL_MATRIX,&pcis->A_BI));
    /* set flag in pcis to not reuse submatrices during PCISCreate */
    pcis->reusesubmatrices = PETSC_FALSE;
  } else if (!pcbddc->user_ChangeOfBasisMatrix && !pcbddc->benign_change) {
    PetscCall(MatDestroy(&pcbddc->local_mat));
    PetscCall(PetscObjectReference((PetscObject)matis->A));
    pcbddc->local_mat = matis->A;
  }

  /* interface pressure block row for B_C (objects composed by KSPFETIDP, if present) */
  PetscCall(PetscObjectQuery((PetscObject)pc,"__KSPFETIDP_lP" ,(PetscObject*)&lP));
  PetscCall(PetscObjectQuery((PetscObject)pc,"__KSPFETIDP_lA" ,(PetscObject*)&lA));
  if (lA && lP) {
    PC_IS*    pcis = (PC_IS*)pc->data;
    Mat       B_BI,B_BB,Bt_BI,Bt_BB;
    PetscBool issym;
    PetscCall(MatIsSymmetric(lA,PETSC_SMALL,&issym));
    if (issym) {
      /* symmetric case: build the transposed blocks implicitly */
      PetscCall(MatCreateSubMatrix(lA,lP,pcis->is_I_local,MAT_INITIAL_MATRIX,&B_BI));
      PetscCall(MatCreateSubMatrix(lA,lP,pcis->is_B_local,MAT_INITIAL_MATRIX,&B_BB));
      PetscCall(MatCreateTranspose(B_BI,&Bt_BI));
      PetscCall(MatCreateTranspose(B_BB,&Bt_BB));
    } else {
      PetscCall(MatCreateSubMatrix(lA,lP,pcis->is_I_local,MAT_INITIAL_MATRIX,&B_BI));
      PetscCall(MatCreateSubMatrix(lA,lP,pcis->is_B_local,MAT_INITIAL_MATRIX,&B_BB));
      PetscCall(MatCreateSubMatrix(lA,pcis->is_I_local,lP,MAT_INITIAL_MATRIX,&Bt_BI));
      PetscCall(MatCreateSubMatrix(lA,pcis->is_B_local,lP,MAT_INITIAL_MATRIX,&Bt_BB));
    }
    PetscCall(PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_B_BI",(PetscObject)B_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_B_BB",(PetscObject)B_BB));
    PetscCall(PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_Bt_BI",(PetscObject)Bt_BI));
    PetscCall(PetscObjectCompose((PetscObject)pc,"__KSPFETIDP_Bt_BB",(PetscObject)Bt_BB));
    PetscCall(MatDestroy(&B_BI));
    PetscCall(MatDestroy(&B_BB));
    PetscCall(MatDestroy(&Bt_BI));
    PetscCall(MatDestroy(&Bt_BB));
  }
  PetscCall(PetscLogEventEnd(PC_BDDC_LocalWork[pcbddc->current_level],pc,0,0,0));

  /* SetUp coarse and local Neumann solvers */
  PetscCall(PCBDDCSetUpSolvers(pc));
  /* SetUp Scaling operator (deluxe scaling needs the solvers, hence it is done here and not earlier) */
  if (pcbddc->use_deluxe_scaling) {
    PetscCall(PCBDDCScalingSetUp(pc));
  }

  /* mark topography as done */
  pcbddc->recompute_topography = PETSC_FALSE;

  /* wrap pcis->A_IB and pcis->A_BI if we did not change explicitly the variables on the pressures */
  PetscCall(PCBDDCBenignShellMat(pc,PETSC_FALSE));

  if (pcbddc->dbg_flag) {
    PetscCall(PetscViewerASCIISubtractTab(pcbddc->dbg_viewer,2*pcbddc->current_level));
    PetscCall(PetscViewerASCIIPopSynchronized(pcbddc->dbg_viewer));
  }
  PetscFunctionReturn(0);
}

/*
   PCApply_BDDC - Applies the BDDC operator to a vector.

   Input Parameters:
+  pc - the preconditioner context
-  r - input vector (global)

   Output Parameter:
.
z - output vector (global)

   Application Interface Routine: PCApply()
*/
PetscErrorCode PCApply_BDDC(PC pc,Vec r,Vec z)
{
  PC_IS             *pcis   = (PC_IS*)(pc->data);
  PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
  Mat               lA  = NULL;
  PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one   = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero  = 0.0;
/* This code is similar to that provided in nn.c for PCNN
   NN interface preconditioner changed to BDDC
   Added support for M_3 preconditioner in the reference article (code is active if pcbddc->switch_static == PETSC_TRUE) */

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation,&cited));
  if (pcbddc->switch_static) {
    PetscCall(MatISGetLocalMat(pc->useAmat ? pc->mat : pc->pmat,&lA));
  }

  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* move to the transformed basis; r and work_change are swapped back at the end of the routine */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix,r,pcbddc->work_change));
    swap = pcbddc->work_change;
    pcbddc->work_change = r;
    r = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve
       (the read lock is popped in PCPreSolve_BDDC) */
    if (pcbddc->benign_apply_coarse_only && pcbddc->use_exact_dirichlet_trick && pcbddc->change_interior) {
      PetscCall(VecCopy(r,pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc,r,PETSC_TRUE));
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_DIRICHLET && !pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r,z));
    /* First Dirichlet solve */
    PetscCall(VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD));
    /*
      Assembling right hand side for BDDC operator
      - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
      - pcis->vec1_B the interface part of the global vector z
    */
    if (n_D) {
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
      PetscCall(KSPSolve(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D,pc,pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D,m_one));
      if (pcbddc->switch_static) {
        /* apply the full local operator (optionally through the static-switch change of basis) */
        PetscCall(VecSet(pcis->vec1_N,0.));
        PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec2_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec2_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMult(lA,pcis->vec1_N,pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
          PetscCall(MatMult(lA,pcis->vec2_N,pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec2_N,pcis->vec1_D,ADD_VALUES,SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec2_N,pcis->vec1_D,ADD_VALUES,SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B,pcis->vec2_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B,pcis->vec2_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      } else {
        PetscCall(MatMult(pcis->A_BI,pcis->vec2_D,pcis->vec1_B));
      }
    } else {
      /* no interior dofs on this subdomain */
      PetscCall(VecSet(pcis->vec1_B,zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc,z,pcis->vec1_B));
  } else {
    if (!pcbddc->benign_apply_coarse_only) {
      PetscCall(PCBDDCScalingRestriction(pc,r,pcis->vec1_B));
    }
  }
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    PetscCheck(pcbddc->switch_static,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"You forgot to pass -pc_bddc_switch_static");
    PetscCall(VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD));
  }

  /* Apply interface preconditioner
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc,PETSC_FALSE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc,pcis->vec1_B,z));
  if (pcbddc->interface_extension == PC_BDDC_INTERFACE_EXT_LUMP) {
    /* lumped extension: interior values come straight from the interface preconditioner, no second solve */
    PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec1_D,z,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec1_D,z,INSERT_VALUES,SCATTER_REVERSE));
    PetscFunctionReturn(0);
  }
  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec1_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec1_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B,pcis->vec1_B,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B,pcis->vec1_B,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMult(lA,pcis->vec1_N,pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
        PetscCall(MatMult(lA,pcis->vec2_N,pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec2_N,pcis->vec3_D,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec2_N,pcis->vec3_D,INSERT_VALUES,SCATTER_FORWARD));
    } else {
      PetscCall(MatMult(pcis->A_IB,pcis->vec1_B,pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMult(lA,pcis->vec1_D,pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change,pcis->vec1_D,pcis->vec1_N));
      PetscCall(MatMult(lA,pcis->vec1_N,pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change,pcis->vec2_N,pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
  PetscCall(KSPSolve(pcbddc->ksp_D,pcis->vec3_D,pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D,pc,pcis->vec4_D));

  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    /* combine the two Dirichlet solves into the interior part of z */
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D,m_one,one,m_one,pcis->vec4_D,pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D,m_one,m_one,pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE));
  } else {
    /* the first Dirichlet solve was skipped: only the second solve contributes to the interior */
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D,one,m_one,pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D,m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    if (pcbddc->benign_apply_coarse_only) {
      PetscCall(PetscArrayzero(pcbddc->benign_p0,pcbddc->benign_n));
    }
    PetscCall(PCBDDCBenignGetOrSetP0(pc,z,PETSC_FALSE));
  }

  if (pcbddc->ChangeOfBasisMatrix) {
    /* map the result back to the original basis and restore work_change */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z,pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix,pcbddc->work_change,z));
  }
  PetscFunctionReturn(0);
}

/*
   PCApplyTranspose_BDDC - Applies the transpose of the BDDC operator to a vector.

   Input Parameters:
+  pc - the preconditioner context
-  r - input vector (global)

   Output Parameter:
.  z - output vector (global)

   Application Interface Routine: PCApplyTranspose()
*/
PetscErrorCode PCApplyTranspose_BDDC(PC pc,Vec r,Vec z)
{
  PC_IS             *pcis   = (PC_IS*)(pc->data);
  PC_BDDC           *pcbddc = (PC_BDDC*)(pc->data);
  Mat               lA  = NULL;
  PetscInt          n_B = pcis->n_B, n_D = pcis->n - n_B;
  const PetscScalar one   = 1.0;
  const PetscScalar m_one = -1.0;
  const PetscScalar zero  = 0.0;

  PetscFunctionBegin;
  PetscCall(PetscCitationsRegister(citation,&cited));
  if (pcbddc->switch_static) {
    PetscCall(MatISGetLocalMat(pc->useAmat ?
pc->mat : pc->pmat,&lA));
  }
  if (pcbddc->ChangeOfBasisMatrix) {
    Vec swap;

    /* move to the transformed basis; r and work_change are swapped back at the end of the routine */
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix,r,pcbddc->work_change));
    swap = pcbddc->work_change;
    pcbddc->work_change = r;
    r = swap;
    /* save rhs so that we don't need to apply the change of basis for the exact dirichlet trick in PreSolve */
    /* NOTE(review): PCApply_BDDC tests pcbddc->use_exact_dirichlet_trick here, while this routine tests
       pcbddc->exact_dirichlet_trick_app -- confirm the asymmetry is intended */
    if (pcbddc->benign_apply_coarse_only && pcbddc->exact_dirichlet_trick_app && pcbddc->change_interior) {
      PetscCall(VecCopy(r,pcis->vec1_global));
      PetscCall(VecLockReadPush(pcis->vec1_global));
    }
  }
  if (pcbddc->benign_have_null) { /* get p0 from r */
    PetscCall(PCBDDCBenignGetOrSetP0(pc,r,PETSC_TRUE));
  }
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    PetscCall(VecCopy(r,z));
    /* First Dirichlet solve (transposed) */
    PetscCall(VecScatterBegin(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_D,r,pcis->vec1_D,INSERT_VALUES,SCATTER_FORWARD));
    /*
      Assembling right hand side for BDDC operator
      - pcis->vec1_D for the Dirichlet part (if needed, i.e. pcbddc->switch_static == PETSC_TRUE)
      - pcis->vec1_B the interface part of the global vector z
    */
    if (n_D) {
      PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
      PetscCall(KSPSolveTranspose(pcbddc->ksp_D,pcis->vec1_D,pcis->vec2_D));
      PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
      PetscCall(KSPCheckSolve(pcbddc->ksp_D,pc,pcis->vec2_D));
      PetscCall(VecScale(pcis->vec2_D,m_one));
      if (pcbddc->switch_static) {
        /* apply the transpose of the full local operator (optionally through the static-switch change of basis) */
        PetscCall(VecSet(pcis->vec1_N,0.));
        PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec2_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
        PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec2_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
        if (!pcbddc->switch_static_change) {
          PetscCall(MatMultTranspose(lA,pcis->vec1_N,pcis->vec2_N));
        } else {
          PetscCall(MatMult(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
          PetscCall(MatMultTranspose(lA,pcis->vec2_N,pcis->vec1_N));
          PetscCall(MatMultTranspose(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
        }
        PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec2_N,pcis->vec1_D,ADD_VALUES,SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec2_N,pcis->vec1_D,ADD_VALUES,SCATTER_FORWARD));
        PetscCall(VecScatterBegin(pcis->N_to_B,pcis->vec2_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
        PetscCall(VecScatterEnd(pcis->N_to_B,pcis->vec2_N,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
      } else {
        PetscCall(MatMultTranspose(pcis->A_IB,pcis->vec2_D,pcis->vec1_B));
      }
    } else {
      /* no interior dofs on this subdomain */
      PetscCall(VecSet(pcis->vec1_B,zero));
    }
    PetscCall(VecScatterBegin(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B,pcis->vec1_B,z,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(PCBDDCScalingRestriction(pc,z,pcis->vec1_B));
  } else {
    PetscCall(PCBDDCScalingRestriction(pc,r,pcis->vec1_B));
  }

  /* Apply interface preconditioner (transposed)
     input/output vecs: pcis->vec1_B and pcis->vec1_D */
  PetscCall(PCBDDCApplyInterfacePreconditioner(pc,PETSC_TRUE));

  /* Apply transpose of partition of unity operator */
  PetscCall(PCBDDCScalingExtension(pc,pcis->vec1_B,z));

  /* Second Dirichlet solve and assembling of output */
  PetscCall(VecScatterBegin(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B,z,pcis->vec1_B,INSERT_VALUES,SCATTER_FORWARD));
  if (n_B) {
    if (pcbddc->switch_static) {
      PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec1_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec1_D,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      PetscCall(VecScatterBegin(pcis->N_to_B,pcis->vec1_B,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      PetscCall(VecScatterEnd(pcis->N_to_B,pcis->vec1_B,pcis->vec1_N,INSERT_VALUES,SCATTER_REVERSE));
      if (!pcbddc->switch_static_change) {
        PetscCall(MatMultTranspose(lA,pcis->vec1_N,pcis->vec2_N));
      } else {
        PetscCall(MatMult(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
        PetscCall(MatMultTranspose(lA,pcis->vec2_N,pcis->vec1_N));
        PetscCall(MatMultTranspose(pcbddc->switch_static_change,pcis->vec1_N,pcis->vec2_N));
      }
      PetscCall(VecScatterBegin(pcis->N_to_D,pcis->vec2_N,pcis->vec3_D,INSERT_VALUES,SCATTER_FORWARD));
      PetscCall(VecScatterEnd(pcis->N_to_D,pcis->vec2_N,pcis->vec3_D,INSERT_VALUES,SCATTER_FORWARD));
    } else {
      PetscCall(MatMultTranspose(pcis->A_BI,pcis->vec1_B,pcis->vec3_D));
    }
  } else if (pcbddc->switch_static) { /* n_B is zero */
    if (!pcbddc->switch_static_change) {
      PetscCall(MatMultTranspose(lA,pcis->vec1_D,pcis->vec3_D));
    } else {
      PetscCall(MatMult(pcbddc->switch_static_change,pcis->vec1_D,pcis->vec1_N));
      PetscCall(MatMultTranspose(lA,pcis->vec1_N,pcis->vec2_N));
      PetscCall(MatMultTranspose(pcbddc->switch_static_change,pcis->vec2_N,pcis->vec3_D));
    }
  }
  PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
  PetscCall(KSPSolveTranspose(pcbddc->ksp_D,pcis->vec3_D,pcis->vec4_D));
  PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0],pc,0,0,0));
  PetscCall(KSPCheckSolve(pcbddc->ksp_D,pc,pcis->vec4_D));
  if (!pcbddc->exact_dirichlet_trick_app && !pcbddc->benign_apply_coarse_only) {
    /* combine the two Dirichlet solves into the interior part of z */
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBYPCZ(pcis->vec2_D,m_one,one,m_one,pcis->vec4_D,pcis->vec1_D));
    } else {
      PetscCall(VecAXPBY(pcis->vec2_D,m_one,m_one,pcis->vec4_D));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec2_D,z,INSERT_VALUES,SCATTER_REVERSE));
  } else {
    /* the first Dirichlet solve was skipped: only the second solve contributes to the interior */
    if (pcbddc->switch_static) {
      PetscCall(VecAXPBY(pcis->vec4_D,one,m_one,pcis->vec1_D));
    } else {
      PetscCall(VecScale(pcis->vec4_D,m_one));
    }
    PetscCall(VecScatterBegin(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_D,pcis->vec4_D,z,INSERT_VALUES,SCATTER_REVERSE));
  }
  if (pcbddc->benign_have_null) { /* set p0 (computed in PCBDDCApplyInterface) */
    PetscCall(PCBDDCBenignGetOrSetP0(pc,z,PETSC_FALSE));
  }
  if (pcbddc->ChangeOfBasisMatrix) {
    /* map the result back to the original basis and restore work_change */
    pcbddc->work_change = r;
    PetscCall(VecCopy(z,pcbddc->work_change));
    PetscCall(MatMult(pcbddc->ChangeOfBasisMatrix,pcbddc->work_change,z));
  }
  PetscFunctionReturn(0);
}

/* PCReset_BDDC - returns the PC to its freshly-created state, releasing all
   topology, scaling, solver and work-vector data */
PetscErrorCode PCReset_BDDC(PC pc)
{
  PC_BDDC *pcbddc = (PC_BDDC*)pc->data;
  PC_IS   *pcis   = (PC_IS*)pc->data;
  KSP     kspD,kspR,kspC;

  PetscFunctionBegin;
  /* free BDDC custom data */
  PetscCall(PCBDDCResetCustomization(pc));
  /* destroy
objects related to topography */ 2176 PetscCall(PCBDDCResetTopography(pc)); 2177 /* destroy objects for scaling operator */ 2178 PetscCall(PCBDDCScalingDestroy(pc)); 2179 /* free solvers stuff */ 2180 PetscCall(PCBDDCResetSolvers(pc)); 2181 /* free global vectors needed in presolve */ 2182 PetscCall(VecDestroy(&pcbddc->temp_solution)); 2183 PetscCall(VecDestroy(&pcbddc->original_rhs)); 2184 /* free data created by PCIS */ 2185 PetscCall(PCISDestroy(pc)); 2186 2187 /* restore defaults */ 2188 kspD = pcbddc->ksp_D; 2189 kspR = pcbddc->ksp_R; 2190 kspC = pcbddc->coarse_ksp; 2191 PetscCall(PetscMemzero(pc->data,sizeof(*pcbddc))); 2192 pcis->n_neigh = -1; 2193 pcis->scaling_factor = 1.0; 2194 pcis->reusesubmatrices = PETSC_TRUE; 2195 pcbddc->use_local_adj = PETSC_TRUE; 2196 pcbddc->use_vertices = PETSC_TRUE; 2197 pcbddc->use_edges = PETSC_TRUE; 2198 pcbddc->symmetric_primal = PETSC_TRUE; 2199 pcbddc->vertex_size = 1; 2200 pcbddc->recompute_topography = PETSC_TRUE; 2201 pcbddc->coarse_size = -1; 2202 pcbddc->use_exact_dirichlet_trick = PETSC_TRUE; 2203 pcbddc->coarsening_ratio = 8; 2204 pcbddc->coarse_eqs_per_proc = 1; 2205 pcbddc->benign_compute_correction = PETSC_TRUE; 2206 pcbddc->nedfield = -1; 2207 pcbddc->nedglobal = PETSC_TRUE; 2208 pcbddc->graphmaxcount = PETSC_MAX_INT; 2209 pcbddc->sub_schurs_layers = -1; 2210 pcbddc->ksp_D = kspD; 2211 pcbddc->ksp_R = kspR; 2212 pcbddc->coarse_ksp = kspC; 2213 PetscFunctionReturn(0); 2214 } 2215 2216 PetscErrorCode PCDestroy_BDDC(PC pc) 2217 { 2218 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 2219 2220 PetscFunctionBegin; 2221 PetscCall(PCReset_BDDC(pc)); 2222 PetscCall(KSPDestroy(&pcbddc->ksp_D)); 2223 PetscCall(KSPDestroy(&pcbddc->ksp_R)); 2224 PetscCall(KSPDestroy(&pcbddc->coarse_ksp)); 2225 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDiscreteGradient_C",NULL)); 2226 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDivergenceMat_C",NULL)); 2227 
PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetChangeOfBasisMat_C",NULL)); 2228 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",NULL)); 2229 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesIS_C",NULL)); 2230 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",NULL)); 2231 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevel_C",NULL)); 2232 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetUseExactDirichlet_C",NULL)); 2233 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevels_C",NULL)); 2234 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",NULL)); 2235 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundariesLocal_C",NULL)); 2236 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",NULL)); 2237 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundariesLocal_C",NULL)); 2238 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",NULL)); 2239 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundariesLocal_C",NULL)); 2240 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",NULL)); 2241 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundariesLocal_C",NULL)); 2242 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",NULL)); 2243 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplittingLocal_C",NULL)); 2244 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",NULL)); 2245 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",NULL)); 2246 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",NULL)); 2247 
PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",NULL)); 2248 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCPreSolveChangeRHS_C",NULL)); 2249 PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCSetCoordinates_C",NULL)); 2250 PetscCall(PetscFree(pc->data)); 2251 PetscFunctionReturn(0); 2252 } 2253 2254 static PetscErrorCode PCSetCoordinates_BDDC(PC pc, PetscInt dim, PetscInt nloc, PetscReal *coords) 2255 { 2256 PC_BDDC *pcbddc = (PC_BDDC*)pc->data; 2257 PCBDDCGraph mat_graph = pcbddc->mat_graph; 2258 2259 PetscFunctionBegin; 2260 PetscCall(PetscFree(mat_graph->coords)); 2261 PetscCall(PetscMalloc1(nloc*dim,&mat_graph->coords)); 2262 PetscCall(PetscArraycpy(mat_graph->coords,coords,nloc*dim)); 2263 mat_graph->cnloc = nloc; 2264 mat_graph->cdim = dim; 2265 mat_graph->cloc = PETSC_FALSE; 2266 /* flg setup */ 2267 pcbddc->recompute_topography = PETSC_TRUE; 2268 pcbddc->corner_selected = PETSC_FALSE; 2269 PetscFunctionReturn(0); 2270 } 2271 2272 static PetscErrorCode PCPreSolveChangeRHS_BDDC(PC pc, PetscBool* change) 2273 { 2274 PetscFunctionBegin; 2275 *change = PETSC_TRUE; 2276 PetscFunctionReturn(0); 2277 } 2278 2279 static PetscErrorCode PCBDDCMatFETIDPGetRHS_BDDC(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs) 2280 { 2281 FETIDPMat_ctx mat_ctx; 2282 Vec work; 2283 PC_IS* pcis; 2284 PC_BDDC* pcbddc; 2285 2286 PetscFunctionBegin; 2287 PetscCall(MatShellGetContext(fetidp_mat,&mat_ctx)); 2288 pcis = (PC_IS*)mat_ctx->pc->data; 2289 pcbddc = (PC_BDDC*)mat_ctx->pc->data; 2290 2291 PetscCall(VecSet(fetidp_flux_rhs,0.0)); 2292 /* copy rhs since we may change it during PCPreSolve_BDDC */ 2293 if (!pcbddc->original_rhs) { 2294 PetscCall(VecDuplicate(pcis->vec1_global,&pcbddc->original_rhs)); 2295 } 2296 if (mat_ctx->rhs_flip) { 2297 PetscCall(VecPointwiseMult(pcbddc->original_rhs,standard_rhs,mat_ctx->rhs_flip)); 2298 } else { 2299 PetscCall(VecCopy(standard_rhs,pcbddc->original_rhs)); 2300 } 2301 if (mat_ctx->g2g_p) { 
    /* interface pressure rhs */
    PetscCall(VecScatterBegin(mat_ctx->g2g_p,fetidp_flux_rhs,pcbddc->original_rhs,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p,fetidp_flux_rhs,pcbddc->original_rhs,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterBegin(mat_ctx->g2g_p,standard_rhs,fetidp_flux_rhs,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p,standard_rhs,fetidp_flux_rhs,INSERT_VALUES,SCATTER_FORWARD));
    if (!mat_ctx->rhs_flip) {
      PetscCall(VecScale(fetidp_flux_rhs,-1.));
    }
  }
  /*
     change of basis for physical rhs if needed
     It also changes the rhs in case of dirichlet boundaries
  */
  PetscCall(PCPreSolve_BDDC(mat_ctx->pc,NULL,pcbddc->original_rhs,NULL));
  if (pcbddc->ChangeOfBasisMatrix) {
    PetscCall(MatMultTranspose(pcbddc->ChangeOfBasisMatrix,pcbddc->original_rhs,pcbddc->work_change));
    work = pcbddc->work_change;
  } else {
    work = pcbddc->original_rhs;
  }
  /* store vectors for computation of fetidp final solution */
  PetscCall(VecScatterBegin(pcis->global_to_D,work,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_D,work,mat_ctx->temp_solution_D,INSERT_VALUES,SCATTER_FORWARD));
  /* scale rhs since it should be unassembled */
  /* TODO use counter scaling? (also below) */
  PetscCall(VecScatterBegin(pcis->global_to_B,work,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(pcis->global_to_B,work,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD));
  /* Apply partition of unity */
  PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B));
  /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
  if (!pcbddc->switch_static) {
    /* compute partially subassembled Schur complement right-hand side */
    PetscCall(PetscLogEventBegin(PC_BDDC_Solves[pcbddc->current_level][0],mat_ctx->pc,0,0,0));
    PetscCall(KSPSolve(pcbddc->ksp_D,mat_ctx->temp_solution_D,pcis->vec1_D));
    PetscCall(PetscLogEventEnd(PC_BDDC_Solves[pcbddc->current_level][0],mat_ctx->pc,0,0,0));
    /* Cannot propagate up error in KSPSolve() because there is no access to the PC */
    PetscCall(MatMult(pcis->A_BI,pcis->vec1_D,pcis->vec1_B));
    PetscCall(VecAXPY(mat_ctx->temp_solution_B,-1.0,pcis->vec1_B));
    PetscCall(VecSet(work,0.0));
    /* assemble the interface contribution globally, then restrict and rescale */
    PetscCall(VecScatterBegin(pcis->global_to_B,mat_ctx->temp_solution_B,work,ADD_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(pcis->global_to_B,mat_ctx->temp_solution_B,work,ADD_VALUES,SCATTER_REVERSE));
    /* PetscCall(PCBDDCScalingRestriction(mat_ctx->pc,work,mat_ctx->temp_solution_B)); */
    PetscCall(VecScatterBegin(pcis->global_to_B,work,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(pcis->global_to_B,work,mat_ctx->temp_solution_B,INSERT_VALUES,SCATTER_FORWARD));
    PetscCall(VecPointwiseMult(mat_ctx->temp_solution_B,pcis->D,mat_ctx->temp_solution_B));
  }
  /* BDDC rhs */
  PetscCall(VecCopy(mat_ctx->temp_solution_B,pcis->vec1_B));
  if (pcbddc->switch_static) {
    PetscCall(VecCopy(mat_ctx->temp_solution_D,pcis->vec1_D));
  }
  /* apply BDDC; benign p0 values are zeroed before and after the application */
  PetscCall(PetscArrayzero(pcbddc->benign_p0,pcbddc->benign_n));
  PetscCall(PCBDDCApplyInterfacePreconditioner(mat_ctx->pc,PETSC_FALSE));
  PetscCall(PetscArrayzero(pcbddc->benign_p0,pcbddc->benign_n));

  /* Application of B_delta and assembling of rhs for fetidp fluxes */
  PetscCall(MatMult(mat_ctx->B_delta,pcis->vec1_B,mat_ctx->lambda_local));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda,mat_ctx->lambda_local,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD));
  /* Add contribution to interface pressures */
  if (mat_ctx->l2g_p) {
    PetscCall(MatMult(mat_ctx->B_BB,pcis->vec1_B,mat_ctx->vP));
    if (pcbddc->switch_static) {
      PetscCall(MatMultAdd(mat_ctx->B_BI,pcis->vec1_D,mat_ctx->vP,mat_ctx->vP));
    }
    PetscCall(VecScatterBegin(mat_ctx->l2g_p,mat_ctx->vP,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p,mat_ctx->vP,fetidp_flux_rhs,ADD_VALUES,SCATTER_FORWARD));
  }
  PetscFunctionReturn(0);
}

/*@
 PCBDDCMatFETIDPGetRHS - Compute the right-hand side for FETI-DP linear system using the physical right-hand side

   Collective

   Input Parameters:
+  fetidp_mat      - the FETI-DP matrix object obtained by a call to PCBDDCCreateFETIDPOperators
-  standard_rhs    - the right-hand side of the original linear system

   Output Parameters:
.  fetidp_flux_rhs - the right-hand side for the FETI-DP linear system

   Level: developer

   Notes:

.seealso: PCBDDC, PCBDDCCreateFETIDPOperators, PCBDDCMatFETIDPGetSolution
@*/
PetscErrorCode PCBDDCMatFETIDPGetRHS(Mat fetidp_mat, Vec standard_rhs, Vec fetidp_flux_rhs)
{
  FETIDPMat_ctx mat_ctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fetidp_mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(standard_rhs,VEC_CLASSID,2);
  PetscValidHeaderSpecific(fetidp_flux_rhs,VEC_CLASSID,3);
  PetscCall(MatShellGetContext(fetidp_mat,&mat_ctx));
  /* dispatch to the method composed on the originating BDDC preconditioner */
  PetscUseMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetRHS_C",(Mat,Vec,Vec),(fetidp_mat,standard_rhs,fetidp_flux_rhs));
  PetscFunctionReturn(0);
}

/* Recover the physical solution from the FETI-DP flux solution
   (implementation behind PCBDDCMatFETIDPGetSolution) */
static PetscErrorCode PCBDDCMatFETIDPGetSolution_BDDC(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx mat_ctx;
  PC_IS*        pcis;
  PC_BDDC*      pcbddc;
  Vec           work;

  PetscFunctionBegin;
  PetscCall(MatShellGetContext(fetidp_mat,&mat_ctx));
  pcis = (PC_IS*)mat_ctx->pc->data;
  pcbddc = (PC_BDDC*)mat_ctx->pc->data;

  /* apply B_delta^T */
  PetscCall(VecSet(pcis->vec1_B,0.));
  PetscCall(VecScatterBegin(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(VecScatterEnd(mat_ctx->l2g_lambda,fetidp_flux_sol,mat_ctx->lambda_local,INSERT_VALUES,SCATTER_REVERSE));
  PetscCall(MatMultTranspose(mat_ctx->B_delta,mat_ctx->lambda_local,pcis->vec1_B));
  if (mat_ctx->l2g_p) {
    /* add pressure contribution (saddle-point case) */
    PetscCall(VecScatterBegin(mat_ctx->l2g_p,fetidp_flux_sol,mat_ctx->vP,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->l2g_p,fetidp_flux_sol,mat_ctx->vP,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(MatMultAdd(mat_ctx->Bt_BB,mat_ctx->vP,pcis->vec1_B,pcis->vec1_B));
  }

  /* compute rhs for BDDC application */
  PetscCall(VecAYPX(pcis->vec1_B,-1.0,mat_ctx->temp_solution_B));
  if (pcbddc->switch_static) {
    PetscCall(VecScatterBegin(mat_ctx->g2g_p,fetidp_flux_sol,standard_sol,INSERT_VALUES,SCATTER_REVERSE));
    PetscCall(VecScatterEnd(mat_ctx->g2g_p,fetidp_flux_sol,standard_sol,INSERT_VALUES,SCATTER_REVERSE));
  }
  PetscFunctionReturn(0);
}

/* Viewer for the BDDC interface preconditioner shell: prints a banner (ASCII only)
   and then views the inner BDDC preconditioner */
static PetscErrorCode PCView_BDDCIPC(PC pc, PetscViewer viewer)
{
  BDDCIPC_ctx bddcipc_ctx;
  PetscBool   isascii;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc,&bddcipc_ctx));
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&isascii));
  if (isascii) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"BDDC interface preconditioner\n"));
  }
  PetscCall(PetscViewerASCIIPushTab(viewer));
  PetscCall(PCView(bddcipc_ctx->bddc,viewer));
  PetscCall(PetscViewerASCIIPopTab(viewer));
  PetscFunctionReturn(0);
}

/* Setup for the BDDC interface preconditioner shell: checks the inner PC is
   really of type PCBDDC, sets it up, and (re)builds the scatter mapping the
   outer global vector to the inner local interface vector */
static PetscErrorCode PCSetUp_BDDCIPC(PC pc)
{
  BDDCIPC_ctx bddcipc_ctx;
  PetscBool   isbddc;
  Vec         vv;
  IS          is;
  PC_IS       *pcis;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc,&bddcipc_ctx));
  PetscCall(PetscObjectTypeCompare((PetscObject)bddcipc_ctx->bddc,PCBDDC,&isbddc));
  PetscCheck(isbddc,PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"Invalid type %s. Must be of type bddc",((PetscObject)bddcipc_ctx->bddc)->type_name);
  PetscCall(PCSetUp(bddcipc_ctx->bddc));

  /* create interface scatter */
  pcis = (PC_IS*)(bddcipc_ctx->bddc->data);
  PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
  PetscCall(MatCreateVecs(pc->pmat,&vv,NULL));
  PetscCall(ISRenumber(pcis->is_B_global,NULL,NULL,&is));
  PetscCall(VecScatterCreate(vv,is,pcis->vec1_B,NULL,&bddcipc_ctx->g2l));
  PetscCall(ISDestroy(&is));
  PetscCall(VecDestroy(&vv));
  PetscFunctionReturn(0);
}

/* Apply the BDDC interface preconditioner: scaling restriction, interface
   correction, scaling extension. The inner BDDC's global_to_B scatter is
   temporarily swapped with the shell's own g2l scatter and restored afterwards. */
static PetscErrorCode PCApply_BDDCIPC(PC pc, Vec r, Vec x)
{
  BDDCIPC_ctx bddcipc_ctx;
  PC_IS       *pcis;
  VecScatter  tmps;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc,&bddcipc_ctx));
  pcis = (PC_IS*)(bddcipc_ctx->bddc->data);
  tmps = pcis->global_to_B;
  pcis->global_to_B = bddcipc_ctx->g2l;
  PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc,r,pcis->vec1_B));
  PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc,PETSC_FALSE));
  PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc,pcis->vec1_B,x));
  pcis->global_to_B = tmps;
  PetscFunctionReturn(0);
}

/* Transpose application of the BDDC interface preconditioner; identical to
   PCApply_BDDCIPC except the interface correction is applied transposed */
static PetscErrorCode PCApplyTranspose_BDDCIPC(PC pc, Vec r, Vec x)
{
  BDDCIPC_ctx bddcipc_ctx;
  PC_IS       *pcis;
  VecScatter  tmps;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc,&bddcipc_ctx));
  pcis = (PC_IS*)(bddcipc_ctx->bddc->data);
  tmps = pcis->global_to_B;
  pcis->global_to_B = bddcipc_ctx->g2l;
  PetscCall(PCBDDCScalingRestriction(bddcipc_ctx->bddc,r,pcis->vec1_B));
  PetscCall(PCBDDCApplyInterfacePreconditioner(bddcipc_ctx->bddc,PETSC_TRUE));
  PetscCall(PCBDDCScalingExtension(bddcipc_ctx->bddc,pcis->vec1_B,x));
  pcis->global_to_B = tmps;
  PetscFunctionReturn(0);
}

/* Destroy the BDDC interface preconditioner shell context */
static PetscErrorCode PCDestroy_BDDCIPC(PC pc)
{
  BDDCIPC_ctx bddcipc_ctx;

  PetscFunctionBegin;
  PetscCall(PCShellGetContext(pc,&bddcipc_ctx));
  PetscCall(PCDestroy(&bddcipc_ctx->bddc));
  PetscCall(VecScatterDestroy(&bddcipc_ctx->g2l));
  PetscCall(PetscFree(bddcipc_ctx));
  PetscFunctionReturn(0);
}

/*@
 PCBDDCMatFETIDPGetSolution - Compute the physical solution using the solution of the FETI-DP linear system

   Collective

   Input Parameters:
+  fetidp_mat      - the FETI-DP matrix obtained by a call to PCBDDCCreateFETIDPOperators
-  fetidp_flux_sol - the solution of the FETI-DP linear system

   Output Parameters:
.  standard_sol    - the solution defined on the physical domain

   Level: developer

   Notes:

.seealso: PCBDDC, PCBDDCCreateFETIDPOperators, PCBDDCMatFETIDPGetRHS
@*/
PetscErrorCode PCBDDCMatFETIDPGetSolution(Mat fetidp_mat, Vec fetidp_flux_sol, Vec standard_sol)
{
  FETIDPMat_ctx mat_ctx;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(fetidp_mat,MAT_CLASSID,1);
  PetscValidHeaderSpecific(fetidp_flux_sol,VEC_CLASSID,2);
  PetscValidHeaderSpecific(standard_sol,VEC_CLASSID,3);
  PetscCall(MatShellGetContext(fetidp_mat,&mat_ctx));
  /* dispatch to the method composed on the originating BDDC preconditioner */
  PetscUseMethod(mat_ctx->pc,"PCBDDCMatFETIDPGetSolution_C",(Mat,Vec,Vec),(fetidp_mat,fetidp_flux_sol,standard_sol));
  PetscFunctionReturn(0);
}

/* Create the shell FETI-DP operator and its preconditioner
   (implementation behind PCBDDCCreateFETIDPOperators) */
static PetscErrorCode PCBDDCCreateFETIDPOperators_BDDC(PC pc, PetscBool fully_redundant, const char* prefix, Mat *fetidp_mat, PC *fetidp_pc)
{
  FETIDPMat_ctx fetidpmat_ctx;
  Mat           newmat;
  FETIDPPC_ctx  fetidppc_ctx;
  PC            newpc;
  MPI_Comm      comm;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)pc,&comm));
  /* FETI-DP matrix */
  PetscCall(PCBDDCCreateFETIDPMatContext(pc,&fetidpmat_ctx));
  fetidpmat_ctx->fully_redundant = fully_redundant;
  PetscCall(PCBDDCSetupFETIDPMatContext(fetidpmat_ctx));
PetscCall(MatCreateShell(comm,fetidpmat_ctx->n,fetidpmat_ctx->n,fetidpmat_ctx->N,fetidpmat_ctx->N,fetidpmat_ctx,&newmat)); 2609 PetscCall(PetscObjectSetName((PetscObject)newmat,!fetidpmat_ctx->l2g_lambda_only ? "F" : "G")); 2610 PetscCall(MatShellSetOperation(newmat,MATOP_MULT,(void (*)(void))FETIDPMatMult)); 2611 PetscCall(MatShellSetOperation(newmat,MATOP_MULT_TRANSPOSE,(void (*)(void))FETIDPMatMultTranspose)); 2612 PetscCall(MatShellSetOperation(newmat,MATOP_DESTROY,(void (*)(void))PCBDDCDestroyFETIDPMat)); 2613 /* propagate MatOptions */ 2614 { 2615 PC_BDDC *pcbddc = (PC_BDDC*)fetidpmat_ctx->pc->data; 2616 PetscBool issym; 2617 2618 PetscCall(MatGetOption(pc->mat,MAT_SYMMETRIC,&issym)); 2619 if (issym || pcbddc->symmetric_primal) { 2620 PetscCall(MatSetOption(newmat,MAT_SYMMETRIC,PETSC_TRUE)); 2621 } 2622 } 2623 PetscCall(MatSetOptionsPrefix(newmat,prefix)); 2624 PetscCall(MatAppendOptionsPrefix(newmat,"fetidp_")); 2625 PetscCall(MatSetUp(newmat)); 2626 /* FETI-DP preconditioner */ 2627 PetscCall(PCBDDCCreateFETIDPPCContext(pc,&fetidppc_ctx)); 2628 PetscCall(PCBDDCSetupFETIDPPCContext(newmat,fetidppc_ctx)); 2629 PetscCall(PCCreate(comm,&newpc)); 2630 PetscCall(PCSetOperators(newpc,newmat,newmat)); 2631 PetscCall(PCSetOptionsPrefix(newpc,prefix)); 2632 PetscCall(PCAppendOptionsPrefix(newpc,"fetidp_")); 2633 PetscCall(PCSetErrorIfFailure(newpc,pc->erroriffailure)); 2634 if (!fetidpmat_ctx->l2g_lambda_only) { /* standard FETI-DP */ 2635 PetscCall(PCSetType(newpc,PCSHELL)); 2636 PetscCall(PCShellSetName(newpc,"FETI-DP multipliers")); 2637 PetscCall(PCShellSetContext(newpc,fetidppc_ctx)); 2638 PetscCall(PCShellSetApply(newpc,FETIDPPCApply)); 2639 PetscCall(PCShellSetApplyTranspose(newpc,FETIDPPCApplyTranspose)); 2640 PetscCall(PCShellSetView(newpc,FETIDPPCView)); 2641 PetscCall(PCShellSetDestroy(newpc,PCBDDCDestroyFETIDPPC)); 2642 } else { /* saddle-point FETI-DP */ 2643 Mat M; 2644 PetscInt psize; 2645 PetscBool fake = PETSC_FALSE, isfieldsplit; 2646 2647 
PetscCall(ISViewFromOptions(fetidpmat_ctx->lagrange,NULL,"-lag_view")); 2648 PetscCall(ISViewFromOptions(fetidpmat_ctx->pressure,NULL,"-press_view")); 2649 PetscCall(PetscObjectQuery((PetscObject)pc,"__KSPFETIDP_PPmat",(PetscObject*)&M)); 2650 PetscCall(PCSetType(newpc,PCFIELDSPLIT)); 2651 PetscCall(PCFieldSplitSetIS(newpc,"lag",fetidpmat_ctx->lagrange)); 2652 PetscCall(PCFieldSplitSetIS(newpc,"p",fetidpmat_ctx->pressure)); 2653 PetscCall(PCFieldSplitSetType(newpc,PC_COMPOSITE_SCHUR)); 2654 PetscCall(PCFieldSplitSetSchurFactType(newpc,PC_FIELDSPLIT_SCHUR_FACT_DIAG)); 2655 PetscCall(ISGetSize(fetidpmat_ctx->pressure,&psize)); 2656 if (psize != M->rmap->N) { 2657 Mat M2; 2658 PetscInt lpsize; 2659 2660 fake = PETSC_TRUE; 2661 PetscCall(ISGetLocalSize(fetidpmat_ctx->pressure,&lpsize)); 2662 PetscCall(MatCreate(comm,&M2)); 2663 PetscCall(MatSetType(M2,MATAIJ)); 2664 PetscCall(MatSetSizes(M2,lpsize,lpsize,psize,psize)); 2665 PetscCall(MatSetUp(M2)); 2666 PetscCall(MatAssemblyBegin(M2,MAT_FINAL_ASSEMBLY)); 2667 PetscCall(MatAssemblyEnd(M2,MAT_FINAL_ASSEMBLY)); 2668 PetscCall(PCFieldSplitSetSchurPre(newpc,PC_FIELDSPLIT_SCHUR_PRE_USER,M2)); 2669 PetscCall(MatDestroy(&M2)); 2670 } else { 2671 PetscCall(PCFieldSplitSetSchurPre(newpc,PC_FIELDSPLIT_SCHUR_PRE_USER,M)); 2672 } 2673 PetscCall(PCFieldSplitSetSchurScale(newpc,1.0)); 2674 2675 /* we need to setfromoptions and setup here to access the blocks */ 2676 PetscCall(PCSetFromOptions(newpc)); 2677 PetscCall(PCSetUp(newpc)); 2678 2679 /* user may have changed the type (e.g. 
-fetidp_pc_type none) */ 2680 PetscCall(PetscObjectTypeCompare((PetscObject)newpc,PCFIELDSPLIT,&isfieldsplit)); 2681 if (isfieldsplit) { 2682 KSP *ksps; 2683 PC ppc,lagpc; 2684 PetscInt nn; 2685 PetscBool ismatis,matisok = PETSC_FALSE,check = PETSC_FALSE; 2686 2687 /* set the solver for the (0,0) block */ 2688 PetscCall(PCFieldSplitSchurGetSubKSP(newpc,&nn,&ksps)); 2689 if (!nn) { /* not of type PC_COMPOSITE_SCHUR */ 2690 PetscCall(PCFieldSplitGetSubKSP(newpc,&nn,&ksps)); 2691 if (!fake) { /* pass pmat to the pressure solver */ 2692 Mat F; 2693 2694 PetscCall(KSPGetOperators(ksps[1],&F,NULL)); 2695 PetscCall(KSPSetOperators(ksps[1],F,M)); 2696 } 2697 } else { 2698 PetscBool issym; 2699 Mat S; 2700 2701 PetscCall(PCFieldSplitSchurGetS(newpc,&S)); 2702 2703 PetscCall(MatGetOption(newmat,MAT_SYMMETRIC,&issym)); 2704 if (issym) { 2705 PetscCall(MatSetOption(S,MAT_SYMMETRIC,PETSC_TRUE)); 2706 } 2707 } 2708 PetscCall(KSPGetPC(ksps[0],&lagpc)); 2709 PetscCall(PCSetType(lagpc,PCSHELL)); 2710 PetscCall(PCShellSetName(lagpc,"FETI-DP multipliers")); 2711 PetscCall(PCShellSetContext(lagpc,fetidppc_ctx)); 2712 PetscCall(PCShellSetApply(lagpc,FETIDPPCApply)); 2713 PetscCall(PCShellSetApplyTranspose(lagpc,FETIDPPCApplyTranspose)); 2714 PetscCall(PCShellSetView(lagpc,FETIDPPCView)); 2715 PetscCall(PCShellSetDestroy(lagpc,PCBDDCDestroyFETIDPPC)); 2716 2717 /* Olof's idea: interface Schur complement preconditioner for the mass matrix */ 2718 PetscCall(KSPGetPC(ksps[1],&ppc)); 2719 if (fake) { 2720 BDDCIPC_ctx bddcipc_ctx; 2721 PetscContainer c; 2722 2723 matisok = PETSC_TRUE; 2724 2725 /* create inner BDDC solver */ 2726 PetscCall(PetscNew(&bddcipc_ctx)); 2727 PetscCall(PCCreate(comm,&bddcipc_ctx->bddc)); 2728 PetscCall(PCSetType(bddcipc_ctx->bddc,PCBDDC)); 2729 PetscCall(PCSetOperators(bddcipc_ctx->bddc,M,M)); 2730 PetscCall(PetscObjectQuery((PetscObject)pc,"__KSPFETIDP_pCSR",(PetscObject*)&c)); 2731 PetscCall(PetscObjectTypeCompare((PetscObject)M,MATIS,&ismatis)); 2732 if (c && 
ismatis) { 2733 Mat lM; 2734 PetscInt *csr,n; 2735 2736 PetscCall(MatISGetLocalMat(M,&lM)); 2737 PetscCall(MatGetSize(lM,&n,NULL)); 2738 PetscCall(PetscContainerGetPointer(c,(void**)&csr)); 2739 PetscCall(PCBDDCSetLocalAdjacencyGraph(bddcipc_ctx->bddc,n,csr,csr + (n + 1),PETSC_COPY_VALUES)); 2740 PetscCall(MatISRestoreLocalMat(M,&lM)); 2741 } 2742 PetscCall(PCSetOptionsPrefix(bddcipc_ctx->bddc,((PetscObject)ksps[1])->prefix)); 2743 PetscCall(PCSetErrorIfFailure(bddcipc_ctx->bddc,pc->erroriffailure)); 2744 PetscCall(PCSetFromOptions(bddcipc_ctx->bddc)); 2745 2746 /* wrap the interface application */ 2747 PetscCall(PCSetType(ppc,PCSHELL)); 2748 PetscCall(PCShellSetName(ppc,"FETI-DP pressure")); 2749 PetscCall(PCShellSetContext(ppc,bddcipc_ctx)); 2750 PetscCall(PCShellSetSetUp(ppc,PCSetUp_BDDCIPC)); 2751 PetscCall(PCShellSetApply(ppc,PCApply_BDDCIPC)); 2752 PetscCall(PCShellSetApplyTranspose(ppc,PCApplyTranspose_BDDCIPC)); 2753 PetscCall(PCShellSetView(ppc,PCView_BDDCIPC)); 2754 PetscCall(PCShellSetDestroy(ppc,PCDestroy_BDDCIPC)); 2755 } 2756 2757 /* determine if we need to assemble M to construct a preconditioner */ 2758 if (!matisok) { 2759 PetscCall(PetscObjectTypeCompare((PetscObject)M,MATIS,&ismatis)); 2760 PetscCall(PetscObjectTypeCompareAny((PetscObject)ppc,&matisok,PCBDDC,PCJACOBI,PCNONE,PCMG,"")); 2761 if (ismatis && !matisok) { 2762 PetscCall(MatConvert(M,MATAIJ,MAT_INPLACE_MATRIX,&M)); 2763 } 2764 } 2765 2766 /* run the subproblems to check convergence */ 2767 PetscCall(PetscOptionsGetBool(NULL,((PetscObject)newmat)->prefix,"-check_saddlepoint",&check,NULL)); 2768 if (check) { 2769 PetscInt i; 2770 2771 for (i=0;i<nn;i++) { 2772 KSP kspC; 2773 PC pc; 2774 Mat F,pF; 2775 Vec x,y; 2776 PetscBool isschur,prec = PETSC_TRUE; 2777 2778 PetscCall(KSPCreate(PetscObjectComm((PetscObject)ksps[i]),&kspC)); 2779 PetscCall(KSPSetOptionsPrefix(kspC,((PetscObject)ksps[i])->prefix)); 2780 PetscCall(KSPAppendOptionsPrefix(kspC,"check_")); 2781 
PetscCall(KSPGetOperators(ksps[i],&F,&pF)); 2782 PetscCall(PetscObjectTypeCompare((PetscObject)F,MATSCHURCOMPLEMENT,&isschur)); 2783 if (isschur) { 2784 KSP kspS,kspS2; 2785 Mat A00,pA00,A10,A01,A11; 2786 char prefix[256]; 2787 2788 PetscCall(MatSchurComplementGetKSP(F,&kspS)); 2789 PetscCall(MatSchurComplementGetSubMatrices(F,&A00,&pA00,&A01,&A10,&A11)); 2790 PetscCall(MatCreateSchurComplement(A00,pA00,A01,A10,A11,&F)); 2791 PetscCall(MatSchurComplementGetKSP(F,&kspS2)); 2792 PetscCall(PetscSNPrintf(prefix,sizeof(prefix),"%sschur_",((PetscObject)kspC)->prefix)); 2793 PetscCall(KSPSetOptionsPrefix(kspS2,prefix)); 2794 PetscCall(KSPGetPC(kspS2,&pc)); 2795 PetscCall(PCSetType(pc,PCKSP)); 2796 PetscCall(PCKSPSetKSP(pc,kspS)); 2797 PetscCall(KSPSetFromOptions(kspS2)); 2798 PetscCall(KSPGetPC(kspS2,&pc)); 2799 PetscCall(PCSetUseAmat(pc,PETSC_TRUE)); 2800 } else { 2801 PetscCall(PetscObjectReference((PetscObject)F)); 2802 } 2803 PetscCall(KSPSetFromOptions(kspC)); 2804 PetscCall(PetscOptionsGetBool(NULL,((PetscObject)kspC)->prefix,"-preconditioned",&prec,NULL)); 2805 if (prec) { 2806 PetscCall(KSPGetPC(ksps[i],&pc)); 2807 PetscCall(KSPSetPC(kspC,pc)); 2808 } 2809 PetscCall(KSPSetOperators(kspC,F,pF)); 2810 PetscCall(MatCreateVecs(F,&x,&y)); 2811 PetscCall(VecSetRandom(x,NULL)); 2812 PetscCall(MatMult(F,x,y)); 2813 PetscCall(KSPSolve(kspC,y,x)); 2814 PetscCall(KSPCheckSolve(kspC,pc,x)); 2815 PetscCall(KSPDestroy(&kspC)); 2816 PetscCall(MatDestroy(&F)); 2817 PetscCall(VecDestroy(&x)); 2818 PetscCall(VecDestroy(&y)); 2819 } 2820 } 2821 PetscCall(PetscFree(ksps)); 2822 } 2823 } 2824 /* return pointers for objects created */ 2825 *fetidp_mat = newmat; 2826 *fetidp_pc = newpc; 2827 PetscFunctionReturn(0); 2828 } 2829 2830 /*@C 2831 PCBDDCCreateFETIDPOperators - Create FETI-DP operators 2832 2833 Collective 2834 2835 Input Parameters: 2836 + pc - the BDDC preconditioning context (setup should have been called before) 2837 . 
fully_redundant - true for a fully redundant set of Lagrange multipliers 2838 - prefix - optional options database prefix for the objects to be created (can be NULL) 2839 2840 Output Parameters: 2841 + fetidp_mat - shell FETI-DP matrix object 2842 - fetidp_pc - shell Dirichlet preconditioner for FETI-DP matrix 2843 2844 Level: developer 2845 2846 Notes: 2847 Currently the only operations provided for FETI-DP matrix are MatMult and MatMultTranspose 2848 2849 .seealso: PCBDDC, PCBDDCMatFETIDPGetRHS, PCBDDCMatFETIDPGetSolution 2850 @*/ 2851 PetscErrorCode PCBDDCCreateFETIDPOperators(PC pc, PetscBool fully_redundant, const char *prefix, Mat *fetidp_mat, PC *fetidp_pc) 2852 { 2853 PetscFunctionBegin; 2854 PetscValidHeaderSpecific(pc,PC_CLASSID,1); 2855 if (pc->setupcalled) { 2856 PetscUseMethod(pc,"PCBDDCCreateFETIDPOperators_C",(PC,PetscBool,const char*,Mat*,PC*),(pc,fully_redundant,prefix,fetidp_mat,fetidp_pc)); 2857 } else SETERRQ(PetscObjectComm((PetscObject)pc),PETSC_ERR_SUP,"You must call PCSetup_BDDC() first"); 2858 PetscFunctionReturn(0); 2859 } 2860 /* -------------------------------------------------------------------------- */ 2861 /*MC 2862 PCBDDC - Balancing Domain Decomposition by Constraints. 2863 2864 An implementation of the BDDC preconditioner based on the bibliography found below. 2865 2866 The matrix to be preconditioned (Pmat) must be of type MATIS. 2867 2868 Currently works with MATIS matrices with local matrices of type MATSEQAIJ, MATSEQBAIJ or MATSEQSBAIJ, either with real or complex numbers. 2869 2870 It also works with unsymmetric and indefinite problems. 2871 2872 Unlike 'conventional' interface preconditioners, PCBDDC iterates over all degrees of freedom, not just those on the interface. This allows the use of approximate solvers on the subdomains. 
2873 2874 Approximate local solvers are automatically adapted (see [1]) if the user has attached a nullspace object to the subdomain matrices, and informed BDDC of using approximate solvers (via the command line). 2875 2876 Boundary nodes are split in vertices, edges and faces classes using information from the local to global mapping of dofs and the local connectivity graph of nodes. The latter can be customized by using PCBDDCSetLocalAdjacencyGraph() 2877 Additional information on dofs can be provided by using PCBDDCSetDofsSplitting(), PCBDDCSetDirichletBoundaries(), PCBDDCSetNeumannBoundaries(), and PCBDDCSetPrimalVerticesIS() and their local counterparts. 2878 2879 Constraints can be customized by attaching a MatNullSpace object to the MATIS matrix via MatSetNearNullSpace(). Non-singular modes are retained via SVD. 2880 2881 Change of basis is performed similarly to [2] when requested. When more than one constraint is present on a single connected component (i.e. an edge or a face), a robust method based on local QR factorizations is used. 2882 User defined change of basis can be passed to PCBDDC by using PCBDDCSetChangeOfBasisMat() 2883 2884 The PETSc implementation also supports multilevel BDDC [3]. Coarse grids are partitioned using a MatPartitioning object. 2885 2886 Adaptive selection of primal constraints [4] is supported for SPD systems with high-contrast in the coefficients if MUMPS or MKL_PARDISO are present. Future versions of the code will also consider using PASTIX. 2887 2888 An experimental interface to the FETI-DP method is available. FETI-DP operators could be created using PCBDDCCreateFETIDPOperators(). A stand-alone class for the FETI-DP method will be provided in the next releases. 2889 2890 Options Database Keys (some of them, run with -help for a complete list): 2891 2892 + -pc_bddc_use_vertices <true> - use or not vertices in primal space 2893 . -pc_bddc_use_edges <true> - use or not edges in primal space 2894 . 
-pc_bddc_use_faces <false> - use or not faces in primal space
.    -pc_bddc_symmetric <true> - symmetric computation of primal basis functions. Specify false for unsymmetric problems
.    -pc_bddc_use_change_of_basis <false> - use change of basis approach (on edges only)
.    -pc_bddc_use_change_on_faces <false> - use change of basis approach on faces if change of basis has been requested
.    -pc_bddc_switch_static <false> - switches from M_2 (default) to M_3 operator (see reference article [1])
.    -pc_bddc_levels <0> - maximum number of levels for multilevel
.    -pc_bddc_coarsening_ratio <8> - number of subdomains which will be aggregated together at the coarser level (e.g. H/h ratio at the coarser level, significant only in the multilevel case)
.    -pc_bddc_coarse_redistribute <0> - size of a subset of processors where the coarse problem will be remapped (the value is ignored if not at the coarsest level)
.    -pc_bddc_use_deluxe_scaling <false> - use deluxe scaling
.    -pc_bddc_schur_layers <\-1> - select the economic version of deluxe scaling by specifying the number of layers (-1 corresponds to the original deluxe scaling)
.    -pc_bddc_adaptive_threshold <0.0> - when a value different from zero is specified, adaptive selection of constraints is performed on edges and faces (requires deluxe scaling and MUMPS or MKL_PARDISO installed)
-    -pc_bddc_check_level <0> - set verbosity level of debugging output

   Options for the Dirichlet, Neumann or coarse solvers can be set with
.vb
      -pc_bddc_dirichlet_
      -pc_bddc_neumann_
      -pc_bddc_coarse_
.ve
   e.g. -pc_bddc_dirichlet_ksp_type richardson -pc_bddc_dirichlet_pc_type gamg. PCBDDC uses by default KSPPREONLY and PCLU.

   When using a multilevel approach, solvers' options at the N-th level (N > 1) can be specified as
.vb
      -pc_bddc_dirichlet_lN_
      -pc_bddc_neumann_lN_
      -pc_bddc_coarse_lN_
.ve
   Note that the level number ranges from the finest (0) to the coarsest (N).
   In order to specify options for the BDDC operators at the coarser levels (and not for the solvers), prepend -pc_bddc_coarse_ or -pc_bddc_coarse_lN_ to the option, e.g.
.vb
     -pc_bddc_coarse_pc_bddc_adaptive_threshold 5 -pc_bddc_coarse_l1_pc_bddc_redistribute 3
.ve
   will use a threshold of 5 for constraints' selection at the first coarse level and will redistribute the coarse problem of the first coarse level on 3 processors.

   References:
+  * - C. R. Dohrmann. "An approximate BDDC preconditioner", Numerical Linear Algebra with Applications Volume 14, Issue 2, pages 149--168, March 2007
.  * - A. Klawonn and O. B. Widlund. "Dual-Primal FETI Methods for Linear Elasticity", Communications on Pure and Applied Mathematics Volume 59, Issue 11, pages 1523--1572, November 2006
.  * - J. Mandel, B. Sousedik, C. R. Dohrmann. "Multispace and Multilevel BDDC", Computing Volume 83, Issue 2--3, pages 55--85, November 2008
-  * - C. Pechstein and C. R. Dohrmann.
"Modern domain decomposition methods BDDC, deluxe scaling, and an algebraic approach", Seminar talk, Linz, December 2013, http://people.ricam.oeaw.ac.at/c.pechstein/pechstein-bddc2013.pdf

   Level: intermediate

   Developer Notes:

   Contributed by Stefano Zampini

.seealso: PCCreate(), PCSetType(), PCType (for list of available types), PC, MATIS
M*/

/* Constructor for PCBDDC: allocates the private context, sets the nonzero
   defaults, installs the type-specific operations and composes the methods
   implementing the PCBDDC public API on the PC object. */
PETSC_EXTERN PetscErrorCode PCCreate_BDDC(PC pc)
{
  PC_BDDC *pcbddc;

  PetscFunctionBegin;
  /* allocate the private context and attach it to the PC */
  PetscCall(PetscNewLog(pc,&pcbddc));
  pc->data = pcbddc;

  /* create PCIS data structure */
  PetscCall(PCISCreate(pc));

  /* create local graph structure */
  PetscCall(PCBDDCGraphCreate(&pcbddc->mat_graph));

  /* BDDC nonzero defaults (fields not listed here are zeroed by PetscNewLog) */
  pcbddc->use_nnsp                  = PETSC_TRUE;
  pcbddc->use_local_adj             = PETSC_TRUE;
  pcbddc->use_vertices              = PETSC_TRUE;
  pcbddc->use_edges                 = PETSC_TRUE;
  pcbddc->symmetric_primal          = PETSC_TRUE;
  pcbddc->vertex_size               = 1;
  pcbddc->recompute_topography      = PETSC_TRUE;
  pcbddc->coarse_size               = -1;  /* -1: not yet computed */
  pcbddc->use_exact_dirichlet_trick = PETSC_TRUE;
  pcbddc->coarsening_ratio          = 8;
  pcbddc->coarse_eqs_per_proc       = 1;
  pcbddc->benign_compute_correction = PETSC_TRUE;
  pcbddc->nedfield                  = -1;  /* -1: no Nedelec field specified */
  pcbddc->nedglobal                 = PETSC_TRUE;
  pcbddc->graphmaxcount             = PETSC_MAX_INT;
  pcbddc->sub_schurs_layers         = -1;  /* -1: full deluxe scaling (no economic layers) */
  pcbddc->adaptive_threshold[0]     = 0.0;
  pcbddc->adaptive_threshold[1]     = 0.0;

  /* function pointers */
  pc->ops->apply               = PCApply_BDDC;
  pc->ops->applytranspose      = PCApplyTranspose_BDDC;
  pc->ops->setup               = PCSetUp_BDDC;
  pc->ops->destroy             = PCDestroy_BDDC;
  pc->ops->setfromoptions      = PCSetFromOptions_BDDC;
  pc->ops->view                = PCView_BDDC;
  pc->ops->applyrichardson     = NULL;
  pc->ops->applysymmetricleft  = NULL;
  pc->ops->applysymmetricright = NULL;
  pc->ops->presolve            = PCPreSolve_BDDC;
  pc->ops->postsolve           = PCPostSolve_BDDC;
  pc->ops->reset               = PCReset_BDDC;

  /* composing function */
  /* customization of the preconditioner (setters) */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDiscreteGradient_C",PCBDDCSetDiscreteGradient_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDivergenceMat_C",PCBDDCSetDivergenceMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetChangeOfBasisMat_C",PCBDDCSetChangeOfBasisMat_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesLocalIS_C",PCBDDCSetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetPrimalVerticesIS_C",PCBDDCSetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetPrimalVerticesLocalIS_C",PCBDDCGetPrimalVerticesLocalIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetPrimalVerticesIS_C",PCBDDCGetPrimalVerticesIS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetCoarseningRatio_C",PCBDDCSetCoarseningRatio_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevel_C",PCBDDCSetLevel_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetUseExactDirichlet_C",PCBDDCSetUseExactDirichlet_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLevels_C",PCBDDCSetLevels_BDDC));
  /* boundary and dof-splitting information (setters and getters) */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundaries_C",PCBDDCSetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDirichletBoundariesLocal_C",PCBDDCSetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundaries_C",PCBDDCSetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetNeumannBoundariesLocal_C",PCBDDCSetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundaries_C",PCBDDCGetDirichletBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetDirichletBoundariesLocal_C",PCBDDCGetDirichletBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundaries_C",PCBDDCGetNeumannBoundaries_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCGetNeumannBoundariesLocal_C",PCBDDCGetNeumannBoundariesLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplitting_C",PCBDDCSetDofsSplitting_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetDofsSplittingLocal_C",PCBDDCSetDofsSplittingLocal_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCSetLocalAdjacencyGraph_C",PCBDDCSetLocalAdjacencyGraph_BDDC));
  /* FETI-DP support */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCCreateFETIDPOperators_C",PCBDDCCreateFETIDPOperators_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetRHS_C",PCBDDCMatFETIDPGetRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCBDDCMatFETIDPGetSolution_C",PCBDDCMatFETIDPGetSolution_BDDC));
  /* generic PC hooks */
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCPreSolveChangeRHS_C",PCPreSolveChangeRHS_BDDC));
  PetscCall(PetscObjectComposeFunction((PetscObject)pc,"PCSetCoordinates_C",PCSetCoordinates_BDDC));
  PetscFunctionReturn(0);
}

/*@C
     PCBDDCInitializePackage - This function initializes everything in the PCBDDC package. It is called
  from PCInitializePackage().
3025 3026 Level: developer 3027 3028 .seealso: PetscInitialize() 3029 @*/ 3030 PetscErrorCode PCBDDCInitializePackage(void) 3031 { 3032 int i; 3033 3034 PetscFunctionBegin; 3035 if (PCBDDCPackageInitialized) PetscFunctionReturn(0); 3036 PCBDDCPackageInitialized = PETSC_TRUE; 3037 PetscCall(PetscRegisterFinalize(PCBDDCFinalizePackage)); 3038 3039 /* general events */ 3040 PetscCall(PetscLogEventRegister("PCBDDCTopo",PC_CLASSID,&PC_BDDC_Topology[0])); 3041 PetscCall(PetscLogEventRegister("PCBDDCLKSP",PC_CLASSID,&PC_BDDC_LocalSolvers[0])); 3042 PetscCall(PetscLogEventRegister("PCBDDCLWor",PC_CLASSID,&PC_BDDC_LocalWork[0])); 3043 PetscCall(PetscLogEventRegister("PCBDDCCorr",PC_CLASSID,&PC_BDDC_CorrectionSetUp[0])); 3044 PetscCall(PetscLogEventRegister("PCBDDCASet",PC_CLASSID,&PC_BDDC_ApproxSetUp[0])); 3045 PetscCall(PetscLogEventRegister("PCBDDCAApp",PC_CLASSID,&PC_BDDC_ApproxApply[0])); 3046 PetscCall(PetscLogEventRegister("PCBDDCCSet",PC_CLASSID,&PC_BDDC_CoarseSetUp[0])); 3047 PetscCall(PetscLogEventRegister("PCBDDCCKSP",PC_CLASSID,&PC_BDDC_CoarseSolver[0])); 3048 PetscCall(PetscLogEventRegister("PCBDDCAdap",PC_CLASSID,&PC_BDDC_AdaptiveSetUp[0])); 3049 PetscCall(PetscLogEventRegister("PCBDDCScal",PC_CLASSID,&PC_BDDC_Scaling[0])); 3050 PetscCall(PetscLogEventRegister("PCBDDCSchr",PC_CLASSID,&PC_BDDC_Schurs[0])); 3051 PetscCall(PetscLogEventRegister("PCBDDCDirS",PC_CLASSID,&PC_BDDC_Solves[0][0])); 3052 PetscCall(PetscLogEventRegister("PCBDDCNeuS",PC_CLASSID,&PC_BDDC_Solves[0][1])); 3053 PetscCall(PetscLogEventRegister("PCBDDCCoaS",PC_CLASSID,&PC_BDDC_Solves[0][2])); 3054 for (i=1;i<PETSC_PCBDDC_MAXLEVELS;i++) { 3055 char ename[32]; 3056 3057 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCTopo l%02d",i)); 3058 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_Topology[i])); 3059 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCLKSP l%02d",i)); 3060 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_LocalSolvers[i])); 3061 
PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCLWor l%02d",i)); 3062 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_LocalWork[i])); 3063 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCCorr l%02d",i)); 3064 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_CorrectionSetUp[i])); 3065 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCASet l%02d",i)); 3066 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_ApproxSetUp[i])); 3067 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCAApp l%02d",i)); 3068 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_ApproxApply[i])); 3069 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCCSet l%02d",i)); 3070 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_CoarseSetUp[i])); 3071 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCCKSP l%02d",i)); 3072 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_CoarseSolver[i])); 3073 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCAdap l%02d",i)); 3074 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_AdaptiveSetUp[i])); 3075 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCScal l%02d",i)); 3076 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_Scaling[i])); 3077 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCSchr l%02d",i)); 3078 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_Schurs[i])); 3079 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCDirS l%02d",i)); 3080 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_Solves[i][0])); 3081 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCNeuS l%02d",i)); 3082 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_Solves[i][1])); 3083 PetscCall(PetscSNPrintf(ename,sizeof(ename),"PCBDDCCoaS l%02d",i)); 3084 PetscCall(PetscLogEventRegister(ename,PC_CLASSID,&PC_BDDC_Solves[i][2])); 3085 } 3086 PetscFunctionReturn(0); 3087 } 3088 3089 /*@C 3090 PCBDDCFinalizePackage - This function frees everything from the PCBDDC package. 
It is
  called from PetscFinalize() automatically.

  Level: developer

.seealso: PetscFinalize()
@*/
PetscErrorCode PCBDDCFinalizePackage(void)
{
  PetscFunctionBegin;
  /* Only the initialization flag needs to be reset so the package can be
     registered again after a subsequent PetscInitialize(); the log events
     themselves require no explicit teardown. */
  PCBDDCPackageInitialized = PETSC_FALSE;
  PetscFunctionReturn(0);
}