1 #include <../src/tao/constrained/impls/admm/admm.h> /*I "petsctao.h" I*/ 2 #include <petsctao.h> 3 #include <petsc/private/petscimpl.h> 4 5 /* Updates terminating criteria 6 * 7 * 1 ||r_k|| = ||Ax+Bz-c|| =< catol_admm* max{||Ax||,||Bz||,||c||} 8 * 9 * 2. Updates dual residual, d_k 10 * 11 * 3. ||d_k|| = ||mu*A^T*B(z_k-z_{k-1})|| =< gatol_admm * ||A^Ty|| */ 12 13 static PetscBool cited = PETSC_FALSE; 14 static const char citation[] = "@misc{xu2017adaptive,\n" 15 " title={Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation},\n" 16 " author={Zheng Xu and Mario A. T. Figueiredo and Xiaoming Yuan and Christoph Studer and Tom Goldstein},\n" 17 " year={2017},\n" 18 " eprint={1704.02712},\n" 19 " archivePrefix={arXiv},\n" 20 " primaryClass={cs.CV}\n" 21 "} \n"; 22 23 const char *const TaoADMMRegularizerTypes[] = {"REGULARIZER_USER", "REGULARIZER_SOFT_THRESH", "TaoADMMRegularizerType", "TAO_ADMM_", NULL}; 24 const char *const TaoADMMUpdateTypes[] = {"UPDATE_BASIC", "UPDATE_ADAPTIVE", "UPDATE_ADAPTIVE_RELAXED", "TaoADMMUpdateType", "TAO_ADMM_", NULL}; 25 const char *const TaoALMMTypes[] = {"CLASSIC", "PHR", "TaoALMMType", "TAO_ALMM_", NULL}; 26 27 static PetscErrorCode TaoADMMToleranceUpdate(Tao tao) 28 { 29 TAO_ADMM *am = (TAO_ADMM *)tao->data; 30 PetscReal Axnorm, Bznorm, ATynorm, temp; 31 Vec tempJR, tempL; 32 Tao mis; 33 34 PetscFunctionBegin; 35 mis = am->subsolverX; 36 tempJR = am->workJacobianRight; 37 tempL = am->workLeft; 38 /* ATy */ 39 PetscCall(TaoComputeJacobianEquality(mis, am->y, mis->jacobian_equality, mis->jacobian_equality_pre)); 40 PetscCall(MatMultTranspose(mis->jacobian_equality, am->y, tempJR)); 41 PetscCall(VecNorm(tempJR, NORM_2, &ATynorm)); 42 /* dualres = mu * ||AT(Bz-Bzold)||_2 */ 43 PetscCall(VecWAXPY(tempJR, -1., am->Bzold, am->Bz)); 44 PetscCall(MatMultTranspose(mis->jacobian_equality, tempJR, tempL)); 45 PetscCall(VecNorm(tempL, NORM_2, &am->dualres)); 46 am->dualres *= am->mu; 47 48 /* ||Ax||_2, ||Bz||_2 */ 49 
PetscCall(VecNorm(am->Ax, NORM_2, &Axnorm)); 50 PetscCall(VecNorm(am->Bz, NORM_2, &Bznorm)); 51 52 /* Set catol to be catol_admm * max{||Ax||,||Bz||,||c||} * 53 * Set gatol to be gatol_admm * ||A^Ty|| * 54 * while cnorm is ||r_k||_2, and gnorm is ||d_k||_2 */ 55 temp = am->catol_admm * PetscMax(Axnorm, (!am->const_norm) ? Bznorm : PetscMax(Bznorm, am->const_norm)); 56 PetscCall(TaoSetConstraintTolerances(tao, temp, PETSC_DEFAULT)); 57 PetscCall(TaoSetTolerances(tao, am->gatol_admm * ATynorm, PETSC_DEFAULT, PETSC_DEFAULT)); 58 PetscFunctionReturn(PETSC_SUCCESS); 59 } 60 61 /* Penaly Update for Adaptive ADMM. */ 62 static PetscErrorCode AdaptiveADMMPenaltyUpdate(Tao tao) 63 { 64 TAO_ADMM *am = (TAO_ADMM *)tao->data; 65 PetscReal ydiff_norm, yhatdiff_norm, Axdiff_norm, Bzdiff_norm, Axyhat, Bzy, a_sd, a_mg, a_k, b_sd, b_mg, b_k; 66 PetscBool hflag, gflag; 67 Vec tempJR, tempJR2; 68 69 PetscFunctionBegin; 70 tempJR = am->workJacobianRight; 71 tempJR2 = am->workJacobianRight2; 72 hflag = PETSC_FALSE; 73 gflag = PETSC_FALSE; 74 a_k = -1; 75 b_k = -1; 76 77 PetscCall(VecWAXPY(tempJR, -1., am->Axold, am->Ax)); 78 PetscCall(VecWAXPY(tempJR2, -1., am->yhatold, am->yhat)); 79 PetscCall(VecNorm(tempJR, NORM_2, &Axdiff_norm)); 80 PetscCall(VecNorm(tempJR2, NORM_2, &yhatdiff_norm)); 81 PetscCall(VecDot(tempJR, tempJR2, &Axyhat)); 82 83 PetscCall(VecWAXPY(tempJR, -1., am->Bz0, am->Bz)); 84 PetscCall(VecWAXPY(tempJR2, -1., am->y, am->y0)); 85 PetscCall(VecNorm(tempJR, NORM_2, &Bzdiff_norm)); 86 PetscCall(VecNorm(tempJR2, NORM_2, &ydiff_norm)); 87 PetscCall(VecDot(tempJR, tempJR2, &Bzy)); 88 89 if (Axyhat > am->orthval * Axdiff_norm * yhatdiff_norm + am->mueps) { 90 hflag = PETSC_TRUE; 91 a_sd = PetscSqr(yhatdiff_norm) / Axyhat; /* alphaSD */ 92 a_mg = Axyhat / PetscSqr(Axdiff_norm); /* alphaMG */ 93 a_k = (a_mg / a_sd) > 0.5 ? 
a_mg : a_sd - 0.5 * a_mg; 94 } 95 if (Bzy > am->orthval * Bzdiff_norm * ydiff_norm + am->mueps) { 96 gflag = PETSC_TRUE; 97 b_sd = PetscSqr(ydiff_norm) / Bzy; /* betaSD */ 98 b_mg = Bzy / PetscSqr(Bzdiff_norm); /* betaMG */ 99 b_k = (b_mg / b_sd) > 0.5 ? b_mg : b_sd - 0.5 * b_mg; 100 } 101 am->muold = am->mu; 102 if (gflag && hflag) { 103 am->mu = PetscSqrtReal(a_k * b_k); 104 } else if (hflag) { 105 am->mu = a_k; 106 } else if (gflag) { 107 am->mu = b_k; 108 } 109 if (am->mu > am->muold) am->mu = am->muold; 110 if (am->mu < am->mumin) am->mu = am->mumin; 111 PetscFunctionReturn(PETSC_SUCCESS); 112 } 113 114 static PetscErrorCode TaoADMMSetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType type) 115 { 116 TAO_ADMM *am = (TAO_ADMM *)tao->data; 117 118 PetscFunctionBegin; 119 am->regswitch = type; 120 PetscFunctionReturn(PETSC_SUCCESS); 121 } 122 123 static PetscErrorCode TaoADMMGetRegularizerType_ADMM(Tao tao, TaoADMMRegularizerType *type) 124 { 125 TAO_ADMM *am = (TAO_ADMM *)tao->data; 126 127 PetscFunctionBegin; 128 *type = am->regswitch; 129 PetscFunctionReturn(PETSC_SUCCESS); 130 } 131 132 static PetscErrorCode TaoADMMSetUpdateType_ADMM(Tao tao, TaoADMMUpdateType type) 133 { 134 TAO_ADMM *am = (TAO_ADMM *)tao->data; 135 136 PetscFunctionBegin; 137 am->update = type; 138 PetscFunctionReturn(PETSC_SUCCESS); 139 } 140 141 static PetscErrorCode TaoADMMGetUpdateType_ADMM(Tao tao, TaoADMMUpdateType *type) 142 { 143 TAO_ADMM *am = (TAO_ADMM *)tao->data; 144 145 PetscFunctionBegin; 146 *type = am->update; 147 PetscFunctionReturn(PETSC_SUCCESS); 148 } 149 150 /* This routine updates Jacobians with new x,z vectors, 151 * and then updates Ax and Bz vectors, then computes updated residual vector*/ 152 static PetscErrorCode ADMMUpdateConstraintResidualVector(Tao tao, Vec x, Vec z, Vec Ax, Vec Bz, Vec residual) 153 { 154 TAO_ADMM *am = (TAO_ADMM *)tao->data; 155 Tao mis, reg; 156 157 PetscFunctionBegin; 158 mis = am->subsolverX; 159 reg = am->subsolverZ; 160 
PetscCall(TaoComputeJacobianEquality(mis, x, mis->jacobian_equality, mis->jacobian_equality_pre)); 161 PetscCall(MatMult(mis->jacobian_equality, x, Ax)); 162 PetscCall(TaoComputeJacobianEquality(reg, z, reg->jacobian_equality, reg->jacobian_equality_pre)); 163 PetscCall(MatMult(reg->jacobian_equality, z, Bz)); 164 165 PetscCall(VecWAXPY(residual, 1., Bz, Ax)); 166 if (am->constraint != NULL) PetscCall(VecAXPY(residual, -1., am->constraint)); 167 PetscFunctionReturn(PETSC_SUCCESS); 168 } 169 170 /* Updates Augmented Lagrangians to given routines * 171 * For subsolverX, routine needs to be ComputeObjectiveAndGraidnet 172 * Separate Objective and Gradient routines are not supported. */ 173 static PetscErrorCode SubObjGradUpdate(Tao tao, Vec x, PetscReal *f, Vec g, void *ptr) 174 { 175 Tao parent = (Tao)ptr; 176 TAO_ADMM *am = (TAO_ADMM *)parent->data; 177 PetscReal temp, temp2; 178 Vec tempJR; 179 180 PetscFunctionBegin; 181 tempJR = am->workJacobianRight; 182 PetscCall(ADMMUpdateConstraintResidualVector(parent, x, am->subsolverZ->solution, am->Ax, am->Bz, am->residual)); 183 PetscCall((*am->ops->misfitobjgrad)(am->subsolverX, x, f, g, am->misfitobjgradP)); 184 185 am->last_misfit_val = *f; 186 /* Objective Add + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */ 187 PetscCall(VecTDot(am->residual, am->y, &temp)); 188 PetscCall(VecTDot(am->residual, am->residual, &temp2)); 189 *f += temp + (am->mu / 2) * temp2; 190 191 /* Gradient. Add + mu*AT(Ax+Bz-c) + yTA*/ 192 PetscCall(MatMultTranspose(tao->jacobian_equality, am->residual, tempJR)); 193 PetscCall(VecAXPY(g, am->mu, tempJR)); 194 PetscCall(MatMultTranspose(tao->jacobian_equality, am->y, tempJR)); 195 PetscCall(VecAXPY(g, 1., tempJR)); 196 PetscFunctionReturn(PETSC_SUCCESS); 197 } 198 199 /* Updates Augmented Lagrangians to given routines 200 * For subsolverZ, routine needs to be ComputeObjectiveAndGraidnet 201 * Separate Objective and Gradient routines are not supported. 
*/ 202 static PetscErrorCode RegObjGradUpdate(Tao tao, Vec z, PetscReal *f, Vec g, void *ptr) 203 { 204 Tao parent = (Tao)ptr; 205 TAO_ADMM *am = (TAO_ADMM *)parent->data; 206 PetscReal temp, temp2; 207 Vec tempJR; 208 209 PetscFunctionBegin; 210 tempJR = am->workJacobianRight; 211 PetscCall(ADMMUpdateConstraintResidualVector(parent, am->subsolverX->solution, z, am->Ax, am->Bz, am->residual)); 212 PetscCall((*am->ops->regobjgrad)(am->subsolverZ, z, f, g, am->regobjgradP)); 213 am->last_reg_val = *f; 214 /* Objective Add + yT(Ax+Bz-c) + mu/2*||Ax+Bz-c||_2^2 */ 215 PetscCall(VecTDot(am->residual, am->y, &temp)); 216 PetscCall(VecTDot(am->residual, am->residual, &temp2)); 217 *f += temp + (am->mu / 2) * temp2; 218 219 /* Gradient. Add + mu*BT(Ax+Bz-c) + yTB*/ 220 PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->residual, tempJR)); 221 PetscCall(VecAXPY(g, am->mu, tempJR)); 222 PetscCall(MatMultTranspose(am->subsolverZ->jacobian_equality, am->y, tempJR)); 223 PetscCall(VecAXPY(g, 1., tempJR)); 224 PetscFunctionReturn(PETSC_SUCCESS); 225 } 226 227 /* Computes epsilon padded L1 norm lambda*sum(sqrt(x^2+eps^2)-eps */ 228 static PetscErrorCode ADMML1EpsilonNorm(Tao tao, Vec x, PetscReal eps, PetscReal *norm) 229 { 230 TAO_ADMM *am = (TAO_ADMM *)tao->data; 231 PetscInt N; 232 233 PetscFunctionBegin; 234 PetscCall(VecGetSize(am->workLeft, &N)); 235 PetscCall(VecPointwiseMult(am->workLeft, x, x)); 236 PetscCall(VecShift(am->workLeft, am->l1epsilon * am->l1epsilon)); 237 PetscCall(VecSqrtAbs(am->workLeft)); 238 PetscCall(VecSum(am->workLeft, norm)); 239 *norm += N * am->l1epsilon; 240 *norm *= am->lambda; 241 PetscFunctionReturn(PETSC_SUCCESS); 242 } 243 244 static PetscErrorCode ADMMInternalHessianUpdate(Mat H, Mat Constraint, PetscBool Identity, void *ptr) 245 { 246 TAO_ADMM *am = (TAO_ADMM *)ptr; 247 248 PetscFunctionBegin; 249 switch (am->update) { 250 case (TAO_ADMM_UPDATE_BASIC): 251 break; 252 case (TAO_ADMM_UPDATE_ADAPTIVE): 253 case 
(TAO_ADMM_UPDATE_ADAPTIVE_RELAXED): 254 if (H && (am->muold != am->mu)) { 255 if (!Identity) { 256 PetscCall(MatAXPY(H, am->mu - am->muold, Constraint, DIFFERENT_NONZERO_PATTERN)); 257 } else { 258 PetscCall(MatShift(H, am->mu - am->muold)); 259 } 260 } 261 break; 262 } 263 PetscFunctionReturn(PETSC_SUCCESS); 264 } 265 266 /* Updates Hessian - adds second derivative of augmented Lagrangian 267 * H \gets H + \rho*ATA 268 Here, \rho does not change in TAO_ADMM_UPDATE_BASIC - thus no-op 269 For ADAPTAIVE,ADAPTIVE_RELAXED, 270 H \gets H + (\rho-\rhoold)*ATA 271 Here, we assume that A is linear constraint i.e., does not change. 272 Thus, for both ADAPTIVE, and RELAXED, ATA matrix is pre-set (except for A=I (null case)) see TaoSetUp_ADMM */ 273 static PetscErrorCode SubHessianUpdate(Tao tao, Vec x, Mat H, Mat Hpre, void *ptr) 274 { 275 Tao parent = (Tao)ptr; 276 TAO_ADMM *am = (TAO_ADMM *)parent->data; 277 278 PetscFunctionBegin; 279 if (am->Hxchange) { 280 /* Case where Hessian gets updated with respect to x vector input. */ 281 PetscCall((*am->ops->misfithess)(am->subsolverX, x, H, Hpre, am->misfithessP)); 282 PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am)); 283 } else if (am->Hxbool) { 284 /* Hessian doesn't get updated. H(x) = c */ 285 /* Update Lagrangian only only per TAO call */ 286 PetscCall(ADMMInternalHessianUpdate(am->subsolverX->hessian, am->ATA, am->xJI, am)); 287 am->Hxbool = PETSC_FALSE; 288 } 289 PetscFunctionReturn(PETSC_SUCCESS); 290 } 291 292 /* Same as SubHessianUpdate, except for B matrix instead of A matrix */ 293 static PetscErrorCode RegHessianUpdate(Tao tao, Vec z, Mat H, Mat Hpre, void *ptr) 294 { 295 Tao parent = (Tao)ptr; 296 TAO_ADMM *am = (TAO_ADMM *)parent->data; 297 298 PetscFunctionBegin; 299 300 if (am->Hzchange) { 301 /* Case where Hessian gets updated with respect to x vector input. 
*/ 302 PetscCall((*am->ops->reghess)(am->subsolverZ, z, H, Hpre, am->reghessP)); 303 PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am)); 304 } else if (am->Hzbool) { 305 /* Hessian doesn't get updated. H(x) = c */ 306 /* Update Lagrangian only only per TAO call */ 307 PetscCall(ADMMInternalHessianUpdate(am->subsolverZ->hessian, am->BTB, am->zJI, am)); 308 am->Hzbool = PETSC_FALSE; 309 } 310 PetscFunctionReturn(PETSC_SUCCESS); 311 } 312 313 /* Shell Matrix routine for A matrix. 314 * This gets used when user puts NULL for 315 * TaoSetJacobianEqualityRoutine(tao, NULL,NULL, ...) 316 * Essentially sets A=I*/ 317 static PetscErrorCode JacobianIdentity(Mat mat, Vec in, Vec out) 318 { 319 PetscFunctionBegin; 320 PetscCall(VecCopy(in, out)); 321 PetscFunctionReturn(PETSC_SUCCESS); 322 } 323 324 /* Shell Matrix routine for B matrix. 325 * This gets used when user puts NULL for 326 * TaoADMMSetRegularizerConstraintJacobian(tao, NULL,NULL, ...) 327 * Sets B=-I */ 328 static PetscErrorCode JacobianIdentityB(Mat mat, Vec in, Vec out) 329 { 330 PetscFunctionBegin; 331 PetscCall(VecCopy(in, out)); 332 PetscCall(VecScale(out, -1.)); 333 PetscFunctionReturn(PETSC_SUCCESS); 334 } 335 336 /* Solve f(x) + g(z) s.t. 
Ax + Bz = c */ 337 static PetscErrorCode TaoSolve_ADMM(Tao tao) 338 { 339 TAO_ADMM *am = (TAO_ADMM *)tao->data; 340 PetscInt N; 341 PetscReal reg_func; 342 PetscBool is_reg_shell; 343 Vec tempL; 344 345 PetscFunctionBegin; 346 if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) { 347 PetscCheck(am->subsolverX->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetMisfitConstraintJacobian() first"); 348 PetscCheck(am->subsolverZ->ops->computejacobianequality, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Must call TaoADMMSetRegularizerConstraintJacobian() first"); 349 if (am->constraint != NULL) PetscCall(VecNorm(am->constraint, NORM_2, &am->const_norm)); 350 } 351 tempL = am->workLeft; 352 PetscCall(VecGetSize(tempL, &N)); 353 354 if (am->Hx && am->ops->misfithess) PetscCall(TaoSetHessian(am->subsolverX, am->Hx, am->Hx, SubHessianUpdate, tao)); 355 356 if (!am->zJI) { 357 /* Currently, B is assumed to be a linear system, i.e., not getting updated*/ 358 PetscCall(MatTransposeMatMult(am->JB, am->JB, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->BTB))); 359 } 360 if (!am->xJI) { 361 /* Currently, A is assumed to be a linear system, i.e., not getting updated*/ 362 PetscCall(MatTransposeMatMult(am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality, MAT_INITIAL_MATRIX, PETSC_DEFAULT, &(am->ATA))); 363 } 364 365 is_reg_shell = PETSC_FALSE; 366 367 PetscCall(PetscObjectTypeCompare((PetscObject)am->subsolverZ, TAOSHELL, &is_reg_shell)); 368 369 if (!is_reg_shell) { 370 switch (am->regswitch) { 371 case (TAO_ADMM_REGULARIZER_USER): 372 break; 373 case (TAO_ADMM_REGULARIZER_SOFT_THRESH): 374 /* Soft Threshold. 
*/ 375 break; 376 } 377 if (am->ops->regobjgrad) PetscCall(TaoSetObjectiveAndGradient(am->subsolverZ, NULL, RegObjGradUpdate, tao)); 378 if (am->Hz && am->ops->reghess) PetscCall(TaoSetHessian(am->subsolverZ, am->Hz, am->Hzpre, RegHessianUpdate, tao)); 379 } 380 381 switch (am->update) { 382 case TAO_ADMM_UPDATE_BASIC: 383 if (am->subsolverX->hessian) { 384 /* In basic case, Hessian does not get updated w.r.t. to spectral penalty 385 * Here, when A is set, i.e., am->xJI, add mu*ATA to Hessian*/ 386 if (!am->xJI) { 387 PetscCall(MatAXPY(am->subsolverX->hessian, am->mu, am->ATA, DIFFERENT_NONZERO_PATTERN)); 388 } else { 389 PetscCall(MatShift(am->subsolverX->hessian, am->mu)); 390 } 391 } 392 if (am->subsolverZ->hessian && am->regswitch == TAO_ADMM_REGULARIZER_USER) { 393 if (am->regswitch == TAO_ADMM_REGULARIZER_USER && !am->zJI) { 394 PetscCall(MatAXPY(am->subsolverZ->hessian, am->mu, am->BTB, DIFFERENT_NONZERO_PATTERN)); 395 } else { 396 PetscCall(MatShift(am->subsolverZ->hessian, am->mu)); 397 } 398 } 399 break; 400 case TAO_ADMM_UPDATE_ADAPTIVE: 401 case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED: 402 break; 403 } 404 405 PetscCall(PetscCitationsRegister(citation, &cited)); 406 tao->reason = TAO_CONTINUE_ITERATING; 407 408 while (tao->reason == TAO_CONTINUE_ITERATING) { 409 PetscTryTypeMethod(tao, update, tao->niter, tao->user_update); 410 PetscCall(VecCopy(am->Bz, am->Bzold)); 411 412 /* x update */ 413 PetscCall(TaoSolve(am->subsolverX)); 414 PetscCall(TaoComputeJacobianEquality(am->subsolverX, am->subsolverX->solution, am->subsolverX->jacobian_equality, am->subsolverX->jacobian_equality_pre)); 415 PetscCall(MatMult(am->subsolverX->jacobian_equality, am->subsolverX->solution, am->Ax)); 416 417 am->Hxbool = PETSC_TRUE; 418 419 /* z update */ 420 switch (am->regswitch) { 421 case TAO_ADMM_REGULARIZER_USER: 422 PetscCall(TaoSolve(am->subsolverZ)); 423 break; 424 case TAO_ADMM_REGULARIZER_SOFT_THRESH: 425 /* L1 assumes A,B jacobians are identity nxn matrix */ 426 
PetscCall(VecWAXPY(am->workJacobianRight, 1 / am->mu, am->y, am->Ax)); 427 PetscCall(TaoSoftThreshold(am->workJacobianRight, -am->lambda / am->mu, am->lambda / am->mu, am->subsolverZ->solution)); 428 break; 429 } 430 am->Hzbool = PETSC_TRUE; 431 /* Returns Ax + Bz - c with updated Ax,Bz vectors */ 432 PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual)); 433 /* Dual variable, y += y + mu*(Ax+Bz-c) */ 434 PetscCall(VecWAXPY(am->y, am->mu, am->residual, am->yold)); 435 436 /* stopping tolerance update */ 437 PetscCall(TaoADMMToleranceUpdate(tao)); 438 439 /* Updating Spectral Penalty */ 440 switch (am->update) { 441 case TAO_ADMM_UPDATE_BASIC: 442 am->muold = am->mu; 443 break; 444 case TAO_ADMM_UPDATE_ADAPTIVE: 445 case TAO_ADMM_UPDATE_ADAPTIVE_RELAXED: 446 if (tao->niter == 0) { 447 PetscCall(VecCopy(am->y, am->y0)); 448 PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold)); 449 if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint)); 450 PetscCall(VecWAXPY(am->yhatold, -am->mu, am->residual, am->yold)); 451 PetscCall(VecCopy(am->Ax, am->Axold)); 452 PetscCall(VecCopy(am->Bz, am->Bz0)); 453 am->muold = am->mu; 454 } else if (tao->niter % am->T == 1) { 455 /* we have compute Bzold in a previous iteration, and we computed Ax above */ 456 PetscCall(VecWAXPY(am->residual, 1., am->Ax, am->Bzold)); 457 if (am->constraint) PetscCall(VecAXPY(am->residual, -1., am->constraint)); 458 PetscCall(VecWAXPY(am->yhat, -am->mu, am->residual, am->yold)); 459 PetscCall(AdaptiveADMMPenaltyUpdate(tao)); 460 PetscCall(VecCopy(am->Ax, am->Axold)); 461 PetscCall(VecCopy(am->Bz, am->Bz0)); 462 PetscCall(VecCopy(am->yhat, am->yhatold)); 463 PetscCall(VecCopy(am->y, am->y0)); 464 } else { 465 am->muold = am->mu; 466 } 467 break; 468 default: 469 break; 470 } 471 tao->niter++; 472 473 /* Calculate original function values. 
misfit part was done in TaoADMMToleranceUpdate*/ 474 switch (am->regswitch) { 475 case TAO_ADMM_REGULARIZER_USER: 476 if (is_reg_shell) { 477 PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, ®_func)); 478 } else { 479 PetscCall((*am->ops->regobjgrad)(am->subsolverZ, am->subsolverX->solution, ®_func, tempL, am->regobjgradP)); 480 } 481 break; 482 case TAO_ADMM_REGULARIZER_SOFT_THRESH: 483 PetscCall(ADMML1EpsilonNorm(tao, am->subsolverZ->solution, am->l1epsilon, ®_func)); 484 break; 485 } 486 PetscCall(VecCopy(am->y, am->yold)); 487 PetscCall(ADMMUpdateConstraintResidualVector(tao, am->subsolverX->solution, am->subsolverZ->solution, am->Ax, am->Bz, am->residual)); 488 PetscCall(VecNorm(am->residual, NORM_2, &am->resnorm)); 489 PetscCall(TaoLogConvergenceHistory(tao, am->last_misfit_val + reg_func, am->dualres, am->resnorm, tao->ksp_its)); 490 491 PetscCall(TaoMonitor(tao, tao->niter, am->last_misfit_val + reg_func, am->dualres, am->resnorm, 1.0)); 492 PetscUseTypeMethod(tao, convergencetest, tao->cnvP); 493 } 494 /* Update vectors */ 495 PetscCall(VecCopy(am->subsolverX->solution, tao->solution)); 496 PetscCall(VecCopy(am->subsolverX->gradient, tao->gradient)); 497 PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", NULL)); 498 PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", NULL)); 499 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL)); 500 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL)); 501 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL)); 502 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL)); 503 PetscFunctionReturn(PETSC_SUCCESS); 504 } 505 506 static PetscErrorCode TaoSetFromOptions_ADMM(Tao tao, PetscOptionItems *PetscOptionsObject) 507 { 508 TAO_ADMM *am = (TAO_ADMM *)tao->data; 509 510 
PetscFunctionBegin; 511 PetscOptionsHeadBegin(PetscOptionsObject, "ADMM problem that solves f(x) in a form of f(x) + g(z) subject to x - z = 0. Norm 1 and 2 are supported. Different subsolver routines can be selected. "); 512 PetscCall(PetscOptionsReal("-tao_admm_regularizer_coefficient", "regularizer constant", "", am->lambda, &am->lambda, NULL)); 513 PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty", "Constant for Augmented Lagrangian term.", "", am->mu, &am->mu, NULL)); 514 PetscCall(PetscOptionsReal("-tao_admm_relaxation_parameter", "x relaxation parameter for Z update.", "", am->gamma, &am->gamma, NULL)); 515 PetscCall(PetscOptionsReal("-tao_admm_tolerance_update_factor", "ADMM dynamic tolerance update factor.", "", am->tol, &am->tol, NULL)); 516 PetscCall(PetscOptionsReal("-tao_admm_spectral_penalty_update_factor", "ADMM spectral penalty update curvature safeguard value.", "", am->orthval, &am->orthval, NULL)); 517 PetscCall(PetscOptionsReal("-tao_admm_minimum_spectral_penalty", "Set ADMM minimum spectral penalty.", "", am->mumin, &am->mumin, NULL)); 518 PetscCall(PetscOptionsEnum("-tao_admm_dual_update", "Lagrangian dual update policy", "TaoADMMUpdateType", TaoADMMUpdateTypes, (PetscEnum)am->update, (PetscEnum *)&am->update, NULL)); 519 PetscCall(PetscOptionsEnum("-tao_admm_regularizer_type", "ADMM regularizer update rule", "TaoADMMRegularizerType", TaoADMMRegularizerTypes, (PetscEnum)am->regswitch, (PetscEnum *)&am->regswitch, NULL)); 520 PetscOptionsHeadEnd(); 521 PetscCall(TaoSetFromOptions(am->subsolverX)); 522 if (am->regswitch != TAO_ADMM_REGULARIZER_SOFT_THRESH) PetscCall(TaoSetFromOptions(am->subsolverZ)); 523 PetscFunctionReturn(PETSC_SUCCESS); 524 } 525 526 static PetscErrorCode TaoView_ADMM(Tao tao, PetscViewer viewer) 527 { 528 TAO_ADMM *am = (TAO_ADMM *)tao->data; 529 530 PetscFunctionBegin; 531 PetscCall(PetscViewerASCIIPushTab(viewer)); 532 PetscCall(TaoView(am->subsolverX, viewer)); 533 PetscCall(TaoView(am->subsolverZ, viewer)); 534 
PetscCall(PetscViewerASCIIPopTab(viewer)); 535 PetscFunctionReturn(PETSC_SUCCESS); 536 } 537 538 static PetscErrorCode TaoSetUp_ADMM(Tao tao) 539 { 540 TAO_ADMM *am = (TAO_ADMM *)tao->data; 541 PetscInt n, N, M; 542 543 PetscFunctionBegin; 544 PetscCall(VecGetLocalSize(tao->solution, &n)); 545 PetscCall(VecGetSize(tao->solution, &N)); 546 /* If Jacobian is given as NULL, it means Jacobian is identity matrix with size of solution vector */ 547 if (!am->JB) { 548 am->zJI = PETSC_TRUE; 549 PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JB)); 550 PetscCall(MatShellSetOperation(am->JB, MATOP_MULT, (void (*)(void))JacobianIdentityB)); 551 PetscCall(MatShellSetOperation(am->JB, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentityB)); 552 am->JBpre = am->JB; 553 } 554 if (!am->JA) { 555 am->xJI = PETSC_TRUE; 556 PetscCall(MatCreateShell(PetscObjectComm((PetscObject)tao), n, n, PETSC_DETERMINE, PETSC_DETERMINE, NULL, &am->JA)); 557 PetscCall(MatShellSetOperation(am->JA, MATOP_MULT, (void (*)(void))JacobianIdentity)); 558 PetscCall(MatShellSetOperation(am->JA, MATOP_MULT_TRANSPOSE, (void (*)(void))JacobianIdentity)); 559 am->JApre = am->JA; 560 } 561 PetscCall(MatCreateVecs(am->JA, NULL, &am->Ax)); 562 if (!tao->gradient) PetscCall(VecDuplicate(tao->solution, &tao->gradient)); 563 PetscCall(TaoSetSolution(am->subsolverX, tao->solution)); 564 if (!am->z) { 565 PetscCall(VecDuplicate(tao->solution, &am->z)); 566 PetscCall(VecSet(am->z, 0.0)); 567 } 568 PetscCall(TaoSetSolution(am->subsolverZ, am->z)); 569 if (!am->workLeft) PetscCall(VecDuplicate(tao->solution, &am->workLeft)); 570 if (!am->Axold) PetscCall(VecDuplicate(am->Ax, &am->Axold)); 571 if (!am->workJacobianRight) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight)); 572 if (!am->workJacobianRight2) PetscCall(VecDuplicate(am->Ax, &am->workJacobianRight2)); 573 if (!am->Bz) PetscCall(VecDuplicate(am->Ax, &am->Bz)); 574 if (!am->Bzold) 
PetscCall(VecDuplicate(am->Ax, &am->Bzold)); 575 if (!am->Bz0) PetscCall(VecDuplicate(am->Ax, &am->Bz0)); 576 if (!am->y) { 577 PetscCall(VecDuplicate(am->Ax, &am->y)); 578 PetscCall(VecSet(am->y, 0.0)); 579 } 580 if (!am->yold) { 581 PetscCall(VecDuplicate(am->Ax, &am->yold)); 582 PetscCall(VecSet(am->yold, 0.0)); 583 } 584 if (!am->y0) { 585 PetscCall(VecDuplicate(am->Ax, &am->y0)); 586 PetscCall(VecSet(am->y0, 0.0)); 587 } 588 if (!am->yhat) { 589 PetscCall(VecDuplicate(am->Ax, &am->yhat)); 590 PetscCall(VecSet(am->yhat, 0.0)); 591 } 592 if (!am->yhatold) { 593 PetscCall(VecDuplicate(am->Ax, &am->yhatold)); 594 PetscCall(VecSet(am->yhatold, 0.0)); 595 } 596 if (!am->residual) { 597 PetscCall(VecDuplicate(am->Ax, &am->residual)); 598 PetscCall(VecSet(am->residual, 0.0)); 599 } 600 if (!am->constraint) { 601 am->constraint = NULL; 602 } else { 603 PetscCall(VecGetSize(am->constraint, &M)); 604 PetscCheck(M == N, PetscObjectComm((PetscObject)tao), PETSC_ERR_ARG_WRONGSTATE, "Solution vector and constraint vector must be of same size!"); 605 } 606 607 /* Save changed tao tolerance for adaptive tolerance */ 608 if (tao->gatol_changed) am->gatol_admm = tao->gatol; 609 if (tao->catol_changed) am->catol_admm = tao->catol; 610 611 /*Update spectral and dual elements to X subsolver */ 612 PetscCall(TaoSetObjectiveAndGradient(am->subsolverX, NULL, SubObjGradUpdate, tao)); 613 PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverX, am->JA, am->JApre, am->ops->misfitjac, am->misfitjacobianP)); 614 PetscCall(TaoSetJacobianEqualityRoutine(am->subsolverZ, am->JB, am->JBpre, am->ops->regjac, am->regjacobianP)); 615 PetscFunctionReturn(PETSC_SUCCESS); 616 } 617 618 static PetscErrorCode TaoDestroy_ADMM(Tao tao) 619 { 620 TAO_ADMM *am = (TAO_ADMM *)tao->data; 621 622 PetscFunctionBegin; 623 PetscCall(VecDestroy(&am->z)); 624 PetscCall(VecDestroy(&am->Ax)); 625 PetscCall(VecDestroy(&am->Axold)); 626 PetscCall(VecDestroy(&am->Bz)); 627 PetscCall(VecDestroy(&am->Bzold)); 628 
PetscCall(VecDestroy(&am->Bz0)); 629 PetscCall(VecDestroy(&am->residual)); 630 PetscCall(VecDestroy(&am->y)); 631 PetscCall(VecDestroy(&am->yold)); 632 PetscCall(VecDestroy(&am->y0)); 633 PetscCall(VecDestroy(&am->yhat)); 634 PetscCall(VecDestroy(&am->yhatold)); 635 PetscCall(VecDestroy(&am->workLeft)); 636 PetscCall(VecDestroy(&am->workJacobianRight)); 637 PetscCall(VecDestroy(&am->workJacobianRight2)); 638 639 PetscCall(MatDestroy(&am->JA)); 640 PetscCall(MatDestroy(&am->JB)); 641 if (!am->xJI) PetscCall(MatDestroy(&am->JApre)); 642 if (!am->zJI) PetscCall(MatDestroy(&am->JBpre)); 643 if (am->Hx) { 644 PetscCall(MatDestroy(&am->Hx)); 645 PetscCall(MatDestroy(&am->Hxpre)); 646 } 647 if (am->Hz) { 648 PetscCall(MatDestroy(&am->Hz)); 649 PetscCall(MatDestroy(&am->Hzpre)); 650 } 651 PetscCall(MatDestroy(&am->ATA)); 652 PetscCall(MatDestroy(&am->BTB)); 653 PetscCall(TaoDestroy(&am->subsolverX)); 654 PetscCall(TaoDestroy(&am->subsolverZ)); 655 am->parent = NULL; 656 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", NULL)); 657 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", NULL)); 658 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", NULL)); 659 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", NULL)); 660 PetscCall(PetscFree(tao->data)); 661 PetscFunctionReturn(PETSC_SUCCESS); 662 } 663 664 /*MC 665 666 TAOADMM - Alternating direction method of multipliers method fo solving linear problems with 667 constraints. in a min_x f(x) + g(z) s.t. Ax+Bz=c. 668 This algorithm employs two sub Tao solvers, of which type can be specified 669 by the user. User need to provide ObjectiveAndGradient routine, and/or HessianRoutine for both subsolvers. 670 Hessians can be given boolean flag determining whether they change with respect to a input vector. This can be set via 671 TaoADMMSet{Misfit,Regularizer}HessianChangeStatus. 
672 Second subsolver does support TAOSHELL. It should be noted that L1-norm is used for objective value for TAOSHELL type. 673 There is option to set regularizer option, and currently soft-threshold is implemented. For spectral penalty update, 674 currently there are basic option and adaptive option. 675 Constraint is set at Ax+Bz=c, and A and B can be set with TaoADMMSet{Misfit,Regularizer}ConstraintJacobian. 676 c can be set with TaoADMMSetConstraintVectorRHS. 677 The user can also provide regularizer weight for second subsolver. 678 679 References: 680 . * - Xu, Zheng and Figueiredo, Mario A. T. and Yuan, Xiaoming and Studer, Christoph and Goldstein, Tom 681 "Adaptive Relaxed ADMM: Convergence Theory and Practical Implementation" 682 The IEEE Conference on Computer Vision and Pattern Recognition (CVPR), July, 2017. 683 684 Options Database Keys: 685 + -tao_admm_regularizer_coefficient - regularizer constant (default 1.e-6) 686 . -tao_admm_spectral_penalty - Constant for Augmented Lagrangian term (default 1.) 687 . -tao_admm_relaxation_parameter - relaxation parameter for Z update (default 1.) 688 . -tao_admm_tolerance_update_factor - ADMM dynamic tolerance update factor (default 1.e-12) 689 . -tao_admm_spectral_penalty_update_factor - ADMM spectral penalty update curvature safeguard value (default 0.2) 690 . -tao_admm_minimum_spectral_penalty - Set ADMM minimum spectral penalty (default 0) 691 . 
-tao_admm_dual_update - Lagrangian dual update policy ("basic","adaptive","adaptive-relaxed") (default "basic") 692 - -tao_admm_regularizer_type - ADMM regularizer update rule ("user","soft-threshold") (default "soft-threshold") 693 694 Level: beginner 695 696 .seealso: `TaoADMMSetMisfitHessianChangeStatus()`, `TaoADMMSetRegHessianChangeStatus()`, `TaoADMMGetSpectralPenalty()`, 697 `TaoADMMGetMisfitSubsolver()`, `TaoADMMGetRegularizationSubsolver()`, `TaoADMMSetConstraintVectorRHS()`, 698 `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetRegularizerCoefficient()`, `TaoADMMGetRegularizerCoefficient()`, 699 `TaoADMMSetRegularizerConstraintJacobian()`, `TaoADMMSetMisfitConstraintJacobian()`, 700 `TaoADMMSetMisfitObjectiveAndGradientRoutine()`, `TaoADMMSetMisfitHessianRoutine()`, 701 `TaoADMMSetRegularizerObjectiveAndGradientRoutine()`, `TaoADMMSetRegularizerHessianRoutine()`, 702 `TaoGetADMMParentTao()`, `TaoADMMGetDualVector()`, `TaoADMMSetRegularizerType()`, 703 `TaoADMMGetRegularizerType()`, `TaoADMMSetUpdateType()`, `TaoADMMGetUpdateType()` 704 M*/ 705 706 PETSC_EXTERN PetscErrorCode TaoCreate_ADMM(Tao tao) 707 { 708 TAO_ADMM *am; 709 710 PetscFunctionBegin; 711 PetscCall(PetscNew(&am)); 712 713 tao->ops->destroy = TaoDestroy_ADMM; 714 tao->ops->setup = TaoSetUp_ADMM; 715 tao->ops->setfromoptions = TaoSetFromOptions_ADMM; 716 tao->ops->view = TaoView_ADMM; 717 tao->ops->solve = TaoSolve_ADMM; 718 719 tao->data = (void *)am; 720 am->l1epsilon = 1e-6; 721 am->lambda = 1e-4; 722 am->mu = 1.; 723 am->muold = 0.; 724 am->mueps = PETSC_MACHINE_EPSILON; 725 am->mumin = 0.; 726 am->orthval = 0.2; 727 am->T = 2; 728 am->parent = tao; 729 am->update = TAO_ADMM_UPDATE_BASIC; 730 am->regswitch = TAO_ADMM_REGULARIZER_SOFT_THRESH; 731 am->tol = PETSC_SMALL; 732 am->const_norm = 0; 733 am->resnorm = 0; 734 am->dualres = 0; 735 am->ops->regobjgrad = NULL; 736 am->ops->reghess = NULL; 737 am->gamma = 1; 738 am->regobjgradP = NULL; 739 am->reghessP = NULL; 740 am->gatol_admm = 1e-8; 
741 am->catol_admm = 0; 742 am->Hxchange = PETSC_TRUE; 743 am->Hzchange = PETSC_TRUE; 744 am->Hzbool = PETSC_TRUE; 745 am->Hxbool = PETSC_TRUE; 746 747 PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverX)); 748 PetscCall(TaoSetOptionsPrefix(am->subsolverX, "misfit_")); 749 PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverX, (PetscObject)tao, 1)); 750 PetscCall(TaoCreate(PetscObjectComm((PetscObject)tao), &am->subsolverZ)); 751 PetscCall(TaoSetOptionsPrefix(am->subsolverZ, "reg_")); 752 PetscCall(PetscObjectIncrementTabLevel((PetscObject)am->subsolverZ, (PetscObject)tao, 1)); 753 754 PetscCall(TaoSetType(am->subsolverX, TAONLS)); 755 PetscCall(TaoSetType(am->subsolverZ, TAONLS)); 756 PetscCall(PetscObjectCompose((PetscObject)am->subsolverX, "TaoGetADMMParentTao_ADMM", (PetscObject)tao)); 757 PetscCall(PetscObjectCompose((PetscObject)am->subsolverZ, "TaoGetADMMParentTao_ADMM", (PetscObject)tao)); 758 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetRegularizerType_C", TaoADMMSetRegularizerType_ADMM)); 759 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetRegularizerType_C", TaoADMMGetRegularizerType_ADMM)); 760 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMSetUpdateType_C", TaoADMMSetUpdateType_ADMM)); 761 PetscCall(PetscObjectComposeFunction((PetscObject)tao, "TaoADMMGetUpdateType_C", TaoADMMGetUpdateType_ADMM)); 762 PetscFunctionReturn(PETSC_SUCCESS); 763 } 764 765 /*@ 766 TaoADMMSetMisfitHessianChangeStatus - Set boolean that determines whether Hessian matrix of misfit subsolver changes with respect to input vector. 767 768 Collective 769 770 Input Parameters: 771 + tao - the Tao solver context. 772 - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise. 
773 774 Level: advanced 775 776 .seealso: `TAOADMM` 777 @*/ 778 PetscErrorCode TaoADMMSetMisfitHessianChangeStatus(Tao tao, PetscBool b) 779 { 780 TAO_ADMM *am = (TAO_ADMM *)tao->data; 781 782 PetscFunctionBegin; 783 am->Hxchange = b; 784 PetscFunctionReturn(PETSC_SUCCESS); 785 } 786 787 /*@ 788 TaoADMMSetRegHessianChangeStatus - Set boolean that determines whether Hessian matrix of regularization subsolver changes with respect to input vector. 789 790 Collective 791 792 Input Parameters: 793 + tao - the `Tao` solver context 794 - b - the Hessian matrix change status boolean, `PETSC_FALSE` when the Hessian matrix does not change, `PETSC_TRUE` otherwise. 795 796 Level: advanced 797 798 .seealso: `TAOADMM` 799 @*/ 800 PetscErrorCode TaoADMMSetRegHessianChangeStatus(Tao tao, PetscBool b) 801 { 802 TAO_ADMM *am = (TAO_ADMM *)tao->data; 803 804 PetscFunctionBegin; 805 am->Hzchange = b; 806 PetscFunctionReturn(PETSC_SUCCESS); 807 } 808 809 /*@ 810 TaoADMMSetSpectralPenalty - Set the spectral penalty (mu) value 811 812 Collective 813 814 Input Parameters: 815 + tao - the `Tao` solver context 816 - mu - spectral penalty 817 818 Level: advanced 819 820 .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TAOADMM` 821 @*/ 822 PetscErrorCode TaoADMMSetSpectralPenalty(Tao tao, PetscReal mu) 823 { 824 TAO_ADMM *am = (TAO_ADMM *)tao->data; 825 826 PetscFunctionBegin; 827 am->mu = mu; 828 PetscFunctionReturn(PETSC_SUCCESS); 829 } 830 831 /*@ 832 TaoADMMGetSpectralPenalty - Get the spectral penalty (mu) value 833 834 Collective 835 836 Input Parameter: 837 . tao - the `Tao` solver context 838 839 Output Parameter: 840 . 
mu - spectral penalty 841 842 Level: advanced 843 844 .seealso: `TaoADMMSetMinimumSpectralPenalty()`, `TaoADMMSetSpectralPenalty()`, `TAOADMM` 845 @*/ 846 PetscErrorCode TaoADMMGetSpectralPenalty(Tao tao, PetscReal *mu) 847 { 848 TAO_ADMM *am = (TAO_ADMM *)tao->data; 849 850 PetscFunctionBegin; 851 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 852 PetscValidRealPointer(mu, 2); 853 *mu = am->mu; 854 PetscFunctionReturn(PETSC_SUCCESS); 855 } 856 857 /*@ 858 TaoADMMGetMisfitSubsolver - Get the pointer to the misfit subsolver inside `TAOADMM` 859 860 Collective 861 862 Input Parameter: 863 . tao - the `Tao` solver context 864 865 Output Parameter: 866 . misfit - the `Tao` subsolver context 867 868 Level: advanced 869 870 .seealso: `TAOADMM`, `Tao` 871 @*/ 872 PetscErrorCode TaoADMMGetMisfitSubsolver(Tao tao, Tao *misfit) 873 { 874 TAO_ADMM *am = (TAO_ADMM *)tao->data; 875 876 PetscFunctionBegin; 877 *misfit = am->subsolverX; 878 PetscFunctionReturn(PETSC_SUCCESS); 879 } 880 881 /*@ 882 TaoADMMGetRegularizationSubsolver - Get the pointer to the regularization subsolver inside `TAOADMM` 883 884 Collective 885 886 Input Parameter: 887 . tao - the `Tao` solver context 888 889 Output Parameter: 890 . 
reg - the `Tao` subsolver context 891 892 Level: advanced 893 894 .seealso: `TAOADMM`, `Tao` 895 @*/ 896 PetscErrorCode TaoADMMGetRegularizationSubsolver(Tao tao, Tao *reg) 897 { 898 TAO_ADMM *am = (TAO_ADMM *)tao->data; 899 900 PetscFunctionBegin; 901 *reg = am->subsolverZ; 902 PetscFunctionReturn(PETSC_SUCCESS); 903 } 904 905 /*@ 906 TaoADMMSetConstraintVectorRHS - Set the RHS constraint vector for `TAOADMM` 907 908 Collective 909 910 Input Parameters: 911 + tao - the `Tao` solver context 912 - c - RHS vector 913 914 Level: advanced 915 916 .seealso: `TAOADMM` 917 @*/ 918 PetscErrorCode TaoADMMSetConstraintVectorRHS(Tao tao, Vec c) 919 { 920 TAO_ADMM *am = (TAO_ADMM *)tao->data; 921 922 PetscFunctionBegin; 923 am->constraint = c; 924 PetscFunctionReturn(PETSC_SUCCESS); 925 } 926 927 /*@ 928 TaoADMMSetMinimumSpectralPenalty - Set the minimum value for the spectral penalty 929 930 Collective 931 932 Input Parameters: 933 + tao - the `Tao` solver context 934 - mu - minimum spectral penalty value 935 936 Level: advanced 937 938 .seealso: `TaoADMMGetSpectralPenalty()`, `TAOADMM` 939 @*/ 940 PetscErrorCode TaoADMMSetMinimumSpectralPenalty(Tao tao, PetscReal mu) 941 { 942 TAO_ADMM *am = (TAO_ADMM *)tao->data; 943 944 PetscFunctionBegin; 945 am->mumin = mu; 946 PetscFunctionReturn(PETSC_SUCCESS); 947 } 948 949 /*@ 950 TaoADMMSetRegularizerCoefficient - Set the regularization coefficient lambda for L1 norm regularization case 951 952 Collective 953 954 Input Parameters: 955 + tao - the `Tao` solver context 956 - lambda - L1-norm regularizer coefficient 957 958 Level: advanced 959 960 .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM` 961 @*/ 962 PetscErrorCode TaoADMMSetRegularizerCoefficient(Tao tao, PetscReal lambda) 963 { 964 TAO_ADMM *am = (TAO_ADMM *)tao->data; 965 966 PetscFunctionBegin; 967 am->lambda = lambda; 968 PetscFunctionReturn(PETSC_SUCCESS); 969 } 970 971 /*@ 972 TaoADMMGetRegularizerCoefficient - Get 
the regularization coefficient lambda for L1 norm regularization case 973 974 Collective 975 976 Input Parameter: 977 . tao - the `Tao` solver context 978 979 Output Parameter: 980 . lambda - L1-norm regularizer coefficient 981 982 Level: advanced 983 984 .seealso: `TaoADMMSetMisfitConstraintJacobian()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM` 985 @*/ 986 PetscErrorCode TaoADMMGetRegularizerCoefficient(Tao tao, PetscReal *lambda) 987 { 988 TAO_ADMM *am = (TAO_ADMM *)tao->data; 989 990 PetscFunctionBegin; 991 *lambda = am->lambda; 992 PetscFunctionReturn(PETSC_SUCCESS); 993 } 994 995 /*@C 996 TaoADMMSetMisfitConstraintJacobian - Set the constraint matrix B for the `TAOADMM` algorithm. Matrix B constrains the z variable. 997 998 Collective 999 1000 Input Parameters: 1001 + tao - the Tao solver context 1002 . J - user-created regularizer constraint Jacobian matrix 1003 . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J` 1004 . 
func - function pointer for the regularizer constraint Jacobian update function 1005 - ctx - user context for the regularizer Hessian 1006 1007 Level: advanced 1008 1009 .seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetRegularizerConstraintJacobian()`, `TAOADMM` 1010 @*/ 1011 PetscErrorCode TaoADMMSetMisfitConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx) 1012 { 1013 TAO_ADMM *am = (TAO_ADMM *)tao->data; 1014 1015 PetscFunctionBegin; 1016 PetscValidHeaderSpecific(tao, TAO_CLASSID, 1); 1017 if (J) { 1018 PetscValidHeaderSpecific(J, MAT_CLASSID, 2); 1019 PetscCheckSameComm(tao, 1, J, 2); 1020 } 1021 if (Jpre) { 1022 PetscValidHeaderSpecific(Jpre, MAT_CLASSID, 3); 1023 PetscCheckSameComm(tao, 1, Jpre, 3); 1024 } 1025 if (ctx) am->misfitjacobianP = ctx; 1026 if (func) am->ops->misfitjac = func; 1027 1028 if (J) { 1029 PetscCall(PetscObjectReference((PetscObject)J)); 1030 PetscCall(MatDestroy(&am->JA)); 1031 am->JA = J; 1032 } 1033 if (Jpre) { 1034 PetscCall(PetscObjectReference((PetscObject)Jpre)); 1035 PetscCall(MatDestroy(&am->JApre)); 1036 am->JApre = Jpre; 1037 } 1038 PetscFunctionReturn(PETSC_SUCCESS); 1039 } 1040 1041 /*@C 1042 TaoADMMSetRegularizerConstraintJacobian - Set the constraint matrix B for `TAOADMM` algorithm. Matrix B constraints z variable. 1043 1044 Collective 1045 1046 Input Parameters: 1047 + tao - the `Tao` solver context 1048 . J - user-created regularizer constraint Jacobian matrix 1049 . Jpre - user-created regularizer Jacobian constraint matrix for constructing the preconditioner, often this is `J` 1050 . 
. func - function pointer for the regularizer constraint Jacobian update function
- ctx - user context for the regularizer Hessian

  Level: advanced

.seealso: `TaoADMMSetRegularizerCoefficient()`, `TaoADMMSetMisfitConstraintJacobian()`, `TAOADMM`
@*/
PetscErrorCode TaoADMMSetRegularizerConstraintJacobian(Tao tao, Mat J, Mat Jpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (J) {
    PetscValidHeaderSpecific(J, MAT_CLASSID, 2);
    PetscCheckSameComm(tao, 1, J, 2);
  }
  if (Jpre) {
    PetscValidHeaderSpecific(Jpre, MAT_CLASSID, 3);
    PetscCheckSameComm(tao, 1, Jpre, 3);
  }
  if (ctx) am->regjacobianP = ctx;
  if (func) am->ops->regjac = func;

  /* Reference the new matrix before destroying the old one so the swap is
     safe even if J (or Jpre) is the matrix already stored */
  if (J) {
    PetscCall(PetscObjectReference((PetscObject)J));
    PetscCall(MatDestroy(&am->JB));
    am->JB = J;
  }
  if (Jpre) {
    PetscCall(PetscObjectReference((PetscObject)Jpre));
    PetscCall(MatDestroy(&am->JBpre));
    am->JBpre = Jpre;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoADMMSetMisfitObjectiveAndGradientRoutine - Sets the user-defined misfit call-back function

  Collective

  Input Parameters:
+ tao - the `Tao` context
. func - function pointer for the misfit value and gradient evaluation
- ctx - user context for the misfit

  Level: advanced

.seealso: `TAOADMM`
@*/
PetscErrorCode TaoADMMSetMisfitObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  am->misfitobjgradP     = ctx;
  am->ops->misfitobjgrad = func;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoADMMSetMisfitHessianRoutine - Sets the user-defined misfit Hessian call-back
  function into the algorithm, to be used for subsolverX.

  Collective

  Input Parameters:
+ tao - the `Tao` context
. H - user-created matrix for the Hessian of the misfit term
. Hpre - user-created matrix for the preconditioner of Hessian of the misfit term
. func - function pointer for the misfit Hessian evaluation
- ctx - user context for the misfit Hessian

  Level: advanced

.seealso: `TAOADMM`
@*/
PetscErrorCode TaoADMMSetMisfitHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (H) {
    PetscValidHeaderSpecific(H, MAT_CLASSID, 2);
    PetscCheckSameComm(tao, 1, H, 2);
  }
  if (Hpre) {
    PetscValidHeaderSpecific(Hpre, MAT_CLASSID, 3);
    PetscCheckSameComm(tao, 1, Hpre, 3);
  }
  if (ctx) am->misfithessP = ctx;
  if (func) am->ops->misfithess = func;
  /* Reference the new matrix before destroying the old one so the swap is
     safe even if H (or Hpre) is the matrix already stored */
  if (H) {
    PetscCall(PetscObjectReference((PetscObject)H));
    PetscCall(MatDestroy(&am->Hx));
    am->Hx = H;
  }
  if (Hpre) {
    PetscCall(PetscObjectReference((PetscObject)Hpre));
    PetscCall(MatDestroy(&am->Hxpre));
    am->Hxpre = Hpre;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoADMMSetRegularizerObjectiveAndGradientRoutine - Sets the user-defined regularizer call-back function

  Collective

  Input Parameters:
+ tao - the Tao context
. func - function pointer for the regularizer value and gradient evaluation
- ctx - user context for the regularizer

  Level: advanced

.seealso: `TAOADMM`
@*/
PetscErrorCode TaoADMMSetRegularizerObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void *), void *ctx)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  am->regobjgradP     = ctx;
  am->ops->regobjgrad = func;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@C
  TaoADMMSetRegularizerHessianRoutine - Sets the user-defined regularizer Hessian call-back
  function, to be used for subsolverZ.

  Collective

  Input Parameters:
+ tao - the `Tao` context
. H - user-created matrix for the Hessian of the regularization term
. Hpre - user-created matrix for the preconditioner of Hessian of the regularization term
. func - function pointer for the regularizer Hessian evaluation
- ctx - user context for the regularizer Hessian

  Level: advanced

.seealso: `TAOADMM`
@*/
PetscErrorCode TaoADMMSetRegularizerHessianRoutine(Tao tao, Mat H, Mat Hpre, PetscErrorCode (*func)(Tao, Vec, Mat, Mat, void *), void *ctx)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  if (H) {
    PetscValidHeaderSpecific(H, MAT_CLASSID, 2);
    PetscCheckSameComm(tao, 1, H, 2);
  }
  if (Hpre) {
    PetscValidHeaderSpecific(Hpre, MAT_CLASSID, 3);
    PetscCheckSameComm(tao, 1, Hpre, 3);
  }
  if (ctx) am->reghessP = ctx;
  if (func) am->ops->reghess = func;
  /* Reference the new matrix before destroying the old one so the swap is
     safe even if H (or Hpre) is the matrix already stored */
  if (H) {
    PetscCall(PetscObjectReference((PetscObject)H));
    PetscCall(MatDestroy(&am->Hz));
    am->Hz = H;
  }
  if (Hpre) {
    PetscCall(PetscObjectReference((PetscObject)Hpre));
    PetscCall(MatDestroy(&am->Hzpre));
    am->Hzpre = Hpre;
  }
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoGetADMMParentTao - Gets pointer to parent `TAOADMM`, used by inner subsolver.

  Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. admm_tao - the parent `Tao` context

  Level: advanced

.seealso: `TAOADMM`
@*/
PetscErrorCode TaoGetADMMParentTao(Tao tao, Tao *admm_tao)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  /* The parent was composed onto the subsolver in TaoCreate_ADMM; the query
     returns NULL in *admm_tao if tao is not an ADMM subsolver */
  PetscCall(PetscObjectQuery((PetscObject)tao, "TaoGetADMMParentTao_ADMM", (PetscObject *)admm_tao));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoADMMGetDualVector - Returns the dual vector associated with the current `TAOADMM` state

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. Y - the dual vector of the current `TAOADMM` state (borrowed reference; do not destroy)

  Level: intermediate

.seealso: `TAOADMM`
@*/
PetscErrorCode TaoADMMGetDualVector(Tao tao, Vec *Y)
{
  TAO_ADMM *am = (TAO_ADMM *)tao->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  /* No new reference is taken on am->y */
  *Y = am->y;
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoADMMSetRegularizerType - Set regularizer type for `TAOADMM` routine

  Logically Collective

  Input Parameters:
+ tao - the `Tao` context
- type - regularizer type

  Options Database Key:
. -tao_admm_regularizer_type <admm_regularizer_user,admm_regularizer_soft_thresh> - select the regularizer

  Level: intermediate

.seealso: `TaoADMMGetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
@*/
PetscErrorCode TaoADMMSetRegularizerType(Tao tao, TaoADMMRegularizerType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  /* type must match on all ranks, hence logically collective */
  PetscValidLogicalCollectiveEnum(tao, type, 2);
  PetscTryMethod(tao, "TaoADMMSetRegularizerType_C", (Tao, TaoADMMRegularizerType), (tao, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoADMMGetRegularizerType - Gets the type of regularizer routine for `TAOADMM`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. type - the type of regularizer

  Level: intermediate

.seealso: `TaoADMMSetRegularizerType()`, `TaoADMMRegularizerType`, `TAOADMM`
@*/
PetscErrorCode TaoADMMGetRegularizerType(Tao tao, TaoADMMRegularizerType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscUseMethod(tao, "TaoADMMGetRegularizerType_C", (Tao, TaoADMMRegularizerType *), (tao, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoADMMSetUpdateType - Set update routine for `TAOADMM` routine

  Logically Collective

  Input Parameters:
+ tao - the `Tao` context
- type - spectral parameter update type

  Options Database Key:
. -tao_admm_dual_update <basic,adaptive,adaptive-relaxed> - select the dual update policy

  Level: intermediate

.seealso: `TaoADMMGetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
@*/
PetscErrorCode TaoADMMSetUpdateType(Tao tao, TaoADMMUpdateType type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  /* type must match on all ranks, hence logically collective */
  PetscValidLogicalCollectiveEnum(tao, type, 2);
  PetscTryMethod(tao, "TaoADMMSetUpdateType_C", (Tao, TaoADMMUpdateType), (tao, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}

/*@
  TaoADMMGetUpdateType - Gets the type of spectral penalty update routine for `TAOADMM`

  Not Collective

  Input Parameter:
. tao - the `Tao` context

  Output Parameter:
. type - the type of spectral penalty update routine

  Level: intermediate

.seealso: `TaoADMMSetUpdateType()`, `TaoADMMUpdateType`, `TAOADMM`
@*/
PetscErrorCode TaoADMMGetUpdateType(Tao tao, TaoADMMUpdateType *type)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao, TAO_CLASSID, 1);
  PetscUseMethod(tao, "TaoADMMGetUpdateType_C", (Tao, TaoADMMUpdateType *), (tao, type));
  PetscFunctionReturn(PETSC_SUCCESS);
}