xref: /petsc/src/tao/interface/taosolver_fg.c (revision 2e16c0ce58b3a4ec287cbc0a0807bfb0a0fa5ac9)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
/*@
  TaoSetSolution - Sets the vector holding the initial guess for the solve

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
- x0  - the initial guess

  Level: beginner

.seealso: `Tao`, `TaoCreate()`, `TaoSolve()`, `TaoGetSolution()`
@*/
PetscErrorCode TaoSetSolution(Tao tao, Vec x0)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (x0) PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
  /* reference before destroy so that x0 == tao->solution is handled safely */
  PetscCall(PetscObjectReference((PetscObject)x0));
  PetscCall(VecDestroy(&tao->solution));
  tao->solution = x0;
  PetscFunctionReturn(0);
}
25 
/*
  TaoTestGradient - Compares a hand-coded gradient g1 (already evaluated at x)
  against a finite difference approximation, printing norm comparisons so the
  user can detect errors in their gradient routine.

  Enabled by the -tao_test_gradient option; -tao_test_gradient_view
  additionally displays the entries of both gradients and their difference.

  Input Parameters:
+ tao - the Tao context
. x   - the point at which the gradient was evaluated
- g1  - the hand-coded gradient previously computed at x

  Developer Note: called from TaoComputeGradient() and
  TaoComputeObjectiveAndGradient() after each gradient evaluation.
*/
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  /* static: the explanatory banner below is printed only once per program run */
  static PetscBool  directionsprinted = PETSC_FALSE;

  PetscFunctionBegin;
  PetscObjectOptionsBegin((PetscObject)tao);
  PetscCall(PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test));
  PetscCall(PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print));
  PetscOptionsEnd();
  if (!test) {
    /* mviewer was created by PetscOptionsViewer() only when the view option was given */
    if (complete_print) {
      PetscCall(PetscViewerDestroy(&mviewer));
    }
    PetscFunctionReturn(0);
  }

  PetscCall(PetscObjectGetComm((PetscObject)tao,&comm));
  PetscCall(PetscViewerASCIIGetStdout(comm,&viewer));
  /* save the current tab level so it can be restored after printing */
  PetscCall(PetscViewerASCIIGetTab(viewer, &tabs));
  PetscCall(PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n"));
  if (!complete_print && !directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n"));
  }
  if (!directionsprinted) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n"));
    PetscCall(PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Gradient is probably correct.\n"));
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) PetscCall(PetscViewerPushFormat(mviewer,format));

  PetscCall(VecDuplicate(x,&g2));
  PetscCall(VecDuplicate(x,&g3));

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  PetscCall(TaoDefaultComputeGradient(tao,x,g2,NULL));

  PetscCall(VecNorm(g2,NORM_2,&fdnorm));
  PetscCall(VecNorm(g1,NORM_2,&hcnorm));
  PetscCall(VecNorm(g2,NORM_INFINITY,&fdmax));
  PetscCall(VecNorm(g1,NORM_INFINITY,&hcmax));
  PetscCall(VecDot(g1,g2,&dot));
  /* g3 = g1 - g2, the elementwise difference of the two gradients */
  PetscCall(VecCopy(g1,g3));
  PetscCall(VecAXPY(g3,-1.0,g2));
  PetscCall(VecNorm(g3,NORM_2,&diffnorm));
  PetscCall(VecNorm(g3,NORM_INFINITY,&diffmax));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm))));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm));
  PetscCall(PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax));

  if (complete_print) {
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n"));
    PetscCall(VecView(g1,mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n"));
    PetscCall(VecView(g2,mviewer));
    PetscCall(PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n"));
    PetscCall(VecView(g3,mviewer));
  }
  PetscCall(VecDestroy(&g2));
  PetscCall(VecDestroy(&g3));

  if (complete_print) {
    PetscCall(PetscViewerPopFormat(mviewer));
    PetscCall(PetscViewerDestroy(&mviewer));
  }
  /* restore the tab level saved above */
  PetscCall(PetscViewerASCIISetTab(viewer,tabs));
  PetscFunctionReturn(0);
}
103 
/*@
  TaoComputeGradient - Computes the gradient of the objective function

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. G - gradient vector

  Options Database Keys:
+    -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
-    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient

  Note:
    `TaoComputeGradient()` is typically used within the implementation of the optimization method,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `TaoComputeObjective()`, `TaoComputeObjectiveAndGradient()`, `TaoSetGradient()`
@*/
PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
{
  PetscReal      dummy;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,3);
  /* lock X read-only so a faulty user callback cannot modify the iterate */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computegradient) {
    /* user supplied a dedicated gradient routine */
    PetscCall(PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL));
    PetscStackPush("Tao user gradient evaluation routine");
    PetscCall((*tao->ops->computegradient)(tao,X,G,tao->user_gradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL));
    tao->ngrads++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* fall back to the combined routine, discarding the objective value */
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL));
    PetscStackPush("Tao user objective/gradient evaluation routine");
    PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradient() has not been called");
  PetscCall(VecLockReadPop(X));

  /* optionally compare G against a finite difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao,X,G));
  PetscFunctionReturn(0);
}
159 
/*@
  TaoComputeObjective - Computes the objective function value at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. f - Objective value at X

  Note:
    `TaoComputeObjective()` is typically used within the implementation of the optimization algorithm
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `Tao`, `TaoComputeGradient()`, `TaoComputeObjectiveAndGradient()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
{
  Vec            temp;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscCheckSameComm(tao,1,X,2);
  /* lock X read-only so a faulty user callback cannot modify the iterate */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjective) {
    /* user supplied a dedicated objective routine */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective evaluation routine");
    PetscCall((*tao->ops->computeobjective)(tao,X,f,tao->user_objP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
  } else if (tao->ops->computeobjectiveandgradient) {
    /* only the combined routine exists: duplicate a work vector to receive
       the (discarded) gradient */
    PetscCall(PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n"));
    PetscCall(VecDuplicate(X,&temp));
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective/gradient evaluation routine");
    PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL));
    PetscCall(VecDestroy(&temp));
    tao->nfuncgrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() has not been called");
  PetscCall(PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f)));
  PetscCall(VecLockReadPop(X));
  PetscFunctionReturn(0);
}
211 
/*@
  TaoComputeObjectiveAndGradient - Computes the objective function value and its gradient at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameters:
+ f - Objective value at X
- g - Gradient vector at X

  Note:
    `TaoComputeObjectiveAndGradient()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

  Level: developer

.seealso: `TaoComputeGradient()`, `TaoComputeObjective()`, `TaoSetObjective()`
@*/
PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(G,VEC_CLASSID,4);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,G,4);
  /* lock X read-only so a faulty user callback cannot modify the iterate */
  PetscCall(VecLockReadPush(X));
  if (tao->ops->computeobjectiveandgradient) {
    PetscCall(PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL));
    if (tao->ops->computegradient == TaoDefaultComputeGradient) {
      /* gradient is the built-in finite difference approximation: compute the
         objective separately and difference it */
      PetscCall(TaoComputeObjective(tao,X,f));
      PetscCall(TaoDefaultComputeGradient(tao,X,G,NULL));
    } else {
      PetscStackPush("Tao user objective/gradient evaluation routine");
      PetscCall((*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP));
      PetscStackPop;
    }
    PetscCall(PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL));
    tao->nfuncgrads++;
  } else if (tao->ops->computeobjective && tao->ops->computegradient) {
    /* no combined routine: call the separate objective and gradient routines */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user objective evaluation routine");
    PetscCall((*tao->ops->computeobjective)(tao,X,f,tao->user_objP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
    PetscCall(PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL));
    PetscStackPush("Tao user gradient evaluation routine");
    PetscCall((*tao->ops->computegradient)(tao,X,G,tao->user_gradP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL));
    tao->ngrads++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjective() or TaoSetGradient() not set");
  PetscCall(PetscInfo(tao,"TAO Function evaluation: %20.19e\n",(double)(*f)));
  PetscCall(VecLockReadPop(X));

  /* optionally compare G against a finite difference gradient (-tao_test_gradient) */
  PetscCall(TaoTestGradient(tao,X,G));
  PetscFunctionReturn(0);
}
274 
275 /*@C
276   TaoSetObjective - Sets the function evaluation routine for minimization
277 
278   Logically collective on tao
279 
280   Input Parameters:
281 + tao - the Tao context
282 . func - the objective function
283 - ctx - [optional] user-defined context for private data for the function evaluation
284         routine (may be NULL)
285 
286   Calling sequence of func:
287 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
288 
289 + x - input vector
290 . f - function value
291 - ctx - [optional] user-defined function context
292 
293   Level: beginner
294 
295 .seealso: `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetObjective()`
296 @*/
297 PetscErrorCode TaoSetObjective(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
298 {
299   PetscFunctionBegin;
300   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
301   if (ctx) tao->user_objP = ctx;
302   if (func) tao->ops->computeobjective = func;
303   PetscFunctionReturn(0);
304 }
305 
306 /*@C
307   TaoGetObjective - Gets the function evaluation routine for the function to be minimized
308 
309   Not collective
310 
311   Input Parameter:
312 . tao - the Tao context
313 
314   Output Parameters
315 + func - the objective function
316 - ctx - the user-defined context for private data for the function evaluation
317 
318   Calling sequence of func:
319 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
320 
321 + x - input vector
322 . f - function value
323 - ctx - [optional] user-defined function context
324 
325   Level: beginner
326 
327 .seealso: `Tao`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjective()`
328 @*/
329 PetscErrorCode TaoGetObjective(Tao tao, PetscErrorCode (**func)(Tao, Vec, PetscReal*,void*),void **ctx)
330 {
331   PetscFunctionBegin;
332   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
333   if (func) *func = tao->ops->computeobjective;
334   if (ctx) *ctx = tao->user_objP;
335   PetscFunctionReturn(0);
336 }
337 
338 /*@C
339   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
340 
341   Logically collective on tao
342 
343   Input Parameters:
344 + tao - the Tao context
345 . func - the residual evaluation routine
346 - ctx - [optional] user-defined context for private data for the function evaluation
347         routine (may be NULL)
348 
349   Calling sequence of func:
350 $      func (Tao tao, Vec x, Vec f, void *ctx);
351 
352 + x - input vector
353 . f - function value vector
354 - ctx - [optional] user-defined function context
355 
356   Level: beginner
357 
358 .seealso: `Tao`, `TaoSetObjective()`, `TaoSetJacobianRoutine()`
359 @*/
360 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
361 {
362   PetscFunctionBegin;
363   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
364   PetscValidHeaderSpecific(res,VEC_CLASSID,2);
365   PetscCall(PetscObjectReference((PetscObject)res));
366   if (tao->ls_res) {
367     PetscCall(VecDestroy(&tao->ls_res));
368   }
369   tao->ls_res = res;
370   tao->user_lsresP = ctx;
371   tao->ops->computeresidual = func;
372 
373   PetscFunctionReturn(0);
374 }
375 
376 /*@
377   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give.
378    If this function is not provided, or if sigma_v and sigma_w are both NULL, then the identity matrix will be used for weights.
379 
380   Collective on tao
381 
382   Input Parameters:
383 + tao - the Tao context
384 . sigma_v - vector of weights (diagonal terms only)
385 . n       - the number of weights (if using off-diagonal)
386 . rows    - index list of rows for sigma_w
387 . cols    - index list of columns for sigma_w
388 - vals - array of weights
389 
390   Note: Either sigma_v or sigma_w (or both) should be NULL
391 
392   Level: intermediate
393 
394 .seealso: `Tao`, `TaoSetResidualRoutine()`
395 @*/
396 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
397 {
398   PetscInt       i;
399 
400   PetscFunctionBegin;
401   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
402   if (sigma_v) PetscValidHeaderSpecific(sigma_v,VEC_CLASSID,2);
403   PetscCall(PetscObjectReference((PetscObject)sigma_v));
404   PetscCall(VecDestroy(&tao->res_weights_v));
405   tao->res_weights_v = sigma_v;
406   if (vals) {
407     PetscCall(PetscFree(tao->res_weights_rows));
408     PetscCall(PetscFree(tao->res_weights_cols));
409     PetscCall(PetscFree(tao->res_weights_w));
410     PetscCall(PetscMalloc1(n,&tao->res_weights_rows));
411     PetscCall(PetscMalloc1(n,&tao->res_weights_cols));
412     PetscCall(PetscMalloc1(n,&tao->res_weights_w));
413     tao->res_weights_n = n;
414     for (i=0;i<n;i++) {
415       tao->res_weights_rows[i] = rows[i];
416       tao->res_weights_cols[i] = cols[i];
417       tao->res_weights_w[i] = vals[i];
418     }
419   } else {
420     tao->res_weights_n = 0;
421     tao->res_weights_rows = NULL;
422     tao->res_weights_cols = NULL;
423   }
424   PetscFunctionReturn(0);
425 }
426 
/*@
  TaoComputeResidual - Computes a least-squares residual vector at a given point

  Collective on tao

  Input Parameters:
+ tao - the Tao context
- X - input vector

  Output Parameter:
. F - Residual vector at X

  Notes:
    `TaoComputeResidual()` is typically used within the implementation of the optimization algorithm,
  so most users would not generally call this routine themselves.

  Level: advanced

.seealso: `Tao`, `TaoSetResidualRoutine()`
@*/
PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  PetscValidHeaderSpecific(X,VEC_CLASSID,2);
  PetscValidHeaderSpecific(F,VEC_CLASSID,3);
  PetscCheckSameComm(tao,1,X,2);
  PetscCheckSameComm(tao,1,F,3);
  if (tao->ops->computeresidual) {
    /* residual evaluations are logged under the objective-evaluation event */
    PetscCall(PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL));
    PetscStackPush("Tao user least-squares residual evaluation routine");
    PetscCall((*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP));
    PetscStackPop;
    PetscCall(PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL));
    tao->nfuncs++;
  } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
  PetscCall(PetscInfo(tao,"TAO least-squares residual evaluation.\n"));
  PetscFunctionReturn(0);
}
466 
467 /*@C
468   TaoSetGradient - Sets the gradient evaluation routine for the function to be optimized
469 
470   Logically collective on tao
471 
472   Input Parameters:
473 + tao - the Tao context
474 . g - [optional] the vector to internally hold the gradient computation
475 . func - the gradient function
476 - ctx - [optional] user-defined context for private data for the gradient evaluation
477         routine (may be NULL)
478 
479   Calling sequence of func:
480 $      func (Tao tao, Vec x, Vec g, void *ctx);
481 
482 + x - input vector
483 . g - gradient value (output)
484 - ctx - [optional] user-defined function context
485 
486   Level: beginner
487 
488 .seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoGetGradient()`
489 @*/
490 PetscErrorCode TaoSetGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
491 {
492   PetscFunctionBegin;
493   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
494   if (g) {
495     PetscValidHeaderSpecific(g,VEC_CLASSID,2);
496     PetscCheckSameComm(tao,1,g,2);
497     PetscCall(PetscObjectReference((PetscObject)g));
498     PetscCall(VecDestroy(&tao->gradient));
499     tao->gradient = g;
500   }
501   if (func) tao->ops->computegradient = func;
502   if (ctx) tao->user_gradP = ctx;
503   PetscFunctionReturn(0);
504 }
505 
506 /*@C
507   TaoGetGradient - Gets the gradient evaluation routine for the function being optimized
508 
509   Not collective
510 
511   Input Parameter:
512 . tao - the Tao context
513 
514   Output Parameters:
515 + g - the vector to internally hold the gradient computation
516 . func - the gradient function
517 - ctx - user-defined context for private data for the gradient evaluation routine
518 
519   Calling sequence of func:
520 $      func (Tao tao, Vec x, Vec g, void *ctx);
521 
522 + x - input vector
523 . g - gradient value (output)
524 - ctx - [optional] user-defined function context
525 
526   Level: beginner
527 
528 .seealso: `Tao`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`, `TaoSetGradient()`
529 @*/
530 PetscErrorCode TaoGetGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, Vec, void*),void **ctx)
531 {
532   PetscFunctionBegin;
533   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
534   if (g) *g = tao->gradient;
535   if (func) *func = tao->ops->computegradient;
536   if (ctx) *ctx = tao->user_gradP;
537   PetscFunctionReturn(0);
538 }
539 
/*@C
  TaoSetObjectiveAndGradient - Sets a combined objective function and gradient evaluation routine for the function to be optimized

  Logically collective on tao

  Input Parameters:
+ tao - the Tao context
. g - [optional] the vector to internally hold the gradient computation
. func - the gradient function
- ctx - [optional] user-defined context for private data for the gradient evaluation
        routine (may be NULL)

  Calling sequence of func:
$      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);

+ x - input vector
. f - objective value (output)
. g - gradient value (output)
- ctx - [optional] user-defined function context

  Level: beginner

  Note:
  For some optimization methods using a combined function can be more efficient.

.seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetHessian()`, `TaoSetGradient()`, `TaoGetObjectiveAndGradient()`
@*/
PetscErrorCode TaoSetObjectiveAndGradient(Tao tao, Vec g, PetscErrorCode (*func)(Tao, Vec, PetscReal*, Vec, void*), void *ctx)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
  if (g) {
    PetscValidHeaderSpecific(g,VEC_CLASSID,2);
    PetscCheckSameComm(tao,1,g,2);
    /* reference before destroy so that g == tao->gradient is handled safely */
    PetscCall(PetscObjectReference((PetscObject)g));
    PetscCall(VecDestroy(&tao->gradient));
    tao->gradient = g;
  }
  if (ctx) tao->user_objgradP = ctx;
  if (func) tao->ops->computeobjectiveandgradient = func;
  PetscFunctionReturn(0);
}
582 
583 /*@C
584   TaoGetObjectiveAndGradient - Gets the combined objective function and gradient evaluation routine for the function to be optimized
585 
586   Not collective
587 
588   Input Parameter:
589 . tao - the Tao context
590 
591   Output Parameters:
592 + g - the vector to internally hold the gradient computation
593 . func - the gradient function
594 - ctx - user-defined context for private data for the gradient evaluation routine
595 
596   Calling sequence of func:
597 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
598 
599 + x - input vector
600 . f - objective value (output)
601 . g - gradient value (output)
602 - ctx - [optional] user-defined function context
603 
604   Level: beginner
605 
606 .seealso: `Tao`, `TaoSolve()`, `TaoSetObjective()`, `TaoSetGradient()`, `TaoSetHessian()`, `TaoSetObjectiveAndGradient()`
607 @*/
608 PetscErrorCode TaoGetObjectiveAndGradient(Tao tao, Vec *g, PetscErrorCode (**func)(Tao, Vec, PetscReal*, Vec, void*), void **ctx)
609 {
610   PetscFunctionBegin;
611   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
612   if (g) *g = tao->gradient;
613   if (func) *func = tao->ops->computeobjectiveandgradient;
614   if (ctx) *ctx = tao->user_objgradP;
615   PetscFunctionReturn(0);
616 }
617 
618 /*@
619   TaoIsObjectiveDefined - Checks to see if the user has
620   declared an objective-only routine.  Useful for determining when
621   it is appropriate to call `TaoComputeObjective()` or
622   `TaoComputeObjectiveAndGradient()`
623 
624   Not collective
625 
626   Input Parameter:
627 . tao - the Tao context
628 
629   Output Parameter:
630 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise
631 
632   Level: developer
633 
634 .seealso: `Tao`, `TaoSetObjective()`, `TaoIsGradientDefined()`, `TaoIsObjectiveAndGradientDefined()`
635 @*/
636 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
637 {
638   PetscFunctionBegin;
639   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
640   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
641   else *flg = PETSC_TRUE;
642   PetscFunctionReturn(0);
643 }
644 
645 /*@
646   TaoIsGradientDefined - Checks to see if the user has
647   declared an objective-only routine.  Useful for determining when
648   it is appropriate to call `TaoComputeGradient()` or
649   `TaoComputeGradientAndGradient()`
650 
651   Not Collective
652 
653   Input Parameter:
654 . tao - the Tao context
655 
656   Output Parameter:
657 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise
658 
659   Level: developer
660 
661 .seealso: `TaoSetGradient()`, `TaoIsObjectiveDefined()`, `TaoIsObjectiveAndGradientDefined()`
662 @*/
663 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
664 {
665   PetscFunctionBegin;
666   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
667   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
668   else *flg = PETSC_TRUE;
669   PetscFunctionReturn(0);
670 }
671 
672 /*@
673   TaoIsObjectiveAndGradientDefined - Checks to see if the user has
674   declared a joint objective/gradient routine.  Useful for determining when
675   it is appropriate to call `TaoComputeObjective()` or
676   `TaoComputeObjectiveAndGradient()`
677 
678   Not Collective
679 
680   Input Parameter:
681 . tao - the Tao context
682 
683   Output Parameter:
684 . flg - `PETSC_TRUE` if function routine is set by user, `PETSC_FALSE` otherwise
685 
686   Level: developer
687 
688 .seealso: `TaoSetObjectiveAndGradient()`, `TaoIsObjectiveDefined()`, `TaoIsGradientDefined()`
689 @*/
690 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
691 {
692   PetscFunctionBegin;
693   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
694   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
695   else *flg = PETSC_TRUE;
696   PetscFunctionReturn(0);
697 }
698