xref: /petsc/src/tao/interface/taosolver_fg.c (revision ccb4e88a40f0b86eaeca07ff64c64e4de2fae686)
1 #include <petsc/private/taoimpl.h> /*I "petsctao.h" I*/
2 
3 /*@
4   TaoSetInitialVector - Sets the initial guess for the solve
5 
6   Logically collective on Tao
7 
8   Input Parameters:
9 + tao - the Tao context
10 - x0  - the initial guess
11 
12   Level: beginner
13 .seealso: TaoCreate(), TaoSolve()
14 @*/
15 
16 PetscErrorCode TaoSetInitialVector(Tao tao, Vec x0)
17 {
18   PetscErrorCode ierr;
19 
20   PetscFunctionBegin;
21   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
22   if (x0) {
23     PetscValidHeaderSpecific(x0,VEC_CLASSID,2);
24     PetscObjectReference((PetscObject)x0);
25   }
26   ierr = VecDestroy(&tao->solution);CHKERRQ(ierr);
27   tao->solution = x0;
28   PetscFunctionReturn(0);
29 }
30 
/*
   TaoTestGradient - Compares a hand-coded gradient against a finite-difference
   approximation and prints norm-based diagnostics to help detect errors in the
   user-provided gradient routine.

   Activated by the -tao_test_gradient option; returns immediately when that
   option is not set.  With -tao_test_gradient_view the hand-coded gradient,
   the finite-difference gradient, and their difference are also displayed.

   Input Parameters:
+  tao - the Tao context
.  x   - the point at which the gradient was evaluated
-  g1  - the hand-coded gradient, assumed already computed at x
*/
PetscErrorCode TaoTestGradient(Tao tao,Vec x,Vec g1)
{
  Vec               g2,g3;
  PetscBool         complete_print = PETSC_FALSE,test = PETSC_FALSE;
  PetscReal         hcnorm,fdnorm,hcmax,fdmax,diffmax,diffnorm;
  PetscScalar       dot;
  MPI_Comm          comm;
  PetscViewer       viewer,mviewer;
  PetscViewerFormat format;
  PetscInt          tabs;
  static PetscBool  directionsprinted = PETSC_FALSE; /* emit the explanatory text only once per program run */
  PetscErrorCode    ierr;

  PetscFunctionBegin;
  ierr = PetscObjectOptionsBegin((PetscObject)tao);CHKERRQ(ierr);
  ierr = PetscOptionsName("-tao_test_gradient","Compare hand-coded and finite difference Gradients","None",&test);CHKERRQ(ierr);
  ierr = PetscOptionsViewer("-tao_test_gradient_view","View difference between hand-coded and finite difference Gradients element entries","None",&mviewer,&format,&complete_print);CHKERRQ(ierr);
  ierr = PetscOptionsEnd();CHKERRQ(ierr);
  if (!test) {
    /* -tao_test_gradient not requested: release the viewer the options call may have created */
    if (complete_print) {
      ierr = PetscViewerDestroy(&mviewer);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  }

  ierr = PetscObjectGetComm((PetscObject)tao,&comm);CHKERRQ(ierr);
  ierr = PetscViewerASCIIGetStdout(comm,&viewer);CHKERRQ(ierr);
  /* Save the stdout viewer's tab level so it can be restored on exit */
  ierr = PetscViewerASCIIGetTab(viewer, &tabs);CHKERRQ(ierr);
  ierr = PetscViewerASCIISetTab(viewer, ((PetscObject)tao)->tablevel);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  ---------- Testing Gradient -------------\n");CHKERRQ(ierr);
  if (!complete_print && !directionsprinted) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Run with -tao_test_gradient_view and optionally -tao_test_gradient <threshold> to show difference\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    of hand-coded and finite difference gradient entries greater than <threshold>.\n");CHKERRQ(ierr);
  }
  if (!directionsprinted) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Testing hand-coded Gradient, if (for double precision runs) ||G - Gfd||/||G|| is\n");CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    O(1.e-8), the hand-coded Gradient is probably correct.\n");CHKERRQ(ierr);
    directionsprinted = PETSC_TRUE;
  }
  if (complete_print) {
    ierr = PetscViewerPushFormat(mviewer,format);CHKERRQ(ierr);
  }

  ierr = VecDuplicate(x,&g2);CHKERRQ(ierr);
  ierr = VecDuplicate(x,&g3);CHKERRQ(ierr);

  /* Compute finite difference gradient, assume the gradient is already computed by TaoComputeGradient() and put into g1 */
  ierr = TaoDefaultComputeGradient(tao,x,g2,NULL);CHKERRQ(ierr);

  ierr = VecNorm(g2,NORM_2,&fdnorm);CHKERRQ(ierr);
  ierr = VecNorm(g1,NORM_2,&hcnorm);CHKERRQ(ierr);
  ierr = VecNorm(g2,NORM_INFINITY,&fdmax);CHKERRQ(ierr);
  ierr = VecNorm(g1,NORM_INFINITY,&hcmax);CHKERRQ(ierr);
  ierr = VecDot(g1,g2,&dot);CHKERRQ(ierr);
  /* g3 = g1 - g2, the difference between hand-coded and finite-difference gradients */
  ierr = VecCopy(g1,g3);CHKERRQ(ierr);
  ierr = VecAXPY(g3,-1.0,g2);CHKERRQ(ierr);
  ierr = VecNorm(g3,NORM_2,&diffnorm);CHKERRQ(ierr);
  ierr = VecNorm(g3,NORM_INFINITY,&diffmax);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  ||Gfd|| %g, ||G|| = %g, angle cosine = (Gfd'G)/||Gfd||||G|| = %g\n", (double)fdnorm, (double)hcnorm, (double)(PetscRealPart(dot)/(fdnorm*hcnorm)));CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  2-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffnorm/PetscMax(hcnorm,fdnorm)),(double)diffnorm);CHKERRQ(ierr);
  ierr = PetscViewerASCIIPrintf(viewer,"  max-norm ||G - Gfd||/||G|| = %g, ||G - Gfd|| = %g\n",(double)(diffmax/PetscMax(hcmax,fdmax)),(double)diffmax);CHKERRQ(ierr);

  if (complete_print) {
    ierr = PetscViewerASCIIPrintf(viewer,"  Hand-coded gradient ----------\n");CHKERRQ(ierr);
    ierr = VecView(g1,mviewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Finite difference gradient ----------\n");CHKERRQ(ierr);
    ierr = VecView(g2,mviewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Hand-coded minus finite-difference gradient ----------\n");CHKERRQ(ierr);
    ierr = VecView(g3,mviewer);CHKERRQ(ierr);
  }
  ierr = VecDestroy(&g2);CHKERRQ(ierr);
  ierr = VecDestroy(&g3);CHKERRQ(ierr);

  if (complete_print) {
    ierr = PetscViewerPopFormat(mviewer);CHKERRQ(ierr);
    ierr = PetscViewerDestroy(&mviewer);CHKERRQ(ierr);
  }
  /* Restore the stdout viewer's original tab level */
  ierr = PetscViewerASCIISetTab(viewer,tabs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
111 
112 /*@
113   TaoComputeGradient - Computes the gradient of the objective function
114 
115   Collective on Tao
116 
117   Input Parameters:
118 + tao - the Tao context
119 - X - input vector
120 
121   Output Parameter:
122 . G - gradient vector
123 
124   Options Database Keys:
125 +    -tao_test_gradient - compare the user provided gradient with one compute via finite differences to check for errors
126 -    -tao_test_gradient_view - display the user provided gradient, the finite difference gradient and the difference between them to help users detect the location of errors in the user provided gradient
127 
128   Notes:
129     TaoComputeGradient() is typically used within minimization implementations,
130   so most users would not generally call this routine themselves.
131 
132   Level: advanced
133 
134 .seealso: TaoComputeObjective(), TaoComputeObjectiveAndGradient(), TaoSetGradientRoutine()
135 @*/
136 PetscErrorCode TaoComputeGradient(Tao tao, Vec X, Vec G)
137 {
138   PetscErrorCode ierr;
139   PetscReal      dummy;
140 
141   PetscFunctionBegin;
142   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
143   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
144   PetscValidHeaderSpecific(G,VEC_CLASSID,3);
145   PetscCheckSameComm(tao,1,X,2);
146   PetscCheckSameComm(tao,1,G,3);
147   ierr = VecLockReadPush(X);CHKERRQ(ierr);
148   if (tao->ops->computegradient) {
149     ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
150     PetscStackPush("Tao user gradient evaluation routine");
151     ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
152     PetscStackPop;
153     ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
154     tao->ngrads++;
155   } else if (tao->ops->computeobjectiveandgradient) {
156     ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
157     PetscStackPush("Tao user objective/gradient evaluation routine");
158     ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,&dummy,G,tao->user_objgradP);CHKERRQ(ierr);
159     PetscStackPop;
160     ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
161     tao->nfuncgrads++;
162   } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetGradientRoutine() has not been called");
163   ierr = VecLockReadPop(X);CHKERRQ(ierr);
164 
165   ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
166   PetscFunctionReturn(0);
167 }
168 
169 /*@
170   TaoComputeObjective - Computes the objective function value at a given point
171 
172   Collective on Tao
173 
174   Input Parameters:
175 + tao - the Tao context
176 - X - input vector
177 
178   Output Parameter:
179 . f - Objective value at X
180 
181   Notes:
182     TaoComputeObjective() is typically used within minimization implementations,
183   so most users would not generally call this routine themselves.
184 
185   Level: advanced
186 
187 .seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
188 @*/
189 PetscErrorCode TaoComputeObjective(Tao tao, Vec X, PetscReal *f)
190 {
191   PetscErrorCode ierr;
192   Vec            temp;
193 
194   PetscFunctionBegin;
195   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
196   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
197   PetscCheckSameComm(tao,1,X,2);
198   ierr = VecLockReadPush(X);CHKERRQ(ierr);
199   if (tao->ops->computeobjective) {
200     ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
201     PetscStackPush("Tao user objective evaluation routine");
202     ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
203     PetscStackPop;
204     ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
205     tao->nfuncs++;
206   } else if (tao->ops->computeobjectiveandgradient) {
207     ierr = PetscInfo(tao,"Duplicating variable vector in order to call func/grad routine\n");CHKERRQ(ierr);
208     ierr = VecDuplicate(X,&temp);CHKERRQ(ierr);
209     ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr);
210     PetscStackPush("Tao user objective/gradient evaluation routine");
211     ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,temp,tao->user_objgradP);CHKERRQ(ierr);
212     PetscStackPop;
213     ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,NULL,NULL);CHKERRQ(ierr);
214     ierr = VecDestroy(&temp);CHKERRQ(ierr);
215     tao->nfuncgrads++;
216   }  else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() has not been called");
217   ierr = PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
218   ierr = VecLockReadPop(X);CHKERRQ(ierr);
219   PetscFunctionReturn(0);
220 }
221 
222 /*@
223   TaoComputeObjectiveAndGradient - Computes the objective function value at a given point
224 
225   Collective on Tao
226 
227   Input Parameters:
228 + tao - the Tao context
229 - X - input vector
230 
231   Output Parameters:
232 + f - Objective value at X
233 - g - Gradient vector at X
234 
235   Notes:
236     TaoComputeObjectiveAndGradient() is typically used within minimization implementations,
237   so most users would not generally call this routine themselves.
238 
239   Level: advanced
240 
241 .seealso: TaoComputeGradient(), TaoComputeObjectiveAndGradient(), TaoSetObjectiveRoutine()
242 @*/
243 PetscErrorCode TaoComputeObjectiveAndGradient(Tao tao, Vec X, PetscReal *f, Vec G)
244 {
245   PetscErrorCode ierr;
246 
247   PetscFunctionBegin;
248   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
249   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
250   PetscValidHeaderSpecific(G,VEC_CLASSID,4);
251   PetscCheckSameComm(tao,1,X,2);
252   PetscCheckSameComm(tao,1,G,4);
253   ierr = VecLockReadPush(X);CHKERRQ(ierr);
254   if (tao->ops->computeobjectiveandgradient) {
255     ierr = PetscLogEventBegin(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
256     if (tao->ops->computegradient == TaoDefaultComputeGradient) {
257       ierr = TaoComputeObjective(tao,X,f);CHKERRQ(ierr);
258       ierr = TaoDefaultComputeGradient(tao,X,G,NULL);CHKERRQ(ierr);
259     } else {
260       PetscStackPush("Tao user objective/gradient evaluation routine");
261       ierr = (*tao->ops->computeobjectiveandgradient)(tao,X,f,G,tao->user_objgradP);CHKERRQ(ierr);
262       PetscStackPop;
263     }
264     ierr = PetscLogEventEnd(TAO_ObjGradEval,tao,X,G,NULL);CHKERRQ(ierr);
265     tao->nfuncgrads++;
266   } else if (tao->ops->computeobjective && tao->ops->computegradient) {
267     ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
268     PetscStackPush("Tao user objective evaluation routine");
269     ierr = (*tao->ops->computeobjective)(tao,X,f,tao->user_objP);CHKERRQ(ierr);
270     PetscStackPop;
271     ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
272     tao->nfuncs++;
273     ierr = PetscLogEventBegin(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
274     PetscStackPush("Tao user gradient evaluation routine");
275     ierr = (*tao->ops->computegradient)(tao,X,G,tao->user_gradP);CHKERRQ(ierr);
276     PetscStackPop;
277     ierr = PetscLogEventEnd(TAO_GradientEval,tao,X,G,NULL);CHKERRQ(ierr);
278     tao->ngrads++;
279   } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetObjectiveRoutine() or TaoSetGradientRoutine() not set");
280   ierr = PetscInfo1(tao,"TAO Function evaluation: %20.19e\n",(double)(*f));CHKERRQ(ierr);
281   ierr = VecLockReadPop(X);CHKERRQ(ierr);
282 
283   ierr = TaoTestGradient(tao,X,G);CHKERRQ(ierr);
284   PetscFunctionReturn(0);
285 }
286 
287 /*@C
288   TaoSetObjectiveRoutine - Sets the function evaluation routine for minimization
289 
290   Logically collective on Tao
291 
292   Input Parameters:
293 + tao - the Tao context
294 . func - the objective function
295 - ctx - [optional] user-defined context for private data for the function evaluation
296         routine (may be NULL)
297 
298   Calling sequence of func:
299 $      func (Tao tao, Vec x, PetscReal *f, void *ctx);
300 
301 + x - input vector
302 . f - function value
303 - ctx - [optional] user-defined function context
304 
305   Level: beginner
306 
307 .seealso: TaoSetGradientRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
308 @*/
309 PetscErrorCode TaoSetObjectiveRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal*,void*),void *ctx)
310 {
311   PetscFunctionBegin;
312   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
313   tao->user_objP = ctx;
314   tao->ops->computeobjective = func;
315   PetscFunctionReturn(0);
316 }
317 
318 /*@C
319   TaoSetResidualRoutine - Sets the residual evaluation routine for least-square applications
320 
321   Logically collective on Tao
322 
323   Input Parameters:
324 + tao - the Tao context
325 . func - the residual evaluation routine
326 - ctx - [optional] user-defined context for private data for the function evaluation
327         routine (may be NULL)
328 
329   Calling sequence of func:
330 $      func (Tao tao, Vec x, Vec f, void *ctx);
331 
332 + x - input vector
333 . f - function value vector
334 - ctx - [optional] user-defined function context
335 
336   Level: beginner
337 
338 .seealso: TaoSetObjectiveRoutine(), TaoSetJacobianRoutine()
339 @*/
340 PetscErrorCode TaoSetResidualRoutine(Tao tao, Vec res, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
341 {
342   PetscErrorCode ierr;
343 
344   PetscFunctionBegin;
345   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
346   PetscValidHeaderSpecific(res,VEC_CLASSID,2);
347   ierr = PetscObjectReference((PetscObject)res);CHKERRQ(ierr);
348   if (tao->ls_res) {
349     ierr = VecDestroy(&tao->ls_res);CHKERRQ(ierr);
350   }
351   tao->ls_res = res;
352   tao->user_lsresP = ctx;
353   tao->ops->computeresidual = func;
354 
355   PetscFunctionReturn(0);
356 }
357 
358 /*@
359   TaoSetResidualWeights - Give weights for the residual values. A vector can be used if only diagonal terms are used, otherwise a matrix can be give. If this function is not used, or if sigma_v and sigma_w are both NULL, then the default identity matrix will be used for weights.
360 
361   Collective on Tao
362 
363   Input Parameters:
364 + tao - the Tao context
365 . sigma_v - vector of weights (diagonal terms only)
366 . n       - the number of weights (if using off-diagonal)
367 . rows    - index list of rows for sigma_w
368 . cols    - index list of columns for sigma_w
369 - vals - array of weights
370 
371   Note: Either sigma_v or sigma_w (or both) should be NULL
372 
373   Level: intermediate
374 
375 .seealso: TaoSetResidualRoutine()
376 @*/
377 PetscErrorCode TaoSetResidualWeights(Tao tao, Vec sigma_v, PetscInt n, PetscInt *rows, PetscInt *cols, PetscReal *vals)
378 {
379   PetscErrorCode ierr;
380   PetscInt       i;
381   PetscFunctionBegin;
382   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
383   if (sigma_v) {
384     PetscValidHeaderSpecific(sigma_v,VEC_CLASSID,2);
385     ierr = PetscObjectReference((PetscObject)sigma_v);CHKERRQ(ierr);
386   }
387   if (tao->res_weights_v) {
388     ierr = VecDestroy(&tao->res_weights_v);CHKERRQ(ierr);
389   }
390   tao->res_weights_v=sigma_v;
391   if (vals) {
392     if (tao->res_weights_n) {
393       ierr = PetscFree(tao->res_weights_rows);CHKERRQ(ierr);
394       ierr = PetscFree(tao->res_weights_cols);CHKERRQ(ierr);
395       ierr = PetscFree(tao->res_weights_w);CHKERRQ(ierr);
396     }
397     ierr = PetscMalloc1(n,&tao->res_weights_rows);CHKERRQ(ierr);
398     ierr = PetscMalloc1(n,&tao->res_weights_cols);CHKERRQ(ierr);
399     ierr = PetscMalloc1(n,&tao->res_weights_w);CHKERRQ(ierr);
400     tao->res_weights_n=n;
401     for (i=0;i<n;i++) {
402       tao->res_weights_rows[i]=rows[i];
403       tao->res_weights_cols[i]=cols[i];
404       tao->res_weights_w[i]=vals[i];
405     }
406   } else {
407     tao->res_weights_n=0;
408     tao->res_weights_rows=NULL;
409     tao->res_weights_cols=NULL;
410   }
411   PetscFunctionReturn(0);
412 }
413 
414 /*@
415   TaoComputeResidual - Computes a least-squares residual vector at a given point
416 
417   Collective on Tao
418 
419   Input Parameters:
420 + tao - the Tao context
421 - X - input vector
422 
423   Output Parameter:
424 . f - Objective vector at X
425 
426   Notes:
427     TaoComputeResidual() is typically used within minimization implementations,
428   so most users would not generally call this routine themselves.
429 
430   Level: advanced
431 
432 .seealso: TaoSetResidualRoutine()
433 @*/
434 PetscErrorCode TaoComputeResidual(Tao tao, Vec X, Vec F)
435 {
436   PetscErrorCode ierr;
437 
438   PetscFunctionBegin;
439   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
440   PetscValidHeaderSpecific(X,VEC_CLASSID,2);
441   PetscValidHeaderSpecific(F,VEC_CLASSID,3);
442   PetscCheckSameComm(tao,1,X,2);
443   PetscCheckSameComm(tao,1,F,3);
444   if (tao->ops->computeresidual) {
445     ierr = PetscLogEventBegin(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
446     PetscStackPush("Tao user least-squares residual evaluation routine");
447     ierr = (*tao->ops->computeresidual)(tao,X,F,tao->user_lsresP);CHKERRQ(ierr);
448     PetscStackPop;
449     ierr = PetscLogEventEnd(TAO_ObjectiveEval,tao,X,NULL,NULL);CHKERRQ(ierr);
450     tao->nfuncs++;
451   } else SETERRQ(PetscObjectComm((PetscObject)tao),PETSC_ERR_ARG_WRONGSTATE,"TaoSetResidualRoutine() has not been called");
452   ierr = PetscInfo(tao,"TAO least-squares residual evaluation.\n");CHKERRQ(ierr);
453   PetscFunctionReturn(0);
454 }
455 
456 /*@C
457   TaoSetGradientRoutine - Sets the gradient evaluation routine for minimization
458 
459   Logically collective on Tao
460 
461   Input Parameters:
462 + tao - the Tao context
463 . func - the gradient function
464 - ctx - [optional] user-defined context for private data for the gradient evaluation
465         routine (may be NULL)
466 
467   Calling sequence of func:
468 $      func (Tao tao, Vec x, Vec g, void *ctx);
469 
470 + x - input vector
471 . g - gradient value (output)
472 - ctx - [optional] user-defined function context
473 
474   Level: beginner
475 
476 .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
477 @*/
478 PetscErrorCode TaoSetGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, Vec, void*),void *ctx)
479 {
480   PetscFunctionBegin;
481   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
482   tao->user_gradP = ctx;
483   tao->ops->computegradient = func;
484   PetscFunctionReturn(0);
485 }
486 
487 /*@C
488   TaoSetObjectiveAndGradientRoutine - Sets a combined objective function and gradient evaluation routine for minimization
489 
490   Logically collective on Tao
491 
492   Input Parameters:
493 + tao - the Tao context
494 . func - the gradient function
495 - ctx - [optional] user-defined context for private data for the gradient evaluation
496         routine (may be NULL)
497 
498   Calling sequence of func:
499 $      func (Tao tao, Vec x, PetscReal *f, Vec g, void *ctx);
500 
501 + x - input vector
502 . f - objective value (output)
503 . g - gradient value (output)
504 - ctx - [optional] user-defined function context
505 
506   Level: beginner
507 
508 .seealso: TaoSetObjectiveRoutine(), TaoSetHessianRoutine() TaoSetObjectiveAndGradientRoutine()
509 @*/
510 PetscErrorCode TaoSetObjectiveAndGradientRoutine(Tao tao, PetscErrorCode (*func)(Tao, Vec, PetscReal *, Vec, void*), void *ctx)
511 {
512   PetscFunctionBegin;
513   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
514   tao->user_objgradP = ctx;
515   tao->ops->computeobjectiveandgradient = func;
516   PetscFunctionReturn(0);
517 }
518 
519 /*@
520   TaoIsObjectiveDefined -- Checks to see if the user has
521   declared an objective-only routine.  Useful for determining when
522   it is appropriate to call TaoComputeObjective() or
523   TaoComputeObjectiveAndGradient()
524 
525   Collective on Tao
526 
527   Input Parameters:
528 + tao - the Tao context
529 - ctx - PETSC_TRUE if objective function routine is set by user,
530         PETSC_FALSE otherwise
531   Level: developer
532 
533 .seealso: TaoSetObjectiveRoutine(), TaoIsGradientDefined(), TaoIsObjectiveAndGradientDefined()
534 @*/
535 PetscErrorCode TaoIsObjectiveDefined(Tao tao, PetscBool *flg)
536 {
537   PetscFunctionBegin;
538   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
539   if (tao->ops->computeobjective == NULL) *flg = PETSC_FALSE;
540   else *flg = PETSC_TRUE;
541   PetscFunctionReturn(0);
542 }
543 
544 /*@
545   TaoIsGradientDefined -- Checks to see if the user has
546   declared an objective-only routine.  Useful for determining when
547   it is appropriate to call TaoComputeGradient() or
548   TaoComputeGradientAndGradient()
549 
550   Not Collective
551 
552   Input Parameters:
553 + tao - the Tao context
554 - ctx - PETSC_TRUE if gradient routine is set by user, PETSC_FALSE otherwise
555   Level: developer
556 
557 .seealso: TaoSetGradientRoutine(), TaoIsObjectiveDefined(), TaoIsObjectiveAndGradientDefined()
558 @*/
559 PetscErrorCode TaoIsGradientDefined(Tao tao, PetscBool *flg)
560 {
561   PetscFunctionBegin;
562   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
563   if (tao->ops->computegradient == NULL) *flg = PETSC_FALSE;
564   else *flg = PETSC_TRUE;
565   PetscFunctionReturn(0);
566 }
567 
568 /*@
569   TaoIsObjectiveAndGradientDefined -- Checks to see if the user has
570   declared a joint objective/gradient routine.  Useful for determining when
571   it is appropriate to call TaoComputeObjective() or
572   TaoComputeObjectiveAndGradient()
573 
574   Not Collective
575 
576   Input Parameters:
577 + tao - the Tao context
578 - ctx - PETSC_TRUE if objective/gradient routine is set by user, PETSC_FALSE otherwise
579   Level: developer
580 
581 .seealso: TaoSetObjectiveAndGradientRoutine(), TaoIsObjectiveDefined(), TaoIsGradientDefined()
582 @*/
583 PetscErrorCode TaoIsObjectiveAndGradientDefined(Tao tao, PetscBool *flg)
584 {
585   PetscFunctionBegin;
586   PetscValidHeaderSpecific(tao,TAO_CLASSID,1);
587   if (tao->ops->computeobjectiveandgradient == NULL) *flg = PETSC_FALSE;
588   else *flg = PETSC_TRUE;
589   PetscFunctionReturn(0);
590 }
591