File: val_optimize.cpp

Package: cppad 2025.00.00.2-1
// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
// SPDX-FileCopyrightText: Bradley M. Bell <bradbell@seanet.com>
// SPDX-FileContributor: 2023-24 Bradley M. Bell
# include <cppad/cppad.hpp>
namespace { // BEGIN_EMPTY_NAMESPACE
// ----------------------------------------------------------------------------
// csum_op
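// Check val_optimize on a recording that combines a cumulative summation
// with a conditional expression, using both variables and dynamic parameters.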
bool csum_op(void)
{  bool ok = true;
   using CppAD::AD;
   using CppAD::vector;
   //
   // ax, ap
   size_t n = 2;
   vector< AD<double> > ax(n), ap(n);
   for(size_t j = 0; j < n; ++j)
   {  ax[j] = 0.0;
      ap[j] = 0.0;
   }
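   // start the recording: ax are the independent variables,
   // ap are the independent dynamic parameters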
   Independent(ax, ap);
   //
   // asum
   AD<double> asum = 0.0;
   for(size_t j = 0; j < n; ++j)
   {  asum += ax[j];
      asum += ap[j];
   }
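   // conditional expression: ax[0] + ap[0] when ax[0] <= ap[0], else ax[0] - ap[0]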
   AD<double> aplus  = ax[0] + ap[0];
   AD<double> aminus = ax[0] - ap[0];
   asum             += CondExpLe(ax[0], ap[0], aplus, aminus);
   //
   // f
   vector< AD<double> > ay(1);
   ay[0] = asum * asum;
   CppAD::ADFun<double> f(ax, ay);
   //
   // val_optimize
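   // optimize the recording using the value graph (val_graph) representation;
   // no_conditional_skip requests that no conditional skip operations be generated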
   f.val_optimize("val_graph no_conditional_skip");
   //
   // x, p, y, check, ok
   // zero order forward
   vector<double> x(n), p(n), y(1);
   double sum = 0.0;
   for(size_t j = 0; j < n; ++j)
   {  p[j]  = double(j + 1);
      x[j]  = double(n + j + 1);
      sum  += x[j];
      sum  += p[j];
   }
   if( x[0] <= p[0] )
      sum += x[0] + p[0];
   else
      sum += x[0] - p[0];
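   // sum now equals asum evaluated at this x and p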
   //
   double check = sum * sum;
   f.new_dynamic(p);
   y     = f.Forward(0, x);
   ok &= y[0] == check;
   //
   // val_optimize, y, ok
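   // optimize a second time to check that the optimized recording
   // can itself be optimized and still yields the same value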
   f.val_optimize("val_graph no_conditional_skip");
   f.new_dynamic(p);
   y     = f.Forward(0, x);
   ok &= y[0] == check;
   //
   return ok;
}
// ----------------------------------------------------------------------------
} // END_EMPTY_NAMESPACE
bool test_val_optimize(void)
{  bool ok = true;
   ok     &= csum_op();
   return ok;
}