// SPDX-License-Identifier: EPL-2.0 OR GPL-2.0-or-later
// SPDX-FileCopyrightText: Bradley M. Bell <bradbell@seanet.com>
// SPDX-FileContributor: 2003-24 Bradley M. Bell
// ----------------------------------------------------------------------------
/*
{xrst_begin cppadcg_det_minor.cpp}
cppadcg Speed: Gradient of Determinant by Minor Expansion
#########################################################
Specifications
**************
See :ref:`link_det_minor-name` .
PASS_JACOBIAN_TO_CODE_GEN
*************************
If this flag is one, the Jacobian of the determinant is the function
passed to CppADCodeGen. In this case, the ``code_gen_fun``
:ref:`code_gen_fun@Syntax@function` syntax is used to evaluate
the Jacobian of the determinant.
If this flag is zero, the determinant itself is the function passed
to CppADCodeGen. In this case, the ``code_gen_fun``
:ref:`code_gen_fun@Syntax@jacobian` syntax is used to evaluate
the Jacobian of the determinant.
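For example, the run phase of ``link_det_minor`` below evaluates the
gradient with one of the following two calls::

   gradient = static_fun(matrix);          // PASS_JACOBIAN_TO_CODE_GEN is one
   gradient = static_fun.jacobian(matrix); // PASS_JACOBIAN_TO_CODE_GEN is zero
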
{xrst_spell_off}
{xrst_code cpp} */
# define PASS_JACOBIAN_TO_CODE_GEN 1
/* {xrst_code}
{xrst_spell_on}
Implementation
**************
{xrst_spell_off}
{xrst_code cpp} */
# include <cppad/speed/det_by_minor.hpp>
# include <cppad/speed/uniform_01.hpp>
# include <cppad/utility/vector.hpp>
# include <cppad/example/code_gen_fun.hpp>
# include <map>
extern std::map<std::string, bool> global_option;
namespace {
//
// typedefs
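// c_double is the CppADCodeGen scalar type that records a source code
// graph; ac_double is a CppAD AD type with c_double as its base type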
typedef CppAD::cg::CG<double> c_double;
typedef CppAD::AD<c_double> ac_double;
typedef CppAD::vector<double> d_vector;
typedef CppAD::vector<ac_double> ac_vector;
//
// setup
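// Tape the determinant calculation (and, if PASS_JACOBIAN_TO_CODE_GEN,
// its gradient), generate and compile the corresponding source code,
// and return the compiled function in fun.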
void setup(
// inputs
size_t size ,
// outputs
code_gen_fun& fun )
{ // optimization options
std::string optimize_options =
"no_conditional_skip no_compare_op no_print_for_op";
//
// object for computing determinant
CppAD::det_by_minor<ac_double> ac_det(size);
//
// number of independent variables
size_t nx = size * size;
//
// choose a matrix
CppAD::vector<double> matrix(nx);
CppAD::uniform_01(nx, matrix);
//
// copy to independent variables
ac_vector ac_A(nx);
for(size_t j = 0; j < nx; ++j)
ac_A[j] = matrix[j];
//
// declare independent variables for function computation
bool record_compare = false;
size_t abort_op_index = 0;
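// start recording ac_double operations; abort_op_index zero disables
// aborting the recording at a specific operation index, and
// record_compare false suppresses recording of comparison operators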
CppAD::Independent(ac_A, abort_op_index, record_compare);
//
// AD computation of the determinant
ac_vector ac_detA(1);
ac_detA[0] = ac_det(ac_A);
//
// create function object for f : A -> detA
CppAD::ADFun<c_double> c_f;
c_f.Dependent(ac_A, ac_detA);
if( global_option["optimize"] )
c_f.optimize(optimize_options);
# if ! PASS_JACOBIAN_TO_CODE_GEN
// f(x) is the determinant function
code_gen_fun::evaluation_enum eval_jac = code_gen_fun::dense_enum;
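// this constructor generates source code for c_f, compiles it, and
// links the result as a dynamic library; dense_enum requests that the
// library also evaluate a dense Jacobian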
code_gen_fun f_tmp("det_minor", c_f, eval_jac);
fun.swap(f_tmp);
# else
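// base2ad converts c_f, which computes with c_double, to a function
// that computes with ac_double, so that the derivative calculations
// below are themselves recorded on an ac_double tape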
CppAD::ADFun<ac_double, c_double> ac_f;
ac_f = c_f.base2ad();
//
// declare independent variables for gradient computation
CppAD::Independent(ac_A, abort_op_index, record_compare);
//
// vectors of reverse mode weights
CppAD::vector<ac_double> ac_w(1);
ac_w[0] = ac_double(1.0);
//
// AD computation of the gradient
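// Forward(0, x) computes the zero order (function) values and
// Reverse(1, w) returns w * f'(x); with w = 1 this is the gradient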
ac_vector ac_gradient(nx);
ac_f.Forward(0, ac_A);
ac_gradient = ac_f.Reverse(1, ac_w);
//
// create function object for g : A -> det'(A)
CppAD::ADFun<c_double> c_g;
c_g.Dependent(ac_A, ac_gradient);
if( global_option["optimize"] )
c_g.optimize(optimize_options);
// g(x) is the Jacobian of the determinant
code_gen_fun g_tmp("det_minor", c_g);
fun.swap(g_tmp);
# endif
}
}
bool link_det_minor(
const std::string& job ,
size_t size ,
size_t repeat ,
CppAD::vector<double> &matrix ,
CppAD::vector<double> &gradient )
{ CPPAD_ASSERT_UNKNOWN( matrix.size() == size * size );
CPPAD_ASSERT_UNKNOWN( gradient.size() == size * size );
// --------------------------------------------------------------------
// check global options
const char* valid[] = { "onetape", "optimize"};
size_t n_valid = sizeof(valid) / sizeof(valid[0]);
typedef std::map<std::string, bool>::iterator iterator;
//
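// return false (this test is skipped) if an option other than
// onetape or optimize is set to true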
for(iterator itr=global_option.begin(); itr!=global_option.end(); ++itr)
{ if( itr->second )
{ bool ok = false;
for(size_t i = 0; i < n_valid; i++)
ok |= itr->first == valid[i];
if( ! ok )
return false;
}
}
// --------------------------------------------------------------------
//
// function object mapping matrix to gradient of determinant
static code_gen_fun static_fun;
//
// size corresponding to static_fun
static size_t static_size = 0;
//
// number of independent variables
size_t nx = size * size;
//
// onetape
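// when true, setup creates the compiled function once and run reuses
// it; when false, run re-creates the compiled function on each repeat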
bool onetape = global_option["onetape"];
// ----------------------------------------------------------------------
if( job == "setup" )
{ if( onetape )
{ setup(size, static_fun);
static_size = size;
}
else
{ static_size = 0;
}
return true;
}
if( job == "teardown" )
{ code_gen_fun fun;
static_fun.swap(fun);
return true;
}
// -----------------------------------------------------------------------
CPPAD_ASSERT_UNKNOWN( job == "run" );
if( onetape ) while(repeat--)
{ // wrap the assert in an if so that static_size is used even when
// the assert is compiled out (avoids an unused variable warning)
if( size != static_size )
{ CPPAD_ASSERT_UNKNOWN( size == static_size );
}
// get next matrix
CppAD::uniform_01(nx, matrix);
// evaluate the gradient
# if PASS_JACOBIAN_TO_CODE_GEN
gradient = static_fun(matrix);
# else
gradient = static_fun.jacobian(matrix);
# endif
}
else while(repeat--)
{ setup(size, static_fun);
static_size = size;
// get next matrix
CppAD::uniform_01(nx, matrix);
// evaluate the gradient
# if PASS_JACOBIAN_TO_CODE_GEN
gradient = static_fun(matrix);
# else
gradient = static_fun.jacobian(matrix);
# endif
}
return true;
}
/* {xrst_code}
{xrst_spell_on}
{xrst_end cppadcg_det_minor.cpp}
*/