// (extraction artifact removed: stray line-number residue)
// Copyright (C) 2009 by Thomas Moulard, AIST, CNRS, INRIA.
//
// This file is part of the roboptim.
//
// roboptim is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// roboptim is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with roboptim. If not, see <http://www.gnu.org/licenses/>.
#include "shared-tests/fixture.hh"
#include <iostream>
#include <roboptim/core/io.hh>
#include <roboptim/core/differentiable-function.hh>
#include <roboptim/core/util.hh>
#include <roboptim/core/filter/cached-function.hh>
using namespace roboptim;
// Shared stream capturing the test's output; F's evaluation callbacks log
// into it, and it is matched against the reference pattern file at the end.
boost::shared_ptr<boost::test_tools::output_test_stream> output;
struct F : public DifferentiableFunction
{
F () : DifferentiableFunction (1, 1, "2 * x")
{}
void impl_compute (result_t& res, const argument_t& argument) const throw ()
{
(*output) << "computation (not cached)" << std::endl;
res.setZero ();
res[0] = 2. * argument[0];
}
void impl_gradient (gradient_t& grad, const argument_t&,
size_type) const throw ()
{
(*output) << "gradient computation (not cached)" << std::endl;
grad.setZero ();
grad[0] = 2.;
}
};
BOOST_FIXTURE_TEST_SUITE (core, TestSuiteConfiguration)

// Verify that CachedFunction wraps a DifferentiableFunction transparently:
// values and gradients agree with the wrapped function, and repeated
// evaluations at the same point hit the cache (no extra log lines).
BOOST_AUTO_TEST_CASE (cached_function)
{
  output = retrievePattern ("cached-function");

  boost::shared_ptr<F> f (new F ());
  CachedFunction<DifferentiableFunction> cachedF (f);

  (*output) << cachedF << ":" << std::endl
            << std::endl;

  Function::vector_t x (1);
  // Sample x = 0, 0.5, 1, ..., 9.5 (0.5 * step is exact in binary,
  // matching the original half-step sweep over [0, 10)).
  for (int step = 0; step < 20; ++step)
    {
      x[0] = 0.5 * step;
      // Evaluate twice: the second call should be served from the cache.
      (*output) << cachedF (x) << std::endl;
      (*output) << cachedF (x) << std::endl;
      BOOST_CHECK_EQUAL ((*f) (x)[0], cachedF (x)[0]);
      (*output) << cachedF.gradient (x) << std::endl;
      (*output) << cachedF.gradient (x) << std::endl;
    }

  std::cout << output->str () << std::endl;
  BOOST_CHECK (output->match_pattern ());
}

BOOST_AUTO_TEST_SUITE_END ()
// (extraction artifact removed: stray delimiter)