[roboptim-commit] [SCM] roboptim branch, core, updated. 418a767ab2f1fd2e490077fc8e1fedaf3502b13b
From: Thomas M. <tho...@us...> - 2009-07-23 07:00:58
This is an automated email from the git hooks/post-receive script. It was
generated because a ref change was pushed to the repository containing
the project "roboptim".

The branch, core has been updated
       via  418a767ab2f1fd2e490077fc8e1fedaf3502b13b (commit)
      from  3e805364b175388f3cc1c0c5cd9bd3e38cad9e41 (commit)

Those revisions listed above that are new to this repository have
not appeared on any other notification email; so we list those
revisions in full, below.

- Log -----------------------------------------------------------------
commit 418a767ab2f1fd2e490077fc8e1fedaf3502b13b
Author: Thomas Moulard <tho...@gm...>
Date:   Thu Jul 23 16:00:37 2009 +0900

    Enhance finite difference computation.

    * include/roboptim/core/finite-difference-gradient.hh:
    Add BadGradient exception class and add checkGradientAndThrow
    free function.
    * src/finite-difference-gradient.cc: Implement new class and free
    function.

    Signed-off-by: Thomas Moulard <tho...@gm...>

diff --git a/ChangeLog b/ChangeLog
index 0def12e..2f29542 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,5 +1,14 @@
 2009-07-23  Thomas Moulard  <tho...@gm...>

+	Enhance finite difference computation.
+	* include/roboptim/core/finite-difference-gradient.hh:
+	Add BadGradient exception class and add checkGradientAndThrow
+	free function.
+	* src/finite-difference-gradient.cc: Implement new class and free
+	function.
+
+2009-07-23  Thomas Moulard  <tho...@gm...>
+
 	Enhance finite difference gradient computation.
 	* include/roboptim/core/finite-difference-gradient.hh:
 	Add missing const in checkGradient signature, fix default
diff --git a/include/roboptim/core/finite-difference-gradient.hh b/include/roboptim/core/finite-difference-gradient.hh
index 14371e6..599e57b 100644
--- a/include/roboptim/core/finite-difference-gradient.hh
+++ b/include/roboptim/core/finite-difference-gradient.hh
@@ -17,12 +17,68 @@
 #ifndef ROBOPTIM_CORE_FINITE_DIFFERENCE_GRADIENT_HH
 # define ROBOPTIM_CORE_FINITE_DIFFERENCE_GRADIENT_HH

+# include <stdexcept>
 # include <roboptim/core/fwd.hh>
 # include <roboptim/core/derivable-function.hh>

 namespace roboptim
 {
+  /// \brief Default threshold for checkGradient.
+  static const double finiteDifferenceThreshold = 1e-4;
+  /// \brief Default epsilon for finite difference class.
+  static const double finiteDifferenceEpsilon = 1e-8;
+
+  /// \brief Exception thrown when a gradient check fails.
+  class BadGradient : public std::runtime_error
+  {
+  public:
+    /// \brief Import vector.
+    typedef DerivableFunction::vector_t vector_t;
+    /// \brief Import gradient.
+    typedef DerivableFunction::gradient_t gradient_t;
+    /// \brief Import value_type.
+    typedef DerivableFunction::value_type value_type;
+
+    /// \brief Default constructor.
+    BadGradient (const vector_t& x,
+                 const gradient_t& analyticalGradient,
+                 const gradient_t& finiteDifferenceGradient,
+                 const value_type& threshold);
+
+    virtual ~BadGradient () throw ();
+
+    /// \brief Display the exception on the specified output stream.
+    ///
+    /// \param o output stream used for display
+    /// \return output stream
+    virtual std::ostream& print (std::ostream& o) const throw ();
+
+    /// \brief Gradient has been computed for this point.
+    vector_t x_;
+
+    /// \brief Analytical gradient.
+    gradient_t analyticalGradient_;
+
+    /// \brief Gradient computed through finite differences.
+    gradient_t finiteDifferenceGradient_;
+
+    /// \brief Maximum error.
+    value_type maxDelta_;
+
+    /// \brief Allowed threshold.
+    value_type threshold_;
+  };
+
+  /// \brief Override operator<< to handle exception display.
+  ///
+  /// \param o output stream used for display
+  /// \param f function to be displayed
+  /// \return output stream
+  std::ostream& operator<< (std::ostream& o, const BadGradient& f);
+
   /// \addtogroup roboptim_function
   /// @{
@@ -49,7 +105,9 @@ namespace roboptim
     /// using finite differences.
     /// \param f function that will be wrapped
     /// \param e epsilon used in finite difference computation
-    FiniteDifferenceGradient (const Function& f, value_type e = 1e-8) throw ();
+    FiniteDifferenceGradient (const Function& f,
+                              value_type e = finiteDifferenceEpsilon)
+      throw ();

     ~FiniteDifferenceGradient () throw ();

   protected:
@@ -73,11 +131,19 @@ namespace roboptim
   /// \param x point where the gradient will be evaluated
   /// \param threshold maximum tolerated error
   /// \return true if valid, false if not
-  bool checkGradient (const DerivableFunction& function,
-                      int functionId,
-                      const Function::vector_t& x,
-                      Function::value_type threshold = 1e-4) throw ();
+  bool checkGradient
+  (const DerivableFunction& function,
+   int functionId,
+   const Function::vector_t& x,
+   Function::value_type threshold = finiteDifferenceThreshold)
+    throw ();
+
+  void checkGradientAndThrow
+  (const DerivableFunction& function,
+   int functionId,
+   const Function::vector_t& x,
+   Function::value_type threshold = finiteDifferenceThreshold)
+    throw (BadGradient);

   /// Example shows finite differences gradient use.
   /// \example finite-difference-gradient.cc
diff --git a/src/finite-difference-gradient.cc b/src/finite-difference-gradient.cc
index 224244a..f8cd62e 100644
--- a/src/finite-difference-gradient.cc
+++ b/src/finite-difference-gradient.cc
@@ -15,10 +15,57 @@
 // You should have received a copy of the GNU Lesser General Public License
 // along with roboptim. If not, see <http://www.gnu.org/licenses/>.

+#include <boost/numeric/ublas/io.hpp>
+
+#include <roboptim/core/indent.hh>
 #include <roboptim/core/finite-difference-gradient.hh>

 namespace roboptim
 {
+  BadGradient::BadGradient (const vector_t& x,
+                            const gradient_t& analyticalGradient,
+                            const gradient_t& finiteDifferenceGradient,
+                            const value_type& threshold)
+    : std::runtime_error ("bad gradient"),
+      x_ (x),
+      analyticalGradient_ (analyticalGradient),
+      finiteDifferenceGradient_ (finiteDifferenceGradient),
+      maxDelta_ (),
+      threshold_ (threshold)
+  {
+    gradient_t delta = analyticalGradient - finiteDifferenceGradient;
+    for (unsigned i = 0; i < delta.size (); ++i)
+      {
+        delta[i] = fabs (delta[i]);
+        if (maxDelta_ < delta[i])
+          maxDelta_ = delta[i];
+      }
+  }
+
+  BadGradient::~BadGradient () throw ()
+  {}
+
+  std::ostream&
+  BadGradient::print (std::ostream& o) const throw ()
+  {
+    o << this->what () << incindent << iendl
+      << "X: " << x_ << iendl
+      << "Analytical gradient: " << analyticalGradient_ << iendl
+      << "Finite difference gradient: " << finiteDifferenceGradient_
+      << iendl
+      << "Max. delta: " << maxDelta_ << iendl
+      << "Max. allowed delta: " << threshold_ << decindent;
+    return o;
+  }
+
+  std::ostream&
+  operator<< (std::ostream& o, const BadGradient& bg)
+  {
+    return bg.print (o);
+  }
+
+
   FiniteDifferenceGradient::FiniteDifferenceGradient (const Function& adaptee,
                                                       value_type epsilon)
     throw ()
@@ -62,10 +109,11 @@ namespace roboptim
   }

-  bool checkGradient (const DerivableFunction& function,
-                      int i,
-                      const Function::vector_t& x,
-                      Function::value_type threshold) throw ()
+  bool
+  checkGradient (const DerivableFunction& function,
+                 int i,
+                 const Function::vector_t& x,
+                 Function::value_type threshold) throw ()
   {
     FiniteDifferenceGradient fdfunction (function);
     DerivableFunction::gradient_t grad = function.gradient (x, i);
@@ -77,4 +125,19 @@ namespace roboptim
     return true;
   }

+  void
+  checkGradientAndThrow (const DerivableFunction& function,
+                         int i,
+                         const Function::vector_t& x,
+                         Function::value_type threshold)
+    throw (BadGradient)
+  {
+    FiniteDifferenceGradient fdfunction (function);
+    DerivableFunction::gradient_t grad = function.gradient (x, i);
+    DerivableFunction::gradient_t fdgrad = fdfunction.gradient (x, i);
+
+    if (!checkGradient (function, i, x, threshold))
+      throw BadGradient (x, grad, fdgrad, threshold);
+  }
+
 } // end of namespace roboptim

-----------------------------------------------------------------------

Summary of changes:
 ChangeLog                                          |    9 +++
 .../roboptim/core/finite-difference-gradient.hh    |   76 ++++++++++++++++++--
 src/finite-difference-gradient.cc                  |   71 +++++++++++++++++-
 3 files changed, 147 insertions(+), 9 deletions(-)

hooks/post-receive
--
roboptim
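
For illustration only (not part of the commit): a minimal sketch of how the
new checkGradientAndThrow free function and BadGradient exception are meant
to be used from client code. MyFunction is a hypothetical DerivableFunction
subclass standing in for any user-defined function with an analytical
gradient, and the input size of 4 is likewise assumed; only roboptim entities
appearing in the diff above are relied upon.

    #include <iostream>
    #include <roboptim/core/finite-difference-gradient.hh>

    int main ()
    {
      // Hypothetical DerivableFunction subclass with an analytical
      // gradient (its definition is not part of this commit).
      MyFunction f;

      // Point at which the gradient is checked; vector_t is a
      // Boost.uBLAS vector and clear () zeroes every component.
      roboptim::Function::vector_t x (4);
      x.clear ();

      try
        {
          // Compare the analytical gradient of component 0 with a
          // finite difference estimate; throws BadGradient when the
          // maximum deviation exceeds the default threshold (1e-4).
          roboptim::checkGradientAndThrow (f, 0, x);
        }
      catch (roboptim::BadGradient& bg)
        {
          // The new operator<< overload prints both gradients, the
          // maximum delta, and the allowed threshold.
          std::cerr << bg << std::endl;
          return 1;
        }
      return 0;
    }

Note that checkGradientAndThrow recomputes both gradients before delegating
the pass/fail decision to checkGradient, so the thrown exception can carry
the analytical and finite difference gradients for display.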