217 for (
int i = 0; i <
m_npars; ++i) {
236 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
237 if (pars[ipar]->is_free()) {
238 if ((*fct.
curvature())(ipar,ipar) == 0.0) {
240 *
m_logger <<
" Parameter \"" << pars[ipar]->name();
241 *
m_logger <<
"\" has zero curvature.";
242 *
m_logger <<
" Fix parameter." << std::endl;
259 (*m_logger)(
">Iteration %3d: -logL=%.3f, Lambda=%.1e",
262 #if defined(G_DEBUG_OPT)
263 std::cout <<
"Initial iteration: func=" <<
m_value <<
", Lambda="
266 #if defined(G_DEBUG_SHOW_GRAD_COVAR)
274 bool check_for_freeze =
true;
283 double grad_max = 0.0;
285 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
286 if (pars[ipar]->is_free()) {
287 double grad = pars[ipar]->factor_gradient();
288 if (std::abs(grad) > std::abs(grad_max)) {
295 *
m_logger <<
" (" << pars[ipar]->name() <<
")";
296 *
m_logger <<
" has a zero gradient." << std::endl;
304 std::string stalled =
"";
308 stalled =
" (stalled)";
313 std::string parname =
"";
314 if (grad_imax != -1) {
315 parname =
" [" + pars[grad_imax]->name() +
":" +
318 (*m_logger)(
"%sIteration %3d: -logL=%.3f, Lambda=%.1e,"
319 " delta=%.3f, step=%.1e, max(|grad|)=%f%s%s",
322 parname.c_str(), stalled.c_str());
324 #if defined(G_DEBUG_OPT)
325 std::cout <<
"Iteration " <<
m_iter <<
": func="
326 <<
m_value <<
", Lambda=" << lambda_old
327 <<
", delta=" <<
m_delta << std::endl;
329 #if defined(G_DEBUG_SHOW_GRAD_COVAR)
352 if (check_for_freeze) {
356 check_for_freeze =
false;
361 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
367 << pars[ipar]->name()
368 <<
"\" after convergence was"
369 " reached with frozen"
370 " parameter." << std::endl;
408 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
413 << pars[ipar]->name()
414 <<
"\" after convergence was"
415 " reached with frozen"
416 " parameter." << std::endl;
434 (*m_logger)(
">Iteration %3d: -logL=%.3f, Lambda=%.1e (no free parameters)",
474 bool diag_loaded =
false;
477 for (
int i = 0; i < 2; ++i) {
483 for (
int ipar = 0; ipar <
npars; ++ipar) {
486 if (x[ipar] >= 0.0) {
487 pars[ipar]->factor_error(
sqrt(x[ipar]));
490 pars[ipar]->factor_error(0.0);
504 *
m_logger <<
"Non-Positive definite curvature matrix encountered."
506 *
m_logger <<
"Load diagonal elements with 1e-10."
507 <<
" Fit errors may be inaccurate."
512 *curvature = save_curvature;
513 for (
int ipar = 0; ipar <
npars; ++ipar) {
514 (*curvature)(ipar,ipar) += 1.0e-10;
529 *
m_logger <<
"Non-Positive definite curvature matrix encountered,"
530 <<
" even after diagonal loading." << std::endl;
535 catch (std::exception &e) {
564 result.append(
"=== GOptimizerLM ===");
578 result.append(
"converged");
581 result.append(
"stalled");
584 result.append(
"singular curvature matrix encountered");
587 result.append(
"curvature matrix not positive definite");
590 result.append(
"errors are inaccurate");
593 result.append(
"unknown");
725 #if defined(G_DEBUG_ITER)
726 std::cout <<
"GOptimizerLM::iteration: enter" << std::endl;
746 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
747 save_pars[ipar] = pars[ipar]->factor_value();
751 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
752 (*curvature)(ipar,ipar) *= (1.0 +
m_lambda);
753 (*grad)[ipar] = -(*grad)[ipar];
757 #if defined(G_DEBUG_ITER)
758 std::cout <<
"Gradient : ";
759 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
760 std::cout << (*grad)[ipar] <<
" ";
762 std::cout << std::endl;
763 std::cout <<
"Curvature: ";
764 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
765 for (
int jpar = 0; jpar <
m_npars; ++jpar) {
766 std::cout << (*curvature)(ipar,jpar) <<
" ";
769 std::cout << std::endl;
774 *grad = curvature->
solve(*grad);
779 *
m_logger <<
"GOptimizerLM::iteration: "
780 <<
"Curvature matrix not positive definite."
785 catch (std::exception &e) {
793 #if defined(G_DEBUG_ITER)
794 std::cout <<
"Step size = " << step << std::endl;
798 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
801 if (pars[ipar]->is_free()) {
804 double p = pars[ipar]->factor_value();
805 double p_min = pars[ipar]->factor_min();
806 double p_max = pars[ipar]->factor_max();
809 p += (*grad)[ipar] * step;
812 #if defined(G_DEBUG_ITER)
813 std::cout <<
"Trial factor value of ";
814 std::cout << pars[ipar]->name() <<
" = ";
815 std::cout << p << std::endl;
819 if (pars[ipar]->has_min() && p < p_min) {
822 *
m_logger <<
" Parameter \"" << pars[ipar]->name();
823 *
m_logger <<
"\" hits minimum " << p_min <<
" more than ";
825 *
m_logger <<
" Fix parameter at minimum for now." << std::endl;
832 *
m_logger <<
" Parameter \"" << pars[ipar]->name();
841 else if (pars[ipar]->has_max() && p > p_max) {
844 *
m_logger <<
" Parameter \"" << pars[ipar]->name();
845 *
m_logger <<
"\" hits maximum " << p_max <<
" more than ";
847 *
m_logger <<
" Fix parameter at maximum for now." << std::endl;
854 *
m_logger <<
" Parameter \"" << pars[ipar]->name();
869 pars[ipar]->factor_value(p);
872 #if defined(G_DEBUG_ITER)
873 std::cout <<
"New value of ";
874 std::cout << pars[ipar]->name() <<
" = ";
875 std::cout << pars[ipar]->value() << std::endl;
893 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
894 if (pars[ipar]->is_free()) {
895 if ((*fct.
curvature())(ipar,ipar) == 0.0) {
897 *
m_logger <<
" Parameter \"" << pars[ipar]->name();
898 *
m_logger <<
"\" has zero curvature.";
899 *
m_logger <<
" Fix parameter." << std::endl;
915 #if defined(G_DEBUG_ITER)
916 std::cout <<
"Function value " <<
m_value;
917 std::cout <<
" (was " << save_value <<
")" << std::endl;
922 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
923 if (pars[ipar]->factor_value() != save_pars[ipar]) {
965 *curvature = save_curvature;
966 for (
int ipar = 0; ipar <
m_npars; ++ipar) {
967 pars[ipar]->factor_value(save_pars[ipar]);
974 #if defined(G_DEBUG_ITER)
975 std::cout <<
"GOptimizerLM::iteration: exit" << std::endl;
1007 for (
int ipar = 0; ipar < pars.
size(); ++ipar) {
1010 double p = pars[ipar]->factor_value();
1011 double p_min = pars[ipar]->factor_min();
1012 double p_max = pars[ipar]->factor_max();
1013 double delta = grad[ipar];
1016 if (pars[ipar]->has_min()) {
1017 double step_min = (delta < 0.0) ? (p_min - p)/delta : 1.0;
1018 if (step_min > 0.0) {
1019 if (step_min < step) {
1030 *
m_logger <<
"\" does not drive optimization step anymore.";
1038 if (pars[ipar]->has_max()) {
1039 double step_max = (delta > 0.0) ? (p_max - p)/delta : 1.0;
1040 if (step_max > 0.0) {
1041 if (step_max < step) {
1052 *
m_logger <<
"\" does not drive optimization step anymore.";
1062 if (ipar_bnd != -1) {
1066 *
m_logger << pars[ipar_bnd]->name();
1067 *
m_logger <<
"\" drives optimization step (step=";
1068 *
m_logger << step <<
")" << std::endl;
Exception handler interface definition.
Levenberg-Marquardt optimizer class interface definition.
#define G_LM_NOT_POSTIVE_DEFINITE
GVector log(const GVector &vector)
Computes natural logarithm of vector elements.
GVector sqrt(const GVector &vector)
Computes square root of vector elements.
Information logger interface definition.
Sparse matrix class interface definition.
GMatrixSparse cholesky_decompose(const bool &compress=true) const
Return Cholesky decomposition.
GVector solve(const GVector &vector) const
Solves linear matrix equation.
GVector cholesky_solver(const GVector &vector, const bool &compress=true) const
Cholesky solver.
Optimizer function abstract base class.
virtual double value(void) const =0
virtual GMatrixSparse * curvature(void)=0
virtual GVector * gradient(void)=0
virtual void eval(const GOptimizerPars &pars)=0
Levenberg-Marquardt optimizer class.
bool m_step_adjust
Adjust step size to boundaries.
int m_npars
Number of parameters.
int m_max_iter
Maximum number of iterations.
GLog * m_logger
Pointer to optional logger.
double m_lambda_inc
Lambda increase.
int m_max_dec
Maximum number of function decreases.
std::vector< int > m_hit_minimum
Bookkeeping of successive minimum hits.
int m_num_dec
Number of function decreases.
std::vector< bool > m_hit_boundary
Bookkeeping array for boundary hits.
virtual GOptimizerLM * clone(void) const
Clone object.
void init_members(void)
Initialise class members.
GOptimizerLM & operator=(const GOptimizerLM &opt)
Assignment operator.
void free_members(void)
Delete class members.
const double & lambda_inc(void) const
Return lambda increment value.
double m_value
Actual function value.
int m_max_stall
Maximum number of stalls.
double m_lambda_dec
Lambda decrease.
std::vector< bool > m_par_freeze
Bookkeeping of parameter freeze.
GOptimizerLM(void)
Void constructor.
double step_size(const GVector &grad, const GOptimizerPars &pars)
Return LM step size.
virtual void errors(GOptimizerFunction &fct, GOptimizerPars &pars)
Compute parameter uncertainties.
double iteration(GOptimizerFunction &fct, GOptimizerPars &pars)
Perform one LM iteration.
void copy_members(const GOptimizerLM &opt)
Copy class members.
double m_eps
Absolute precision.
virtual std::string print(const GChatter &chatter=NORMAL) const
Print optimizer information.
virtual void optimize(GOptimizerFunction &fct, GOptimizerPars &pars)
Optimize function parameters.
std::vector< int > m_hit_maximum
Bookkeeping of successive maximum hits.
std::vector< bool > m_par_remove
Bookkeeping of parameter removal.
double m_lambda_start
Initial lambda start value.
int m_nfree
Number of free parameters.
double m_delta
Function improvement.
virtual void clear(void)
Clear object.
int m_max_hit
Maximum number of successive hits.
double m_accept_dec
Acceptable function decrease.
virtual ~GOptimizerLM(void)
Destructor.
double m_lambda
Actual lambda.
virtual int status(void) const
Return optimizer status.
const int & npars(void) const
Return number of model parameters.
Optimizer parameter container class.
int size(void) const
Return number of parameters in container.
int nfree(void) const
Return number of free parameters.
Abstract optimizer base class.
void free_members(void)
Delete class members.
virtual GOptimizer & operator=(const GOptimizer &opt)
Assignment operator.
void init_members(void)
Initialise class members.
std::string parformat(const std::string &s, const int &indent=0)
Convert string in parameter format.
std::string str(const unsigned short int &value)
Convert unsigned short integer value into string.