// GOptimizerLM::optimize()
// ...
for (int i = 0; i < m_npars; ++i) {
    // ...
}
// ...

// Fix parameters that have zero curvature
for (int ipar = 0; ipar < m_npars; ++ipar) {
    if (pars[ipar]->is_free()) {
        if ((*fct.curvature())(ipar,ipar) == 0.0) {
            // ...
            *m_logger << " Parameter \"" << pars[ipar]->name();
            *m_logger << "\" = " << pars[ipar]->value();
            *m_logger << " with gradient " << pars[ipar]->gradient();
            *m_logger << " has zero curvature.";
            *m_logger << " Fix parameter." << std::endl;
            // ...
        }
    }
}
// Log the initial iteration
(*m_logger)(">Iteration %3d: -logL=%.3f, Lambda=%.1e",
            /* ... */);
// ...

#if defined(G_DEBUG_OPT)
std::cout << "Initial iteration: func=" << m_value << ", Lambda="
          /* ... */ << std::endl;
#endif
// ...
#if defined(G_DEBUG_SHOW_GRAD_COVAR)
// ...
#endif
// ...

// Flag that a check for frozen parameters still has to be done once
// convergence is reached
bool check_for_freeze = true;
// Determine the maximum (absolute) factor gradient among all free parameters
double grad_max = 0.0;
// ...
for (int ipar = 0; ipar < m_npars; ++ipar) {
    if (pars[ipar]->is_free()) {
        double grad = pars[ipar]->factor_gradient();
        if (std::abs(grad) > std::abs(grad_max)) {
            // ...
        }
    }
}
// ...

// Build an optional " (stalled)" suffix and the name of the parameter with
// the largest gradient for the iteration log
std::string stalled = "";
// ...
    stalled = " (stalled)";
// ...
std::string parname = "";
if (grad_imax != -1) {
    parname = " [" + pars[grad_imax]->name() + ":" + /* ... */;
}
// ...

// Log the iteration result
(*m_logger)("%sIteration %3d: -logL=%.3f, Lambda=%.1e,"
            " delta=%.3f, step=%.1e, max(|grad|)=%f%s%s",
            /* ... */
            parname.c_str(), stalled.c_str());
#if defined(G_DEBUG_OPT)
std::cout << "Iteration " << m_iter << ": func="
          << m_value << ", Lambda=" << lambda_old
          << ", delta=" << m_delta << std::endl;
#endif
// ...
#if defined(G_DEBUG_SHOW_GRAD_COVAR)
// ...
#endif
// If convergence was reached while parameters were temporarily frozen,
// free them again and continue the optimisation
if (check_for_freeze) {
    // ...
    check_for_freeze = false;
    // ...
    for (int ipar = 0; ipar < m_npars; ++ipar) {
        // ...
        *m_logger << /* ... */ << pars[ipar]->name()
                  << "\" after convergence was"
                     " reached with frozen"
                     " parameter." << std::endl;
        // ...
    }
    // ...
}
// ...

for (int ipar = 0; ipar < m_npars; ++ipar) {
    // ...
    *m_logger << /* ... */ << pars[ipar]->name()
              << "\" after convergence was"
                 " reached with frozen"
                 " parameter." << std::endl;
    // ...
}
// ...

// Log when there are no free parameters to fit
(*m_logger)(">Iteration %3d: -logL=%.3f, Lambda=%.1e (no free parameters)",
            /* ... */);
// GOptimizerLM::errors(): compute parameter uncertainties from the curvature
// matrix; retry once with a loaded diagonal if it is not positive definite
bool diag_loaded = false;
// ...
for (int i = 0; i < 2; ++i) {
    // ...
    for (int ipar = 0; ipar < npars; ++ipar) {
        // ...
        if (x[ipar] >= 0.0) {
            pars[ipar]->factor_error(sqrt(x[ipar]));
        }
        // ...
        pars[ipar]->factor_error(0.0);
        // ...
    }
    // ...
    *m_logger << "Non-Positive definite curvature matrix encountered." /* ... */;
    *m_logger << "Load diagonal elements with 1e-10."
              << " Fit errors may be inaccurate." /* ... */;
    // ...
    *curvature = save_curvature;
    for (int ipar = 0; ipar < npars; ++ipar) {
        if ((*curvature)(ipar,ipar) != 0.0) {
            (*curvature)(ipar,ipar) += 1.0e-10;
        }
    }
    // ...
    *m_logger << "Non-Positive definite curvature matrix encountered,"
              << " even after diagonal loading." << std::endl;
    // ...
}
// ...
catch (std::exception &e) {
    // ...
}
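The excerpt above computes the parameter uncertainties: each error is the square root of the corresponding diagonal element of the covariance matrix, that is of the inverse curvature matrix, and if the curvature matrix turns out not to be positive definite the computation is retried once after loading every non-zero diagonal element with 1e-10. A toy, self-contained illustration of both ideas for a 2x2 matrix (plain standard C++ with made-up numbers, not the GOptimizerLM code):

// Illustrative sketch only: errors from the diagonal of the inverse curvature
// matrix, with the same 1e-10 diagonal loading fall-back as used above.
#include <cmath>
#include <cstdio>

int main(void)
{
    double h[2][2] = { {25.0, 3.0}, {3.0, 4.0} };   // toy curvature matrix

    // If the matrix is not positive definite (checked here, for simplicity,
    // via the determinant only), load the non-zero diagonal elements with 1e-10
    if (h[0][0] * h[1][1] - h[0][1] * h[1][0] <= 0.0) {
        for (int i = 0; i < 2; ++i) {
            if (h[i][i] != 0.0) {
                h[i][i] += 1.0e-10;
            }
        }
    }

    // Invert the curvature matrix to obtain the covariance matrix
    double det   = h[0][0] * h[1][1] - h[0][1] * h[1][0];
    double cov00 = h[1][1] / det;
    double cov11 = h[0][0] / det;

    // Parameter errors are the square roots of the covariance diagonal
    std::printf("error(0)=%g error(1)=%g\n", std::sqrt(cov00), std::sqrt(cov11));
    return 0;
}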
// GOptimizerLM::status_string(): translate the fit status code into text
// ...
    status = "singular curvature matrix encountered";
// ...
    status = "curvature matrix not positive definite";
// ...
    status = "errors are inaccurate";
// ...

// GOptimizerLM::print(): append the header
result.append("=== GOptimizerLM ===");
// GOptimizerLM::iteration()
#if defined(G_DEBUG_ITER)
std::cout << "GOptimizerLM::iteration: enter" << std::endl;
#endif
// ...

// Save the current parameter factor values
for (int ipar = 0; ipar < m_npars; ++ipar) {
    save_pars[ipar] = pars[ipar]->factor_value();
}
// ...

// Set up the damped system: inflate the curvature diagonal by (1 + lambda)
// and negate the gradient
for (int ipar = 0; ipar < m_npars; ++ipar) {
    (*curvature)(ipar,ipar) *= (1.0 + m_lambda);
    (*grad)[ipar] = -(*grad)[ipar];
}
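The loop above applies the usual Levenberg-Marquardt damping: every diagonal element of the curvature matrix is inflated by (1 + m_lambda) before the system is solved for the parameter step, so a large lambda shortens the step and turns it towards a scaled gradient-descent direction, while a small lambda recovers the plain Newton step. A minimal, self-contained sketch of this effect for a toy 2x2 system (plain standard C++, not GammaLib code; the matrix, gradient and lambda values are made up):

// Illustrative sketch only: solve the damped system M * dp = -g for a toy
// 2x2 curvature matrix [a b; b d] and watch the step shrink as lambda grows.
#include <cstdio>
#include <initializer_list>

int main(void)
{
    double a = 4.0, b = 1.0, d = 2.0;   // toy curvature matrix [a b; b d]
    double g0 = 1.0, g1 = -0.5;         // toy gradient

    for (double lambda : {0.0, 0.001, 1.0, 100.0}) {

        // Inflate the diagonal by (1 + lambda), as in the loop above
        double a_d = a * (1.0 + lambda);
        double d_d = d * (1.0 + lambda);

        // Solve the damped 2x2 system for the parameter step dp
        double det = a_d * d_d - b * b;
        double dp0 = (-g0 * d_d + g1 * b) / det;
        double dp1 = (-g1 * a_d + g0 * b) / det;

        std::printf("lambda=%-8g dp=(%g, %g)\n", lambda, dp0, dp1);
    }
    return 0;
}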
#if defined(G_DEBUG_ITER)
std::cout << "Gradient : ";
for (int ipar = 0; ipar < m_npars; ++ipar) {
    std::cout << (*grad)[ipar] << " ";
}
std::cout << std::endl;
std::cout << "Curvature: ";
for (int ipar = 0; ipar < m_npars; ++ipar) {
    for (int jpar = 0; jpar < m_npars; ++jpar) {
        std::cout << (*curvature)(ipar,jpar) << " ";
    }
}
std::cout << std::endl;
#endif
// Solve the (damped) system for the parameter step
*grad = curvature->solve(*grad);
// ...

// ... (handle a curvature matrix that is not positive definite)
*m_logger << "GOptimizerLM::iteration: "
          << "Curvature matrix not positive definite."
          /* ... */;
// ...
catch (std::exception &e) {
    // ...
}
// ...

#if defined(G_DEBUG_ITER)
std::cout << "Step size = " << step << std::endl;
#endif
// Derive the new parameter vector, respecting the parameter boundaries
for (int ipar = 0; ipar < m_npars; ++ipar) {

    // Consider only free parameters
    if (pars[ipar]->is_free()) {

        // Get the current parameter factor value and its boundaries
        double p     = pars[ipar]->factor_value();
        double p_min = pars[ipar]->factor_min();
        double p_max = pars[ipar]->factor_max();
        // ...

        // Compute the trial parameter value
        p += (*grad)[ipar] * step;
        // ...

        #if defined(G_DEBUG_ITER)
        std::cout << "Trial factor value of ";
        std::cout << pars[ipar]->name() << " = ";
        std::cout << p << std::endl;
        #endif
        // ...

        // Handle a violated minimum boundary
        if (pars[ipar]->has_min() && p < p_min) {
            // ...
            *m_logger << " Parameter \"" << pars[ipar]->name();
            *m_logger << "\" hits minimum " << p_min << " more than ";
            // ...
            *m_logger << " Fix parameter at minimum for now." << std::endl;
            // ...
            *m_logger << " Parameter \"" << pars[ipar]->name();
            // ...
        }

        // Handle a violated maximum boundary
        else if (pars[ipar]->has_max() && p > p_max) {
            // ...
            *m_logger << " Parameter \"" << pars[ipar]->name();
            *m_logger << "\" hits maximum " << p_max << " more than ";
            // ...
            *m_logger << " Fix parameter at maximum for now." << std::endl;
            // ...
            *m_logger << " Parameter \"" << pars[ipar]->name();
            // ...
        }
        // ...

        // Set the new parameter factor value
        pars[ipar]->factor_value(p);
        // ...

        #if defined(G_DEBUG_ITER)
        std::cout << "New value of ";
        std::cout << pars[ipar]->name() << " = ";
        std::cout << pars[ipar]->value() << std::endl;
        #endif
    }
}
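The log messages above indicate bookkeeping of successive boundary violations: a parameter whose trial value keeps falling below its minimum (or above its maximum) is, after a number of successive hits, fixed at that boundary for the time being; the members m_hit_minimum, m_hit_maximum and m_max_hit listed further below point in the same direction. The following toy counter (plain standard C++; the trial values are made up, and the clamping shown is an assumption because the exact handling is not fully visible in this excerpt) illustrates the idea:

// Illustrative sketch only: count successive minimum-boundary hits and give
// up on the parameter once the counter exceeds a maximum number of hits.
#include <cstdio>

int main(void)
{
    const int max_hit     = 4;     // maximum number of successive hits (cf. m_max_hit)
    int       hit_minimum = 0;     // successive minimum hits (cf. m_hit_minimum)
    double    p_min       = 0.0;   // lower parameter boundary

    // Made-up sequence of trial factor values
    double trials[] = {0.4, -0.1, -0.3, -0.2, -0.5, -0.1, 0.2};
    for (double p : trials) {
        if (p < p_min) {
            hit_minimum++;
            p = p_min;             // assumed: clamp the trial value to the boundary
            if (hit_minimum > max_hit) {
                std::printf("fix parameter at minimum for now\n");
                break;
            }
        }
        else {
            hit_minimum = 0;       // boundary respected: reset the counter
        }
        std::printf("p=%g successive hits=%d\n", p, hit_minimum);
    }
    return 0;
}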
// Fix parameters that have zero curvature
for (int ipar = 0; ipar < m_npars; ++ipar) {
    if (pars[ipar]->is_free()) {
        if ((*fct.curvature())(ipar,ipar) == 0.0) {
            // ...
            *m_logger << " Parameter \"" << pars[ipar]->name();
            *m_logger << "\" = " << pars[ipar]->value();
            *m_logger << " with gradient " << pars[ipar]->gradient();
            *m_logger << " has zero curvature.";
            *m_logger << " Fix parameter." << std::endl;
            // ...
        }
    }
}
#if defined(G_DEBUG_ITER)
std::cout << "Function value " << m_value;
std::cout << " (was " << save_value << ")" << std::endl;
#endif
// ...

// Check whether any parameter factor value has changed
for (int ipar = 0; ipar < m_npars; ++ipar) {
    if (pars[ipar]->factor_value() != save_pars[ipar]) {
        // ...
    }
}
// ...

// Restore the saved curvature matrix and parameter factor values
*curvature = save_curvature;
for (int ipar = 0; ipar < m_npars; ++ipar) {
    pars[ipar]->factor_value(save_pars[ipar]);
}
// ...

#if defined(G_DEBUG_ITER)
std::cout << "GOptimizerLM::iteration: exit" << std::endl;
#endif
// GOptimizerLM::step_size(): find the largest step that keeps all parameters
// within their boundaries
for (int ipar = 0; ipar < pars.size(); ++ipar) {
    // ...

    // Get the parameter factor value, its boundaries and the proposed change
    double p     = pars[ipar]->factor_value();
    double p_min = pars[ipar]->factor_min();
    double p_max = pars[ipar]->factor_max();
    double delta = grad[ipar];
    // ...

    // Check how far we can go before hitting the minimum boundary
    if (pars[ipar]->has_min()) {
        double step_min = (delta < 0.0) ? (p_min - p)/delta : 1.0;
        if (step_min > 0.0) {
            if (step_min < step) {
                // ...
            }
            // ...
            *m_logger << "\" does not drive optimization step anymore.";
            // ...
        }
    }
    // ...

    // Check how far we can go before hitting the maximum boundary
    if (pars[ipar]->has_max()) {
        double step_max = (delta > 0.0) ? (p_max - p)/delta : 1.0;
        if (step_max > 0.0) {
            if (step_max < step) {
                // ...
            }
            // ...
            *m_logger << "\" does not drive optimization step anymore.";
            // ...
        }
    }
    // ...
}
// ...

// Log the parameter that limits (drives) the optimization step
if (ipar_bnd != -1) {
    // ...
    *m_logger << pars[ipar_bnd]->name();
    *m_logger << "\" drives optimization step (step=";
    *m_logger << step << ")" << std::endl;
    // ...
}
Referenced symbols:

Exception handler interface definition.
Levenberg Marquardt optimizer class interface definition.
#define G_LM_NOT_POSTIVE_DEFINITE
GVector log(const GVector &vector) - Computes natural logarithm of vector elements.
GVector sqrt(const GVector &vector) - Computes square root of vector elements.
Information logger interface definition.
Sparse matrix class interface definition.
GMatrixSparse cholesky_decompose(const bool &compress=true) const - Return Cholesky decomposition.
GVector solve(const GVector &vector) const - Solves linear matrix equation.
GVector cholesky_solver(const GVector &vector, const bool &compress=true) const - Cholesky solver.

GOptimizerFunction - Optimizer function abstract base class:
virtual double value(void) const = 0
virtual GMatrixSparse *curvature(void) = 0
virtual GVector *gradient(void) = 0
virtual void eval(const GOptimizerPars &pars) = 0

GOptimizerLM - Levenberg Marquardt optimizer class:
GOptimizerLM(void) - Void constructor.
virtual ~GOptimizerLM(void) - Destructor.
GOptimizerLM &operator=(const GOptimizerLM &opt) - Assignment operator.
virtual void clear(void) - Clear object.
virtual GOptimizerLM *clone(void) const - Clone object.
virtual void optimize(GOptimizerFunction &fct, GOptimizerPars &pars) - Optimize function parameters.
virtual void errors(GOptimizerFunction &fct, GOptimizerPars &pars) - Compute parameter uncertainties.
virtual int status(void) const - Return optimizer status.
virtual std::string print(const GChatter &chatter=NORMAL) const - Print optimizer information.
const int &npars(void) const - Return number of model parameters.
const double &lambda_inc(void) const - Return lambda increment value.
double iteration(GOptimizerFunction &fct, GOptimizerPars &pars) - Perform one LM iteration.
double step_size(const GVector &grad, const GOptimizerPars &pars) - Return LM step size.
std::string status_string(void) const - Return fit status string.
void init_members(void) - Initialise class members.
void copy_members(const GOptimizerLM &opt) - Copy class members.
void free_members(void) - Delete class members.
bool m_step_adjust - Adjust step size to boundaries.
int m_npars - Number of parameters.
int m_nfree - Number of free parameters.
int m_max_iter - Maximum number of iterations.
int m_max_stall - Maximum number of stalls.
int m_max_dec - Maximum number of function decreases.
int m_max_hit - Maximum number of successive hits.
int m_num_dec - Number of function decreases.
double m_lambda_start - Initial lambda value.
double m_lambda_inc - Lambda increase.
double m_lambda_dec - Lambda decrease.
double m_lambda - Actual lambda.
double m_eps - Absolute precision.
double m_accept_dec - Acceptable function decrease.
double m_value - Actual function value.
double m_delta - Function improvement.
GLog *m_logger - Pointer to optional logger.
std::vector<bool> m_hit_boundary - Bookkeeping array for boundary hits.
std::vector<int> m_hit_minimum - Bookkeeping of successive minimum hits.
std::vector<int> m_hit_maximum - Bookkeeping of successive maximum hits.
std::vector<bool> m_par_freeze - Bookkeeping of parameter freezes.
std::vector<bool> m_par_remove - Bookkeeping of parameter removals.

GOptimizerPars - Optimizer parameter container class:
int size(void) const - Return number of parameters in container.
int nfree(void) const - Return number of free parameters.

GOptimizer - Abstract optimizer base class:
virtual GOptimizer &operator=(const GOptimizer &opt) - Assignment operator.
void init_members(void) - Initialise class members.
void free_members(void) - Delete class members.

String utility functions:
std::string parformat(const std::string &s, const int &indent=0) - Convert string into parameter format.
std::string str(const unsigned short int &value) - Convert unsigned short integer value into string.
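For orientation, a hedged sketch of how the public members listed above fit together. Here my_function and my_pars stand for a user-provided concrete GOptimizerFunction implementation and a filled GOptimizerPars container, which are assumed to exist and are not shown, and the include name is likewise an assumption:

// Hypothetical usage sketch; only the member signatures are taken from the
// listing above, everything else (names, header) is assumed.
#include <iostream>
#include "GOptimizerLM.hpp"

void fit(GOptimizerFunction& my_function, GOptimizerPars& my_pars)
{
    GOptimizerLM opt;                                     // void constructor
    opt.optimize(my_function, my_pars);                   // optimize function parameters
    opt.errors(my_function, my_pars);                     // compute parameter uncertainties
    std::cout << opt.print() << std::endl;                // print optimizer information
    std::cout << "status: " << opt.status() << std::endl; // optimizer status code
}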