git.blender.org/blender.git
Diffstat (limited to 'extern/ceres/include/ceres/gradient_problem_solver.h')
-rw-r--r--  extern/ceres/include/ceres/gradient_problem_solver.h | 128
1 file changed, 61 insertions(+), 67 deletions(-)
diff --git a/extern/ceres/include/ceres/gradient_problem_solver.h b/extern/ceres/include/ceres/gradient_problem_solver.h
index a7d0121ea0c..181699d8fd4 100644
--- a/extern/ceres/include/ceres/gradient_problem_solver.h
+++ b/extern/ceres/include/ceres/gradient_problem_solver.h
@@ -1,5 +1,5 @@
// Ceres Solver - A fast non-linear least squares minimizer
-// Copyright 2015 Google Inc. All rights reserved.
+// Copyright 2019 Google Inc. All rights reserved.
// http://ceres-solver.org/
//
// Redistribution and use in source and binary forms, with or without
@@ -34,11 +34,11 @@
#include <cmath>
#include <string>
#include <vector>
-#include "ceres/internal/macros.h"
+
+#include "ceres/internal/disable_warnings.h"
#include "ceres/internal/port.h"
#include "ceres/iteration_callback.h"
#include "ceres/types.h"
-#include "ceres/internal/disable_warnings.h"
namespace ceres {
@@ -54,39 +54,15 @@ class CERES_EXPORT GradientProblemSolver {
//
// The constants are defined inside types.h
struct CERES_EXPORT Options {
- // Default constructor that sets up a generic sparse problem.
- Options() {
- line_search_direction_type = LBFGS;
- line_search_type = WOLFE;
- nonlinear_conjugate_gradient_type = FLETCHER_REEVES;
- max_lbfgs_rank = 20;
- use_approximate_eigenvalue_bfgs_scaling = false;
- line_search_interpolation_type = CUBIC;
- min_line_search_step_size = 1e-9;
- line_search_sufficient_function_decrease = 1e-4;
- max_line_search_step_contraction = 1e-3;
- min_line_search_step_contraction = 0.6;
- max_num_line_search_step_size_iterations = 20;
- max_num_line_search_direction_restarts = 5;
- line_search_sufficient_curvature_decrease = 0.9;
- max_line_search_step_expansion = 10.0;
- max_num_iterations = 50;
- max_solver_time_in_seconds = 1e9;
- function_tolerance = 1e-6;
- gradient_tolerance = 1e-10;
- logging_type = PER_MINIMIZER_ITERATION;
- minimizer_progress_to_stdout = false;
- }
-
// Returns true if the options struct has a valid
// configuration. Returns false otherwise, and fills in *error
// with a message describing the problem.
bool IsValid(std::string* error) const;
// Minimizer options ----------------------------------------
- LineSearchDirectionType line_search_direction_type;
- LineSearchType line_search_type;
- NonlinearConjugateGradientType nonlinear_conjugate_gradient_type;
+ LineSearchDirectionType line_search_direction_type = LBFGS;
+ LineSearchType line_search_type = WOLFE;
+ NonlinearConjugateGradientType nonlinear_conjugate_gradient_type = FLETCHER_REEVES;
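This hunk replaces the hand-written Options constructor with C++11 in-class default member initializers, so each default now sits next to its declaration. A minimal sketch of the pattern (hypothetical struct, not part of this diff):

    struct Example {
      // Before: Example() { max_num_iterations = 50; }
      // After: the default is visible at the declaration and no
      // user-written constructor is needed.
      int max_num_iterations = 50;
    };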
// The LBFGS hessian approximation is a low rank approximation to
// the inverse of the Hessian matrix. The rank of the
@@ -111,8 +87,8 @@ class CERES_EXPORT GradientProblemSolver {
// method, please see:
//
// Nocedal, J. (1980). "Updating Quasi-Newton Matrices with
- // Limited Storage". Mathematics of Computation 35 (151): 773–782.
- int max_lbfgs_rank;
+ // Limited Storage". Mathematics of Computation 35 (151): 773-782.
+ int max_lbfgs_rank = 20;
// As part of the (L)BFGS update step (BFGS) / right-multiply step (L-BFGS),
// the initial inverse Hessian approximation is taken to be the Identity.
@@ -134,18 +110,18 @@ class CERES_EXPORT GradientProblemSolver {
// Oren S.S., Self-scaling variable metric (SSVM) algorithms
// Part II: Implementation and experiments, Management Science,
// 20(5), 863-874, 1974.
- bool use_approximate_eigenvalue_bfgs_scaling;
+ bool use_approximate_eigenvalue_bfgs_scaling = false;
// Degree of the polynomial used to approximate the objective
// function. Valid values are BISECTION, QUADRATIC and CUBIC.
//
// BISECTION corresponds to pure backtracking search with no
// interpolation.
- LineSearchInterpolationType line_search_interpolation_type;
+ LineSearchInterpolationType line_search_interpolation_type = CUBIC;
// If during the line search, the step_size falls below this
// value, it is truncated to zero.
- double min_line_search_step_size;
+ double min_line_search_step_size = 1e-9;
// Line search parameters.
@@ -159,7 +135,7 @@ class CERES_EXPORT GradientProblemSolver {
//
// f(step_size) <= f(0) + sufficient_decrease * f'(0) * step_size
//
- double line_search_sufficient_function_decrease;
+ double line_search_sufficient_function_decrease = 1e-4;
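As a minimal sketch (not part of this diff), the Armijo sufficient-decrease condition above can be written as a standalone predicate; the helper name is hypothetical:

    // f0 and df0: value and directional derivative of f at step_size = 0.
    bool ArmijoHolds(double f0, double df0, double f_step, double step_size,
                     double sufficient_decrease) {
      return f_step <= f0 + sufficient_decrease * df0 * step_size;
    }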
// In each iteration of the line search,
//
@@ -169,7 +145,7 @@ class CERES_EXPORT GradientProblemSolver {
//
// 0 < max_step_contraction < min_step_contraction < 1
//
- double max_line_search_step_contraction;
+ double max_line_search_step_contraction = 1e-3;
// In each iteration of the line search,
//
@@ -179,19 +155,19 @@ class CERES_EXPORT GradientProblemSolver {
//
// 0 < max_step_contraction < min_step_contraction < 1
//
- double min_line_search_step_contraction;
+ double min_line_search_step_contraction = 0.6;
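A sketch of keeping a contracted trial step inside the bounds implied by 0 < max_step_contraction < min_step_contraction < 1 (hypothetical helper; the real logic lives in Ceres' line search implementation):

    #include <algorithm>

    // Clamp a proposed step into
    // [max_contraction * step, min_contraction * step].
    double ClampContractedStep(double step, double proposed,
                               double max_contraction,
                               double min_contraction) {
      return std::min(std::max(proposed, max_contraction * step),
                      min_contraction * step);
    }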
// Maximum number of trial step size iterations during each line search,
// if a step size satisfying the search conditions cannot be found within
// this number of trials, the line search will terminate.
- int max_num_line_search_step_size_iterations;
+ int max_num_line_search_step_size_iterations = 20;
// Maximum number of restarts of the line search direction algorithm before
// terminating the optimization. Restarts of the line search direction
// algorithm occur when the current algorithm fails to produce a new descent
// direction. This typically indicates a numerical failure, or a breakdown
// in the validity of the approximations used.
- int max_num_line_search_direction_restarts;
+ int max_num_line_search_direction_restarts = 5;
// The strong Wolfe conditions consist of the Armijo sufficient
// decrease condition, and an additional requirement that the
@@ -204,7 +180,7 @@ class CERES_EXPORT GradientProblemSolver {
//
// Where f() is the line search objective and f'() is the derivative
// of f w.r.t step_size (d f / d step_size).
- double line_search_sufficient_curvature_decrease;
+ double line_search_sufficient_curvature_decrease = 0.9;
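Sketch of the corresponding curvature test (hypothetical helper; the condition is the standard strong Wolfe form |f'(step_size)| <= sufficient_curvature_decrease * |f'(0)|):

    #include <cmath>

    bool CurvatureHolds(double df0, double df_step,
                        double sufficient_curvature_decrease) {
      return std::fabs(df_step) <=
             sufficient_curvature_decrease * std::fabs(df0);
    }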
// During the bracketing phase of the Wolfe search, the step size is
// increased until either a point satisfying the Wolfe conditions is
@@ -215,36 +191,49 @@ class CERES_EXPORT GradientProblemSolver {
// new_step_size <= max_step_expansion * step_size.
//
// By definition for expansion, max_step_expansion > 1.0.
- double max_line_search_step_expansion;
+ double max_line_search_step_expansion = 10.0;
// Maximum number of iterations for the minimizer to run.
- int max_num_iterations;
+ int max_num_iterations = 50;
// Maximum time (in seconds) for which the minimizer should run.
- double max_solver_time_in_seconds;
+ double max_solver_time_in_seconds = 1e9;
// Minimizer terminates when
//
// (new_cost - old_cost) < function_tolerance * old_cost;
//
- double function_tolerance;
+ double function_tolerance = 1e-6;
// Minimizer terminates when
//
// max_i |x - Project(Plus(x, -g(x)))| < gradient_tolerance
//
// This value should typically be 1e-4 * function_tolerance.
- double gradient_tolerance;
+ double gradient_tolerance = 1e-10;
+
+ // Minimizer terminates when
+ //
+ // |step|_2 <= parameter_tolerance * ( |x|_2 + parameter_tolerance)
+ //
+ double parameter_tolerance = 1e-8;
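Taken together, the three tolerances above define the convergence tests. A sketch with hypothetical helpers (the actual checks run inside the minimizer; note the cost-change test compares the magnitude of the change, which is what the comment intends):

    #include <cmath>

    bool FunctionToleranceReached(double old_cost, double new_cost,
                                  double tol) {
      return std::fabs(new_cost - old_cost) < tol * old_cost;
    }

    bool ParameterToleranceReached(double step_norm_2, double x_norm_2,
                                   double tol) {
      return step_norm_2 <= tol * (x_norm_2 + tol);
    }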
// Logging options ---------------------------------------------------------
- LoggingType logging_type;
+ LoggingType logging_type = PER_MINIMIZER_ITERATION;
// By default the Minimizer progress is logged to VLOG(1), which
// is sent to STDERR depending on the vlog level. If this flag is
// set to true, and logging_type is not SILENT, the logging output
// is sent to STDOUT.
- bool minimizer_progress_to_stdout;
+ bool minimizer_progress_to_stdout = false;
+
+ // If true, the user's parameter blocks are updated at the end of
+ // every Minimizer iteration, otherwise they are updated when the
+ // Minimizer terminates. This is useful if, for example, the user
+ // wishes to visualize the state of the optimization every
+ // iteration.
+ bool update_state_every_iteration = false;
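For the visualization use case mentioned above, a per-iteration callback can be registered via Options::callbacks; a minimal sketch using the ceres::IterationCallback interface:

    #include <iostream>
    #include "ceres/iteration_callback.h"

    class LoggingCallback : public ceres::IterationCallback {
     public:
      ceres::CallbackReturnType operator()(
          const ceres::IterationSummary& summary) override {
        std::cout << "iteration " << summary.iteration
                  << ", cost " << summary.cost << "\n";
        return ceres::SOLVER_CONTINUE;
      }
    };

    // Usage sketch: with update_state_every_iteration = true, the
    // parameter array passed to Solve() holds the current iterate
    // when the callback fires.
    //   LoggingCallback logger;
    //   options.callbacks.push_back(&logger);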
// Callbacks that are executed at the end of each iteration of the
// Minimizer. An iteration may terminate midway, either due to
@@ -265,8 +254,6 @@ class CERES_EXPORT GradientProblemSolver {
};
struct CERES_EXPORT Summary {
- Summary();
-
// A brief one line description of the state of the solver after
// termination.
std::string BriefReport() const;
@@ -278,65 +265,72 @@ class CERES_EXPORT GradientProblemSolver {
bool IsSolutionUsable() const;
// Minimizer summary -------------------------------------------------
- TerminationType termination_type;
+ TerminationType termination_type = FAILURE;
// Reason why the solver terminated.
- std::string message;
+ std::string message = "ceres::GradientProblemSolve was not called.";
// Cost of the problem (value of the objective function) before
// the optimization.
- double initial_cost;
+ double initial_cost = -1.0;
// Cost of the problem (value of the objective function) after the
// optimization.
- double final_cost;
+ double final_cost = -1.0;
// IterationSummary for each minimizer iteration in order.
std::vector<IterationSummary> iterations;
+ // Number of times the cost (and not the gradient) was evaluated.
+ int num_cost_evaluations = -1;
+
+ // Number of times the gradient (and the cost) were evaluated.
+ int num_gradient_evaluations = -1;
+
// Sum total of all time spent inside Ceres when Solve is called.
- double total_time_in_seconds;
+ double total_time_in_seconds = -1.0;
// Time (in seconds) spent evaluating the cost.
- double cost_evaluation_time_in_seconds;
+ double cost_evaluation_time_in_seconds = -1.0;
// Time (in seconds) spent evaluating the gradient.
- double gradient_evaluation_time_in_seconds;
+ double gradient_evaluation_time_in_seconds = -1.0;
// Time (in seconds) spent minimizing the interpolating polynomial
// to compute the next candidate step size as part of a line search.
- double line_search_polynomial_minimization_time_in_seconds;
+ double line_search_polynomial_minimization_time_in_seconds = -1.0;
- // Number of parameters in the probem.
- int num_parameters;
+ // Number of parameters in the problem.
+ int num_parameters = -1;
// Dimension of the tangent space of the problem.
- int num_local_parameters;
+ int num_local_parameters = -1;
// Type of line search direction used.
- LineSearchDirectionType line_search_direction_type;
+ LineSearchDirectionType line_search_direction_type = LBFGS;
// Type of the line search algorithm used.
- LineSearchType line_search_type;
+ LineSearchType line_search_type = WOLFE;
// When performing line search, the degree of the polynomial used
// to approximate the objective function.
- LineSearchInterpolationType line_search_interpolation_type;
+ LineSearchInterpolationType line_search_interpolation_type = CUBIC;
// If the line search direction is NONLINEAR_CONJUGATE_GRADIENT,
// then this indicates the particular variant of non-linear
// conjugate gradient used.
- NonlinearConjugateGradientType nonlinear_conjugate_gradient_type;
+ NonlinearConjugateGradientType nonlinear_conjugate_gradient_type =
+ FLETCHER_REEVES;
// If the type of the line search direction is LBFGS, then this
// indicates the rank of the Hessian approximation.
- int max_lbfgs_rank;
+ int max_lbfgs_rank = -1;
};
// Once a least squares problem has been built, this function takes
// the problem and optimizes it based on the values of the options
// parameters. Upon return, a detailed summary of the work performed
- // by the preprocessor, the non-linear minmizer and the linear
+ // by the preprocessor, the non-linear minimizer and the linear
// solver are reported in the summary object.
virtual void Solve(const GradientProblemSolver::Options& options,
const GradientProblem& problem,
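End-to-end, the header shown in this diff is used roughly as follows; this sketch follows the canonical Rosenbrock example that ships with Ceres and assumes "ceres/ceres.h" pulls in GradientProblem, FirstOrderFunction, and GradientProblemSolver:

    #include <iostream>
    #include "ceres/ceres.h"
    #include "glog/logging.h"

    // f(x, y) = (1 - x)^2 + 100 (y - x^2)^2
    class Rosenbrock : public ceres::FirstOrderFunction {
     public:
      bool Evaluate(const double* x, double* cost,
                    double* gradient) const override {
        cost[0] = (1.0 - x[0]) * (1.0 - x[0]) +
                  100.0 * (x[1] - x[0] * x[0]) * (x[1] - x[0] * x[0]);
        if (gradient != nullptr) {
          gradient[0] = -2.0 * (1.0 - x[0]) -
                        400.0 * x[0] * (x[1] - x[0] * x[0]);
          gradient[1] = 200.0 * (x[1] - x[0] * x[0]);
        }
        return true;
      }
      int NumParameters() const override { return 2; }
    };

    int main(int argc, char** argv) {
      google::InitGoogleLogging(argv[0]);
      double parameters[2] = {-1.2, 1.0};
      // GradientProblem takes ownership of the function.
      ceres::GradientProblem problem(new Rosenbrock());
      ceres::GradientProblemSolver::Options options;
      options.minimizer_progress_to_stdout = true;
      ceres::GradientProblemSolver::Summary summary;
      ceres::Solve(options, problem, parameters, &summary);
      std::cout << summary.BriefReport() << "\n";
      return 0;
    }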