git.blender.org/blender.git

Diffstat (limited to 'extern/ceres/internal/ceres/line_search_minimizer.cc')
-rw-r--r--  extern/ceres/internal/ceres/line_search_minimizer.cc | 122
1 file changed, 63 insertions(+), 59 deletions(-)

diff --git a/extern/ceres/internal/ceres/line_search_minimizer.cc b/extern/ceres/internal/ceres/line_search_minimizer.cc
index fdde1ca9c86..931f56c960c 100644
--- a/extern/ceres/internal/ceres/line_search_minimizer.cc
+++ b/extern/ceres/internal/ceres/line_search_minimizer.cc
@@ -43,6 +43,7 @@
#include <algorithm>
#include <cstdlib>
#include <cmath>
+#include <memory>
#include <string>
#include <vector>
@@ -51,7 +52,6 @@
#include "ceres/evaluator.h"
#include "ceres/internal/eigen.h"
#include "ceres/internal/port.h"
-#include "ceres/internal/scoped_ptr.h"
#include "ceres/line_search.h"
#include "ceres/line_search_direction.h"
#include "ceres/stringprintf.h"
@@ -63,27 +63,14 @@ namespace ceres {
namespace internal {
namespace {
-// TODO(sameeragarwal): I think there is a small bug here, in that if
-// the evaluation fails, then the state can contain garbage. Look at
-// this more carefully.
-bool Evaluate(Evaluator* evaluator,
- const Vector& x,
- LineSearchMinimizer::State* state,
- std::string* message) {
- if (!evaluator->Evaluate(x.data(),
- &(state->cost),
- NULL,
- state->gradient.data(),
- NULL)) {
- *message = "Gradient evaluation failed.";
- return false;
- }
-
+bool EvaluateGradientNorms(Evaluator* evaluator,
+ const Vector& x,
+ LineSearchMinimizer::State* state,
+ std::string* message) {
Vector negative_gradient = -state->gradient;
Vector projected_gradient_step(x.size());
- if (!evaluator->Plus(x.data(),
- negative_gradient.data(),
- projected_gradient_step.data())) {
+ if (!evaluator->Plus(
+ x.data(), negative_gradient.data(), projected_gradient_step.data())) {
*message = "projected_gradient_step = Plus(x, -gradient) failed.";
return false;
}
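The hunk above narrows the old Evaluate() helper into EvaluateGradientNorms(): the cost/gradient evaluation (and the garbage-state concern in the deleted TODO) moves to the call sites, and the helper is left to derive gradient norms from the projected gradient step Plus(x, -gradient). The rest of its body is not shown in this hunk; the sketch below is a rough Eigen-based guess at the quantities it fills in, based on the gradient_squared_norm and gradient_max_norm members used later in the diff (ComputeGradientNorms and the State struct here are stand-ins, not Ceres code):

    #include <Eigen/Core>

    typedef Eigen::VectorXd Vector;

    // Hypothetical stand-in for LineSearchMinimizer::State.
    struct State {
      Vector gradient;
      double gradient_squared_norm = 0.0;
      double gradient_max_norm = 0.0;
    };

    // With a trivial Plus, the projected gradient step is just x - gradient,
    // so the norms below reduce to plain gradient norms.
    void ComputeGradientNorms(const Vector& x, State* state) {
      const Vector projected_gradient_step = x - state->gradient;
      state->gradient_squared_norm = (x - projected_gradient_step).squaredNorm();
      state->gradient_max_norm =
          (x - projected_gradient_step).lpNorm<Eigen::Infinity>();
    }

    int main() {
      State state;
      state.gradient = Vector::Constant(3, 2.0);
      ComputeGradientNorms(Vector::Zero(3), &state);
      return state.gradient_max_norm > 0.0 ? 0 : 1;
    }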
@@ -103,7 +90,8 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
double start_time = WallTimeInSeconds();
double iteration_start_time = start_time;
- Evaluator* evaluator = CHECK_NOTNULL(options.evaluator.get());
+ CHECK(options.evaluator != nullptr);
+ Evaluator* evaluator = options.evaluator.get();
const int num_parameters = evaluator->NumParameters();
const int num_effective_parameters = evaluator->NumEffectiveParameters();
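The hunk above drops CHECK_NOTNULL(options.evaluator.get()) in favour of an explicit null check followed by get(), which reads more naturally now that options.evaluator is a smart pointer. A minimal glog-flavoured sketch of the pattern; Options, Evaluator and Minimize here are stand-ins, not the real Ceres declarations:

    #include <memory>
    #include <glog/logging.h>

    struct Evaluator {
      int NumParameters() const { return 0; }
    };

    struct Options {
      std::unique_ptr<Evaluator> evaluator;
    };

    void Minimize(const Options& options) {
      // Fail fast with a readable message if the evaluator was never set,
      // then keep working with the raw pointer, as the diff does.
      CHECK(options.evaluator != nullptr);
      Evaluator* evaluator = options.evaluator.get();
      (void)evaluator->NumParameters();
    }

    int main(int argc, char** argv) {
      google::InitGoogleLogging(argv[0]);
      Options options;
      options.evaluator.reset(new Evaluator);
      Minimize(options);
      return 0;
    }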
@@ -116,9 +104,6 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
State current_state(num_parameters, num_effective_parameters);
State previous_state(num_parameters, num_effective_parameters);
- Vector delta(num_effective_parameters);
- Vector x_plus_delta(num_parameters);
-
IterationSummary iteration_summary;
iteration_summary.iteration = 0;
iteration_summary.step_is_valid = false;
@@ -130,8 +115,19 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
iteration_summary.linear_solver_iterations = 0;
iteration_summary.step_solver_time_in_seconds = 0;
- // Do initial cost and Jacobian evaluation.
- if (!Evaluate(evaluator, x, &current_state, &summary->message)) {
+ // Do initial cost and gradient evaluation.
+ if (!evaluator->Evaluate(x.data(),
+ &(current_state.cost),
+ nullptr,
+ current_state.gradient.data(),
+ nullptr)) {
+ summary->termination_type = FAILURE;
+ summary->message = "Initial cost and jacobian evaluation failed.";
+ LOG_IF(WARNING, is_not_silent) << "Terminating: " << summary->message;
+ return;
+ }
+
+ if (!EvaluateGradientNorms(evaluator, x, &current_state, &summary->message)) {
summary->termination_type = FAILURE;
summary->message = "Initial cost and jacobian evaluation failed. "
"More details: " + summary->message;
@@ -142,9 +138,8 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
summary->initial_cost = current_state.cost + summary->fixed_cost;
iteration_summary.cost = current_state.cost + summary->fixed_cost;
- iteration_summary.gradient_max_norm = current_state.gradient_max_norm;
iteration_summary.gradient_norm = sqrt(current_state.gradient_squared_norm);
-
+ iteration_summary.gradient_max_norm = current_state.gradient_max_norm;
if (iteration_summary.gradient_max_norm <= options.gradient_tolerance) {
summary->message = StringPrintf("Gradient tolerance reached. "
"Gradient max norm: %e <= %e",
@@ -170,7 +165,7 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
line_search_direction_options.max_lbfgs_rank = options.max_lbfgs_rank;
line_search_direction_options.use_approximate_eigenvalue_bfgs_scaling =
options.use_approximate_eigenvalue_bfgs_scaling;
- scoped_ptr<LineSearchDirection> line_search_direction(
+ std::unique_ptr<LineSearchDirection> line_search_direction(
LineSearchDirection::Create(line_search_direction_options));
LineSearchFunction line_search_function(evaluator);
@@ -194,11 +189,11 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
line_search_options.is_silent = options.is_silent;
line_search_options.function = &line_search_function;
- scoped_ptr<LineSearch>
+ std::unique_ptr<LineSearch>
line_search(LineSearch::Create(options.line_search_type,
line_search_options,
&summary->message));
- if (line_search.get() == NULL) {
+ if (line_search.get() == nullptr) {
summary->termination_type = FAILURE;
LOG_IF(ERROR, is_not_silent) << "Terminating: " << summary->message;
return;
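The hunk above applies the same scoped_ptr to std::unique_ptr change to the LineSearch object and switches the failure check from NULL to nullptr. The create-then-check pattern it uses is sketched below with placeholder types; the factory name, the message text and the Setup() wrapper are illustrative, not Ceres API:

    #include <memory>
    #include <string>

    struct LineSearchLike {};

    // Factory in the style of LineSearch::Create: returns nullptr and fills
    // 'message' when the requested configuration cannot be built.
    LineSearchLike* Create(bool supported, std::string* message) {
      if (!supported) {
        *message = "Unsupported line search type.";
        return nullptr;
      }
      return new LineSearchLike;
    }

    bool Setup(std::string* message) {
      std::unique_ptr<LineSearchLike> line_search(
          Create(/*supported=*/false, message));
      if (line_search == nullptr) {  // same as line_search.get() == nullptr
        return false;                // caller reports FAILURE and terminates
      }
      return true;
    }

    int main() {
      std::string message;
      return Setup(&message) ? 0 : 1;
    }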
@@ -326,28 +321,37 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
break;
}
- current_state.step_size = line_search_summary.optimal_step_size;
- delta = current_state.step_size * current_state.search_direction;
-
+ const FunctionSample& optimal_point = line_search_summary.optimal_point;
+ CHECK(optimal_point.vector_x_is_valid)
+ << "Congratulations, you found a bug in Ceres. Please report it.";
+ current_state.step_size = optimal_point.x;
previous_state = current_state;
iteration_summary.step_solver_time_in_seconds =
WallTimeInSeconds() - iteration_start_time;
- const double x_norm = x.norm();
-
- if (!evaluator->Plus(x.data(), delta.data(), x_plus_delta.data())) {
- summary->termination_type = FAILURE;
- summary->message =
- "x_plus_delta = Plus(x, delta) failed. This should not happen "
- "as the step was valid when it was selected by the line search.";
- LOG_IF(WARNING, is_not_silent) << "Terminating: " << summary->message;
- break;
+ if (optimal_point.vector_gradient_is_valid) {
+ current_state.cost = optimal_point.value;
+ current_state.gradient = optimal_point.vector_gradient;
+ } else {
+ Evaluator::EvaluateOptions evaluate_options;
+ evaluate_options.new_evaluation_point = false;
+ if (!evaluator->Evaluate(evaluate_options,
+ optimal_point.vector_x.data(),
+ &(current_state.cost),
+ nullptr,
+ current_state.gradient.data(),
+ nullptr)) {
+ summary->termination_type = FAILURE;
+ summary->message = "Cost and jacobian evaluation failed.";
+ LOG_IF(WARNING, is_not_silent) << "Terminating: " << summary->message;
+ return;
+ }
}
- if (!Evaluate(evaluator,
- x_plus_delta,
- &current_state,
- &summary->message)) {
+ if (!EvaluateGradientNorms(evaluator,
+ optimal_point.vector_x,
+ &current_state,
+ &summary->message)) {
summary->termination_type = FAILURE;
summary->message =
"Step failed to evaluate. This should not happen as the step was "
@@ -358,8 +362,9 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
}
// Compute the norm of the step in the ambient space.
- iteration_summary.step_norm = (x_plus_delta - x).norm();
- x = x_plus_delta;
+ iteration_summary.step_norm = (optimal_point.vector_x - x).norm();
+ const double x_norm = x.norm();
+ x = optimal_point.vector_x;
iteration_summary.gradient_max_norm = current_state.gradient_max_norm;
iteration_summary.gradient_norm = sqrt(current_state.gradient_squared_norm);
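The two hunks above change how the accepted step is consumed: the line search now reports a FunctionSample (optimal_point) carrying the step size, the cost value, the new parameter vector and, when it was computed, the gradient at that point. If the cached gradient is valid it is reused; otherwise the point is re-evaluated with EvaluateOptions::new_evaluation_point set to false, and the step norm is then taken directly as (optimal_point.vector_x - x).norm(), with x_norm captured just before x is overwritten. The decision logic is sketched below with simplified stand-in types (FunctionSample, State and AcceptOptimalPoint here are illustrative, not the Ceres definitions):

    #include <string>
    #include <vector>

    // Simplified stand-in for ceres::internal::FunctionSample.
    struct FunctionSample {
      double x = 0.0;                       // step size along the search direction
      double value = 0.0;                   // cost at the sample
      std::vector<double> vector_x;         // parameter vector of the sample
      std::vector<double> vector_gradient;  // gradient, if the line search computed it
      bool vector_x_is_valid = false;
      bool vector_gradient_is_valid = false;
    };

    struct State {
      double cost = 0.0;
      std::vector<double> gradient;
    };

    // Reuse the gradient cached by the line search when it is valid; a real
    // implementation would otherwise re-evaluate cost and gradient at
    // vector_x, telling the evaluator the point has not changed.
    bool AcceptOptimalPoint(const FunctionSample& optimal_point, State* state,
                            std::string* message) {
      if (!optimal_point.vector_x_is_valid) {
        *message = "Line search did not return a usable point.";
        return false;
      }
      if (optimal_point.vector_gradient_is_valid) {
        state->cost = optimal_point.value;
        state->gradient = optimal_point.vector_gradient;
      }
      return true;
    }

    int main() {
      FunctionSample sample;
      sample.vector_x = {1.0, 2.0};
      sample.vector_x_is_valid = true;
      State state;
      std::string message;
      return AcceptOptimalPoint(sample, &state, &message) ? 0 : 1;
    }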
@@ -380,6 +385,7 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
iteration_summary.cumulative_time_in_seconds =
WallTimeInSeconds() - start_time
+ summary->preprocessor_time_in_seconds;
+ summary->iterations.push_back(iteration_summary);
// Iterations inside the line search algorithm are considered
// 'steps' in the broader context, to distinguish these inner
@@ -423,20 +429,18 @@ void LineSearchMinimizer::Minimize(const Minimizer::Options& options,
}
const double absolute_function_tolerance =
- options.function_tolerance * previous_state.cost;
- if (fabs(iteration_summary.cost_change) <= absolute_function_tolerance) {
- summary->message =
- StringPrintf("Function tolerance reached. "
- "|cost_change|/cost: %e <= %e",
- fabs(iteration_summary.cost_change) /
- previous_state.cost,
- options.function_tolerance);
+ options.function_tolerance * std::abs(previous_state.cost);
+ if (std::abs(iteration_summary.cost_change) <=
+ absolute_function_tolerance) {
+ summary->message = StringPrintf(
+ "Function tolerance reached. "
+ "|cost_change|/cost: %e <= %e",
+ std::abs(iteration_summary.cost_change) / previous_state.cost,
+ options.function_tolerance);
summary->termination_type = CONVERGENCE;
VLOG_IF(1, is_not_silent) << "Terminating: " << summary->message;
break;
}
-
- summary->iterations.push_back(iteration_summary);
}
}
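The final hunk rewrites the function-tolerance test to use std::abs on both the cost change and the previous cost (the old tolerance, function_tolerance * previous_state.cost, goes negative when the cost is negative and the test can then never fire), and the recording of iteration_summary moves from the bottom of the loop to just after the timing block, so an iteration that ends in a convergence break is still recorded. A small self-contained version of the test, assuming cost_change = previous_cost - current_cost as in the iteration summaries above:

    #include <cmath>
    #include <cstdio>

    // Converge when |cost_change| <= function_tolerance * |previous_cost|.
    bool FunctionToleranceReached(double previous_cost, double current_cost,
                                  double function_tolerance) {
      const double cost_change = previous_cost - current_cost;
      const double absolute_function_tolerance =
          function_tolerance * std::abs(previous_cost);
      return std::abs(cost_change) <= absolute_function_tolerance;
    }

    int main() {
      // A 1e-8 drop on a cost of 2.0 satisfies a relative tolerance of 1e-6.
      std::printf("%d\n", FunctionToleranceReached(2.0, 2.0 - 1e-8, 1e-6));
      return 0;
    }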