Autoware.Auto
newtons_method_optimizer.hpp
// Copyright 2019 the Autoware Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//    http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Co-developed by Tier IV, Inc. and Apex.AI, Inc.

#ifndef OPTIMIZATION__NEWTONS_METHOD_OPTIMIZER_HPP_
#define OPTIMIZATION__NEWTONS_METHOD_OPTIMIZER_HPP_

// Package headers (file names from the listing's cross-references; include
// paths assumed from the package layout)
#include <optimization/optimizer.hpp>
#include <optimization/optimizer_options.hpp>
#include <optimization/optimization_problem.hpp>
#include <optimization/line_search/line_search.hpp>

#include <Eigen/SVD>
#include <limits>
#include <memory>
#include <cmath>

namespace autoware
{
namespace common
{
namespace optimization
{
/// \brief Optimizer using the Newton method with line search.
template<typename LineSearchT>
class OPTIMIZATION_PUBLIC NewtonsMethodOptimizer
  : public Optimizer<NewtonsMethodOptimizer<LineSearchT>>
{
public:
  using StepT = float_t;

  /// \brief Constructor.
  /// \param line_searcher Line-search policy used to scale each Newton step.
  /// \param options Iteration limit and convergence tolerances.
  NewtonsMethodOptimizer(
    const LineSearchT & line_searcher,
    const OptimizationOptions & options)
  : m_line_searcher{line_searcher}, m_options{options} {}

  /// \brief Solve the given optimization problem starting from an initial guess.
  /// \return A summary with the estimated distance to the optimum, the
  ///         termination type, and the number of iterations performed.
  template<typename OptimizationProblemT, typename DomainValueT, typename EigenSolverT>
  OptimizationSummary solve_(
    OptimizationProblemT & optimization_problem,
    const DomainValueT & x0, DomainValueT & x_out)
  {
    // Get types from the method's template parameters
    using Value = typename OptimizationProblemT::Value;
    using Jacobian = typename OptimizationProblemT::Jacobian;
    using Hessian = typename OptimizationProblemT::Hessian;
    TerminationType termination_type{TerminationType::NO_CONVERGENCE};
    Value score_previous{0.0};
    Jacobian jacobian{Jacobian{}.setZero()};
    Hessian hessian{Hessian{}.setZero()};
    DomainValueT opt_direction{DomainValueT{}.setZero()};

    if (!x0.allFinite()) {  // Early exit for invalid input.
      return OptimizationSummary{
        std::numeric_limits<Value>::max(), TerminationType::FAILURE, 0UL};
    }

    // Initialize
    x_out = x0;

    // Get score, Jacobian and Hessian (pre-computed using evaluate)
    optimization_problem.evaluate(x_out, ComputeMode{}.set_score().set_jacobian().set_hessian());
    score_previous = optimization_problem(x_out);
    optimization_problem.jacobian(x_out, jacobian);
    optimization_problem.hessian(x_out, hessian);

    // Early exit if the initial solution is already good enough.
    if (jacobian.template lpNorm<Eigen::Infinity>() <= m_options.gradient_tolerance()) {
      // As there is no Newton step yet, the Jacobian norm is a good substitute
      // for the estimated distance to the optimum.
      return OptimizationSummary{jacobian.norm(), TerminationType::CONVERGENCE, 0UL};
    }

    // Iterate until convergence, error, or the maximum number of iterations
    auto nr_iterations = 0UL;
    for (; nr_iterations < m_options.max_num_iterations(); ++nr_iterations) {
      if (!x_out.allFinite() || !jacobian.allFinite() || !hessian.allFinite()) {
        termination_type = TerminationType::FAILURE;
        break;
      }
      // Find the descent direction using Newton's method
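      // (i.e. solve the linear system  H * d = -J  for the step direction d)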
      EigenSolverT solver(hessian);
      opt_direction = solver.solve(-jacobian);

      // Check if there was a problem during Eigen's solve()
      if (!opt_direction.allFinite()) {
        termination_type = TerminationType::FAILURE;
        break;
      }
      // Calculate and apply the step length
      // TODO(zozen): with guaranteed sufficient decrease as in [More, Thuente 1994];
      // would need partial results passed to optimization_problem before the call, as in:
      // computeStepLengthMT (x0, x_delta, x_delta_norm, transformation_epsilon_/2, ...
      // and would pre-compute score/jacobian/hessian as during init in evaluate!
      // also needs the sign to know the direction of optimization?
      const auto step = m_line_searcher.compute_next_step(
        x_out, opt_direction,
        optimization_problem);
      const auto prev_x_norm = x_out.norm();
      x_out += step;

      // Check the change in the parameter relative to the parameter value.
      // The tolerance is added to the norm for stability when the norm is close to 0.
      // (Inspired by https://github.com/ceres-solver/ceres-solver/blob/4362a2169966e08394252098
      // c80d1f26764becd0/include/ceres/tiny_solver.h#L244)
      const auto parameter_tolerance =
        m_options.parameter_tolerance() * (prev_x_norm + m_options.parameter_tolerance());
      if (step.norm() <= parameter_tolerance) {
        termination_type = TerminationType::CONVERGENCE;
        break;
      }

      // Update value, Jacobian and Hessian (pre-computed using evaluate)
      optimization_problem.evaluate(x_out, ComputeMode{}.set_score().set_jacobian().set_hessian());
      const auto score = optimization_problem(x_out);
      optimization_problem.jacobian(x_out, jacobian);
      optimization_problem.hessian(x_out, hessian);

      // Check if the max-norm of the gradient is small enough.
      if (jacobian.template lpNorm<Eigen::Infinity>() <= m_options.gradient_tolerance()) {
        termination_type = TerminationType::CONVERGENCE;
        break;
      }

      // Check the change in the cost function:
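      //   |score - score_previous| <= function_tolerance * |score_previous|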
      if (std::fabs(score - score_previous) <=
        (m_options.function_tolerance() * std::fabs(score_previous)))
      {
        termination_type = TerminationType::CONVERGENCE;
        break;
      }
      score_previous = score;
    }

    // Returning a summary consisting of the following three values:
    // estimated_distance_to_optimum, convergence_tolerance_criteria_met, number_of_iterations_made
    return OptimizationSummary{opt_direction.norm(), termination_type, nr_iterations};
  }

private:
  // initialized on construction
  LineSearchT m_line_searcher;
  OptimizationOptions m_options;
};
}  // namespace optimization
}  // namespace common
}  // namespace autoware

#endif  // OPTIMIZATION__NEWTONS_METHOD_OPTIMIZER_HPP_
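
Usage sketch (not part of the header): the snippet below illustrates how the optimizer might be wired together. ProblemT stands for any type modeling the interface used by solve_ above (the Value/Jacobian/Hessian typedefs plus evaluate(), jacobian(), hessian() and operator()). The OptimizationOptions constructor arguments and the direct solve_ call are assumptions, not confirmed signatures; the CRTP Optimizer base presumably exposes a forwarding public entry point.

// Sketch only: the OptimizationOptions argument order and the solve_ call
// convention are assumptions based on this header and its cross-references.
#include <optimization/newtons_method_optimizer.hpp>
#include <Eigen/Dense>

using autoware::common::optimization::NewtonsMethodOptimizer;
using autoware::common::optimization::OptimizationOptions;
using autoware::common::optimization::OptimizationSummary;

template<typename ProblemT, typename LineSearchT>
OptimizationSummary run_newton(
  ProblemT & problem, const Eigen::Vector3d & x0, const LineSearchT & line_search)
{
  // Iteration limit, then function/parameter/gradient tolerances (order assumed).
  const OptimizationOptions options{20UL, 1.0e-4, 1.0e-4, 1.0e-4};
  NewtonsMethodOptimizer<LineSearchT> optimizer{line_search, options};
  Eigen::Vector3d x_opt;
  // An LDLT decomposition suits a symmetric Hessian; any Eigen solver type
  // constructible from the Hessian and offering solve() can be EigenSolverT.
  return optimizer.template solve_<ProblemT, Eigen::Vector3d,
      Eigen::LDLT<typename ProblemT::Hessian>>(problem, x0, x_opt);
}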