From 166cc92b00c51dd3f9907f790922636a6be53b5d Mon Sep 17 00:00:00 2001 From: Levi Armstrong Date: Mon, 15 Dec 2025 14:16:59 -0600 Subject: [PATCH 1/7] Add dynamic constraint support --- .../collision/discrete_collision_constraint.h | 69 ++++++ trajopt_ifopt/include/trajopt_ifopt/fwd.h | 3 + .../continuous_collision_evaluators.cpp | 2 +- .../discrete_collision_constraint.cpp | 107 +++++++++ .../include/trajopt_sqp/ifopt_qp_problem.h | 2 + .../include/trajopt_sqp/qp_problem.h | 15 ++ .../include/trajopt_sqp/trajopt_qp_problem.h | 2 + .../trajopt_sqp/src/ifopt_qp_problem.cpp | 6 + .../trajopt_sqp/src/trajopt_qp_problem.cpp | 215 ++++++++++++++++-- 9 files changed, 406 insertions(+), 15 deletions(-) diff --git a/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h b/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h index 9264e2db5..f7c72cbb1 100644 --- a/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h +++ b/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h @@ -118,5 +118,74 @@ class DiscreteCollisionConstraint : public ConstraintSet void init() const; }; +class DiscreteCollisionConstraintD : public DynamicConstraintSet +{ +public: + using Ptr = std::shared_ptr; + using ConstPtr = std::shared_ptr; + + DiscreteCollisionConstraintD(std::shared_ptr collision_evaluator, + std::shared_ptr position_var, + const std::string& name = "DiscreteCollision"); + + int update() override; + + /** + * @brief Returns the values associated with the constraint. + * @warning Make sure that the values returns are not just the violation but the constraint values. + * Remember the values are the constant in the quadratic function, so if you only return the + * violation then if it is not violating the constraint this would be zero which means it + * will always appear to be on the constraint boundary which will cause issue when solving. + * If it is not voliating the constraint then return the max negative number. + * If no contacts are found return the negative of the collision margin buffer. This is why + * it is important to not set the collision margin buffer to zero. + * @return The constraint values not the violations + */ + Eigen::VectorXd GetValues() const override; + + /** + * @brief Returns the "bounds" of this constraint. How these are enforced is up to the solver + * @return Returns the "bounds" of this constraint + */ + std::vector GetBounds() const override; + + /** + * @brief Fills the jacobian block associated with the given var_set. + * @param var_set Name of the var_set to which the jac_block is associated + * @param jac_block Block of the overall jacobian associated with these constraints and the var_set variable + */ + void FillJacobianBlock(std::string var_set, Jacobian& jac_block) const override; + + /** + * @brief Sets the bounds on the collision distance + * @param bounds New bounds that will be set. Should be size 1 + */ + void SetBounds(const std::vector& bounds); + + /** + * @brief Get the collision evaluator. This exposed for plotter callbacks + * @return The collision evaluator + */ + std::shared_ptr GetCollisionEvaluator() const; + +private: + /** @brief The number of joints in a single JointPosition */ + long n_dof_{ 0 }; + + /** @brief Bounds on the constraint value. Default: std::vector(1, ifopt::BoundSmallerZero) */ + std::vector bounds_; + + /** @brief Pointers to the vars used by this constraint. 
*/ + std::shared_ptr position_var_; + + std::shared_ptr collision_evaluator_; + + /** @brief Used to initialize */ + mutable std::string var_set_name_; + mutable std::once_flag init_flag_; + + void init() const; +}; + } // namespace trajopt_ifopt #endif diff --git a/trajopt_ifopt/include/trajopt_ifopt/fwd.h b/trajopt_ifopt/include/trajopt_ifopt/fwd.h index b889108a8..8c56a7614 100644 --- a/trajopt_ifopt/include/trajopt_ifopt/fwd.h +++ b/trajopt_ifopt/include/trajopt_ifopt/fwd.h @@ -33,6 +33,9 @@ class NodesVariables; class Node; class Var; +// Dynamic Constraint Set +class DynamicConstraintSet; + // cartesian_line_constraint.h struct CartLineInfo; class CartLineConstraint; diff --git a/trajopt_ifopt/src/constraints/collision/continuous_collision_evaluators.cpp b/trajopt_ifopt/src/constraints/collision/continuous_collision_evaluators.cpp index 9718c23a7..436f870bc 100644 --- a/trajopt_ifopt/src/constraints/collision/continuous_collision_evaluators.cpp +++ b/trajopt_ifopt/src/constraints/collision/continuous_collision_evaluators.cpp @@ -390,7 +390,7 @@ LVSDiscreteCollisionEvaluator::CalcCollisionData(const Eigen::Refgradient_results_sets.push_back(std::move(kv.second)); } - if (data->gradient_results_sets.size() > bounds_size) + if (data->gradient_results_sets.size() > bounds_size && bounds_size != 0) { auto cmp = [vars0_fixed, vars1_fixed](const trajopt_common::GradientResultsSet& a, const trajopt_common::GradientResultsSet& b) { diff --git a/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp b/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp index 73208c283..0e31db23f 100644 --- a/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp +++ b/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp @@ -195,4 +195,111 @@ std::shared_ptr DiscreteCollisionConstraint::GetColl return collision_evaluator_; } +DiscreteCollisionConstraintD::DiscreteCollisionConstraintD( + std::shared_ptr collision_evaluator, + std::shared_ptr position_var, + const std::string& name) + : DynamicConstraintSet(name) + , position_var_(std::move(position_var)) + , collision_evaluator_(std::move(collision_evaluator)) +{ + n_dof_ = position_var_->size(); + assert(n_dof_ > 0); +} + +int DiscreteCollisionConstraintD::update() +{ + int rows{ 0 }; + const trajopt_common::CollisionCacheData::ConstPtr collision_data = + collision_evaluator_->CalcCollisions(position_var_->value(), 0); + for (const auto& gradient_results_set : collision_data->gradient_results_sets) + rows += static_cast(gradient_results_set.results.size()); + + SetRows(rows); + bounds_ = std::vector(static_cast(rows), ifopt::BoundSmallerZero); + + std::call_once(init_flag_, &DiscreteCollisionConstraintD::init, this); + + return rows; +} + +Eigen::VectorXd DiscreteCollisionConstraintD::GetValues() const +{ + const trajopt_common::CollisionCacheData::ConstPtr collision_data = + collision_evaluator_->CalcCollisions(position_var_->value(), 0); + + std::vector values; + for (const auto& gradient_results_set : collision_data->gradient_results_sets) + { + for (const auto& result : gradient_results_set.results) + values.emplace_back(result.error * gradient_results_set.coeff); + } + + return Eigen::Map(values.data(), static_cast(values.size())); +} + +// Set the limits on the constraint values +std::vector DiscreteCollisionConstraintD::GetBounds() const { return bounds_; } + +void DiscreteCollisionConstraintD::FillJacobianBlock(std::string var_set, Jacobian& jac_block) const +{ + // Only 
modify the jacobian if this constraint uses var_set + if (var_set != var_set_name_ || GetRows() == 0) // NOLINT + return; + + Eigen::Index i{ 0 }; + + const trajopt_common::CollisionCacheData::ConstPtr collision_data = + collision_evaluator_->CalcCollisions(position_var_->value(), 0); + + if (collision_data->gradient_results_sets.empty()) + return; + + for (const auto& gradient_results_set : collision_data->gradient_results_sets) + { + for (const auto& result : gradient_results_set.results) + { + assert(result.gradients[0].has_gradient || result.gradients[1].has_gradient); + if (result.gradients[0].has_gradient && result.gradients[1].has_gradient) + { + // This does work but could be faster + for (int j = 0; j < n_dof_; j++) + jac_block.coeffRef(i, position_var_->getIndex() + j) = + -1.0 * (result.gradients[0].gradient[j] + result.gradients[1].gradient[j]); + } + else if (result.gradients[0].has_gradient) + { + // This does work but could be faster + for (int j = 0; j < n_dof_; j++) + jac_block.coeffRef(i, position_var_->getIndex() + j) = -1.0 * result.gradients[0].gradient[j]; + } + else if (result.gradients[1].has_gradient) + { + // This does work but could be faster + for (int j = 0; j < n_dof_; j++) + jac_block.coeffRef(i, position_var_->getIndex() + j) = -1.0 * result.gradients[1].gradient[j]; + } + ++i; + } + } + assert(GetRows() == (i - 1)); +} + +void DiscreteCollisionConstraintD::SetBounds(const std::vector& bounds) +{ + assert(bounds.size() == GetRows()); + bounds_ = bounds; +} + +void DiscreteCollisionConstraintD::init() const +{ + const auto* parent_set = position_var_->getParent()->getParent(); + var_set_name_ = parent_set->GetName(); +} + +std::shared_ptr DiscreteCollisionConstraintD::GetCollisionEvaluator() const +{ + return collision_evaluator_; +} + } // namespace trajopt_ifopt diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h index 83396d5ed..60f8cef67 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h @@ -45,6 +45,8 @@ class IfoptQPProblem : public QPProblem void addConstraintSet(std::shared_ptr constraint_set) override; + void addDynamicConstraintSet(std::shared_ptr dyn_constraint_set) override; + void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) override; void setup() override; diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h index ae44c8e41..70b10f3ee 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h @@ -5,6 +5,11 @@ #include #include +namespace trajopt_ifopt +{ +class DynamicConstraintSet; +} + namespace trajopt_sqp { enum class CostPenaltyType : std::uint8_t; @@ -38,6 +43,16 @@ class QPProblem */ virtual void addConstraintSet(std::shared_ptr constraint_set) = 0; + /** + * @brief Add a set of multiple constraints to the optimization problem. + * @param dyn_constraint_set This can be 1 to infinity number of constraints. + * + * This function can be called multiple times for different sets of + * constraints. It makes sure the overall constraint and Jacobian correctly + * considers all individual constraint sets. 
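+   *
+   * Illustrative sketch only (names such as `qp_problem` and `collision_cnt` are
+   * assumptions: a concrete QPProblem, e.g. TrajOptQPProblem, and a previously
+   * constructed DynamicConstraintSet):
+   * @code
+   *   qp_problem->addDynamicConstraintSet(collision_cnt);
+   *   qp_problem->setup();  // dynamic sets may change their row count between iterations
+   * @endcode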
+ */ + virtual void addDynamicConstraintSet(std::shared_ptr dyn_constraint_set) = 0; + /** * @brief Add a squared cost term to the problem. * @param constraint_set The constraint set to be evaluated as a squared cost. diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h index 8a9dc8839..4577f7e07 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h @@ -24,6 +24,8 @@ class TrajOptQPProblem : public QPProblem void addConstraintSet(std::shared_ptr constraint_set) override; + void addDynamicConstraintSet(std::shared_ptr dyn_constraint_set) override; + void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) override; void setup() override; diff --git a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp index 01f21e850..fed810010 100644 --- a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp @@ -43,6 +43,12 @@ void IfoptQPProblem::addConstraintSet(std::shared_ptrAddConstraintSet(std::move(constraint_set)); } +void +IfoptQPProblem::addDynamicConstraintSet(std::shared_ptr /*dyn_constraint_set*/) +{ + throw std::runtime_error("IfoptQPProblem, dynamic constraint sets are not supported"); +} + void IfoptQPProblem::addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) { diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index fc601b282..04a5aa5dc 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -6,6 +6,7 @@ #include #include +#include #include #include @@ -60,11 +61,15 @@ struct TrajOptQPProblem::Implementation std::vector> constraint_types_; std::vector constraint_names_; + std::vector dyn_constraints_; + std::vector> dyn_constraint_types_; + trajopt_ifopt::Composite squared_costs_; trajopt_ifopt::Composite hinge_costs_; trajopt_ifopt::Composite abs_costs_; trajopt_ifopt::Composite hinge_constraints_; trajopt_ifopt::Composite abs_constraints_; + Eigen::VectorXd squared_costs_target_; std::vector cost_names_; @@ -110,6 +115,8 @@ struct TrajOptQPProblem::Implementation void addConstraintSet(std::shared_ptr constraint_set); + void addDynamicConstraintSet(const std::shared_ptr& dyn_constraint_set); + void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type); void setup(); @@ -218,6 +225,14 @@ void TrajOptQPProblem::Implementation::addConstraintSet(std::shared_ptr& dyn_constraint_set) +{ + dyn_constraint_set->LinkWithVariables(variables_); + dyn_constraints_.push_back(dyn_constraint_set); + initialized_ = false; +} + void TrajOptQPProblem::Implementation::addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) { @@ -269,6 +284,12 @@ void TrajOptQPProblem::Implementation::addCostSet(std::shared_ptrupdate(); + // Local counts to avoid repeated virtual / composite queries const Eigen::Index n_nlp_vars = variables_->GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); @@ -321,6 +342,43 @@ void TrajOptQPProblem::Implementation::update() } } + for (std::size_t c = 0; c < dyn_constraints_.size(); ++c) + { + const auto& cnt = dyn_constraints_[c]; + num_nlp_cnts_ += cnt->GetRows(); + num_qp_cnts_ += cnt->GetRows(); + 
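+    // Dynamic constraint names are appended after the names of the static constraint sets.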
constraint_names_[constraints_.size() + c] = cnt->GetName(); + Eigen::VectorXd nlp_bounds_l(cnt->GetRows()); + Eigen::VectorXd nlp_bounds_u(cnt->GetRows()); + + const std::vector cnt_bounds = cnt->GetBounds(); + for (Eigen::Index i = 0; i < cnt->GetRows(); ++i) + { + nlp_bounds_l[i] = cnt_bounds[static_cast(i)].lower_; + nlp_bounds_u[i] = cnt_bounds[static_cast(i)].upper_; + } + + const Eigen::VectorXd nlp_bounds_diff = nlp_bounds_u - nlp_bounds_l; + std::vector& cnt_types = dyn_constraint_types_[c]; + cnt_types.resize(static_cast(cnt->GetRows())); + + for (std::size_t i = 0; i < static_cast(nlp_bounds_diff.size()); ++i) + { + if (std::abs(nlp_bounds_diff[static_cast(i)]) < 1e-3) + { + cnt_types[i] = ConstraintType::EQ; + num_qp_vars_ += 2; // L1 slack pair + num_qp_cnts_ += 2; + } + else + { + cnt_types[i] = ConstraintType::INEQ; + num_qp_vars_ += 1; // hinge slack + num_qp_cnts_ += 1; + } + } + } + constraint_constant_ = Eigen::VectorXd::Zero(num_nlp_cnts_ + n_hinge + n_abs); // Initialize the constraint bounds @@ -340,11 +398,14 @@ void TrajOptQPProblem::Implementation::setup() squared_costs_target_ = Eigen::VectorXd::Zero(squared_costs_.GetRows()); box_size_ = Eigen::VectorXd::Constant(n_nlp_vars, 1e-1); - constraint_merit_coeff_ = Eigen::VectorXd::Constant(static_cast(constraints_.size()), 10.0); + + constraint_merit_coeff_ = + Eigen::VectorXd::Constant(static_cast(constraints_.size() + dyn_constraints_.size()), 10.0); constraint_types_.resize(constraints_.size()); + constraint_names_.resize(constraints_.size() + dyn_constraints_.size()); + dyn_constraint_types_.resize(dyn_constraints_.size()); // Reset and reserve name buffers (avoid accumulation across multiple setup() calls) - constraint_names_.resize(constraints_.size()); cost_names_.clear(); cost_names_.reserve(static_cast(n_nlp_costs)); @@ -553,7 +614,7 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen::Ref& var_vals) { Eigen::Index row_index = hinge_constraints_.GetRows() + abs_costs_.GetRows(); - Eigen::VectorXd violations(constraints_.size()); + Eigen::VectorXd violations(constraints_.size() + dyn_constraints_.size()); for (std::size_t i = 0; i < constraints_.size(); ++i) { const auto& cnt = constraints_[i]; @@ -567,6 +628,20 @@ TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen trajopt_ifopt::calcBoundsViolations(constraint_value, cnt->GetBounds()).sum(); row_index += cnt->GetRows(); } + + for (std::size_t i = 0; i < dyn_constraints_.size(); ++i) + { + const auto& cnt = dyn_constraints_[i]; + + // NOLINTNEXTLINE + Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, cnt->GetRows(), getNumNLPVars()) * + var_vals.head(getNumNLPVars()); // NOLINT + const Eigen::VectorXd constraint_value = constraint_constant_.middleRows(row_index, cnt->GetRows()) + result_lin; + + violations(static_cast(constraints_.size() + i)) = + trajopt_ifopt::calcBoundsViolations(constraint_value, cnt->GetBounds()).sum(); + row_index += cnt->GetRows(); + } return violations; } @@ -575,7 +650,7 @@ TrajOptQPProblem::Implementation::evaluateExactConstraintViolations(const Eigen: { setVariables(var_vals.data()); - Eigen::VectorXd violations(constraints_.size()); + Eigen::VectorXd violations(constraints_.size() + dyn_constraints_.size()); for (std::size_t i = 0; i < constraints_.size(); ++i) { const auto& cnt = constraints_[i]; @@ -583,6 +658,14 @@ TrajOptQPProblem::Implementation::evaluateExactConstraintViolations(const Eigen: violations(static_cast(i)) = 
trajopt_ifopt::calcBoundsViolations(cnt_vals, cnt->GetBounds()).sum(); } + for (std::size_t i = 0; i < dyn_constraints_.size(); ++i) + { + const auto& cnt = dyn_constraints_[i]; + const Eigen::VectorXd cnt_vals = cnt->GetValues(); + violations(static_cast(constraints_.size() + i)) = + trajopt_ifopt::calcBoundsViolations(cnt_vals, cnt->GetBounds()).sum(); + } + return violations; } @@ -601,7 +684,7 @@ void TrajOptQPProblem::Implementation::setBoxSize(const Eigen::Ref& merit_coeff) { - assert(merit_coeff.size() == constraints_.size()); + assert(merit_coeff.size() == constraints_.size() + dyn_constraints_.size()); constraint_merit_coeff_ = merit_coeff; } @@ -618,6 +701,10 @@ void TrajOptQPProblem::Implementation::print() const for (const auto& cnt_type : cnt_set) std::cout << static_cast(cnt_type) << ", "; + for (const auto& cnt_set : dyn_constraint_types_) + for (const auto& cnt_type : cnt_set) + std::cout << static_cast(cnt_type) << ", "; + std::cout << '\n'; std::cout << "Box Size: " << box_size_.transpose().format(format) << '\n'; // NOLINT std::cout << "Constraint Merit Coeff: " << constraint_merit_coeff_.transpose().format(format) << '\n'; @@ -772,6 +859,23 @@ void TrajOptQPProblem::Implementation::convexifyCosts() } } } + + auto cnt_offset = static_cast(constraint_types_.size()); + for (Eigen::Index i = 0; i < dyn_constraint_types_.size(); i++) + { + for (const auto& constraint_type : dyn_constraint_types_[static_cast(i)]) + { + if (constraint_type == ConstraintType::EQ) + { + gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; + gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; + } + else + { + gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; + } + } + } } void TrajOptQPProblem::Implementation::linearizeConstraints() @@ -800,7 +904,6 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() // nlp constraints (shift again) current_row_index += abs_constraints_.GetRows(); - Eigen::Index num_nlp_cnts{ 0 }; for (const auto& cnt : constraints_) { const SparseMatrix nlp_cnt_jac = cnt->GetJacobian(); @@ -808,7 +911,16 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() for (SparseMatrix::InnerIterator it(nlp_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); - num_nlp_cnts += cnt->GetRows(); + current_row_index += cnt->GetRows(); + } + + for (const auto& cnt : dyn_constraints_) + { + const SparseMatrix nlp_cnt_jac = cnt->GetJacobian(); + for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) + for (SparseMatrix::InnerIterator it(nlp_cnt_jac, k); it; ++it) + triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); + current_row_index += cnt->GetRows(); } @@ -827,9 +939,26 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() // constraint slack vars current_row_index += abs_costs_.GetRows(); - for (Eigen::Index i = 0; i < constraint_types_.size(); i++) + for (const auto& entry : constraint_types_) { - for (const auto& constraint_type : constraint_types_[static_cast(i)]) + for (const auto& constraint_type : entry) + { + if (constraint_type == ConstraintType::EQ) + { + triplets.emplace_back(current_row_index, current_col_index++, 1.0); + triplets.emplace_back(current_row_index, current_col_index++, -1.0); + } + else + { + triplets.emplace_back(current_row_index, current_col_index++, -1.0); + } + current_row_index++; + } + } + + for (const auto& entry : dyn_constraint_types_) + { + for (const auto& constraint_type : entry) { if 
(constraint_type == ConstraintType::EQ) { @@ -846,7 +975,7 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() // Add a diagonal matrix for the variable limits (including slack variables since the merit coeff is only applied in // the cost) below the actual constraints - current_row_index = num_nlp_cnts + hinge_cnt_jac.rows() + abs_cnt_jac.rows(); + current_row_index = num_nlp_cnts_ + hinge_cnt_jac.rows() + abs_cnt_jac.rows(); for (Eigen::Index i = 0; i < num_qp_vars_; ++i) triplets.emplace_back(current_row_index + i, i, 1.0); @@ -944,6 +1073,29 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() row += cnt->GetRows(); } } + + for (const auto& cnt : dyn_constraints_) + { + const Eigen::VectorXd cnt_initial_value = cnt->GetValues(); + + // In the case of a QP problem the costs and constraints are represented as + // quadratic functions is f(x) = a + b * x + c * x^2. + // Currently for constraints we do not leverage the Hessian so the quadratic + // function is f(x) = a + b * x + // When convexifying the function it need to produce the same constraint values at the values used to calculate + // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value. + // Therefore a = cnt_initial_value - b * x + // where: b = jac (calculated below) + // x = x_initial + // a = quadratic constant (constraint_constant_) + // + // Note: This is not used by the QP solver directly but by the Trust Regions Solver + // to calculate the merit of the solve. + + // The block excludes the slack variables + constraint_constant_.segment(row, cnt->GetRows()) = cnt_initial_value - lin.segment(row, cnt->GetRows()); + row += cnt->GetRows(); + } } void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() @@ -978,9 +1130,20 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() row += n_abs; // NLP constraint bounds - for (Eigen::Index i = 0; i < constraints_.size(); ++i) + for (const auto& cnt : constraints_) + { + const auto cnt_bounds = cnt->GetBounds(); + for (Eigen::Index j = 0; j < cnt_bounds.size(); ++j) + { + const auto& b = cnt_bounds[static_cast(j)]; + cnt_bound_lower[row + j] = b.lower_; + cnt_bound_upper[row + j] = b.upper_; + } + row += cnt->GetRows(); + } + + for (const auto& cnt : dyn_constraints_) { - const auto& cnt = constraints_[static_cast(i)]; const auto cnt_bounds = cnt->GetBounds(); for (Eigen::Index j = 0; j < cnt_bounds.size(); ++j) { @@ -1049,9 +1212,28 @@ void TrajOptQPProblem::Implementation::updateSlackVariableBounds() bounds_upper_[current_cnt_index++] = double(INFINITY); } - for (Eigen::Index i = 0; i < constraint_types_.size(); i++) + for (const auto& entry : constraint_types_) { - for (const auto& constraint_type : constraint_types_[static_cast(i)]) + for (const auto& constraint_type : entry) + { + if (constraint_type == ConstraintType::EQ) + { + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + } + else + { + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + } + } + } + + for (const auto& entry : dyn_constraint_types_) + { + for (const auto& constraint_type : entry) { if (constraint_type == ConstraintType::EQ) { @@ -1083,6 +1265,11 @@ void TrajOptQPProblem::addConstraintSet(std::shared_ptraddConstraintSet(std::move(constraint_set)); } +void TrajOptQPProblem::addDynamicConstraintSet(std::shared_ptr dyn_constraint_set) +{ + 
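+  // Forward to the implementation; adding a dynamic constraint set marks the
+  // problem as uninitialized, so setup() is required again before solving.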
impl_->addDynamicConstraintSet(dyn_constraint_set); +} + void TrajOptQPProblem::addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) { From 4898b7e31468b7a94150ceb2854259f773a1f330 Mon Sep 17 00:00:00 2001 From: Levi Armstrong Date: Thu, 18 Dec 2025 10:50:19 -0600 Subject: [PATCH 2/7] fixup --- .../collision/discrete_collision_constraint.h | 11 +- .../discrete_collision_constraint.cpp | 12 +- .../trajopt_sqp/src/ifopt_qp_problem.cpp | 2 +- .../trajopt_sqp/src/trajopt_qp_problem.cpp | 302 +++++++++--------- 4 files changed, 160 insertions(+), 167 deletions(-) diff --git a/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h b/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h index f7c72cbb1..205e3c63d 100644 --- a/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h +++ b/trajopt_ifopt/include/trajopt_ifopt/constraints/collision/discrete_collision_constraint.h @@ -32,6 +32,7 @@ TRAJOPT_IGNORE_WARNINGS_PUSH TRAJOPT_IGNORE_WARNINGS_POP #include +#include namespace trajopt_ifopt { @@ -126,9 +127,9 @@ class DiscreteCollisionConstraintD : public DynamicConstraintSet DiscreteCollisionConstraintD(std::shared_ptr collision_evaluator, std::shared_ptr position_var, - const std::string& name = "DiscreteCollision"); + std::string name = "DiscreteCollision"); - int update() override; + int Update() override; /** * @brief Returns the values associated with the constraint. @@ -147,7 +148,7 @@ class DiscreteCollisionConstraintD : public DynamicConstraintSet * @brief Returns the "bounds" of this constraint. How these are enforced is up to the solver * @return Returns the "bounds" of this constraint */ - std::vector GetBounds() const override; + std::vector GetBounds() const override; /** * @brief Fills the jacobian block associated with the given var_set. @@ -160,7 +161,7 @@ class DiscreteCollisionConstraintD : public DynamicConstraintSet * @brief Sets the bounds on the collision distance * @param bounds New bounds that will be set. Should be size 1 */ - void SetBounds(const std::vector& bounds); + void SetBounds(const std::vector& bounds); /** * @brief Get the collision evaluator. This exposed for plotter callbacks @@ -173,7 +174,7 @@ class DiscreteCollisionConstraintD : public DynamicConstraintSet long n_dof_{ 0 }; /** @brief Bounds on the constraint value. Default: std::vector(1, ifopt::BoundSmallerZero) */ - std::vector bounds_; + std::vector bounds_; /** @brief Pointers to the vars used by this constraint. 
*/ std::shared_ptr position_var_; diff --git a/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp b/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp index 0e31db23f..74cdffa48 100644 --- a/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp +++ b/trajopt_ifopt/src/constraints/collision/discrete_collision_constraint.cpp @@ -198,8 +198,8 @@ std::shared_ptr DiscreteCollisionConstraint::GetColl DiscreteCollisionConstraintD::DiscreteCollisionConstraintD( std::shared_ptr collision_evaluator, std::shared_ptr position_var, - const std::string& name) - : DynamicConstraintSet(name) + std::string name) + : DynamicConstraintSet(std::move(name)) , position_var_(std::move(position_var)) , collision_evaluator_(std::move(collision_evaluator)) { @@ -207,7 +207,7 @@ DiscreteCollisionConstraintD::DiscreteCollisionConstraintD( assert(n_dof_ > 0); } -int DiscreteCollisionConstraintD::update() +int DiscreteCollisionConstraintD::Update() { int rows{ 0 }; const trajopt_common::CollisionCacheData::ConstPtr collision_data = @@ -216,7 +216,7 @@ int DiscreteCollisionConstraintD::update() rows += static_cast(gradient_results_set.results.size()); SetRows(rows); - bounds_ = std::vector(static_cast(rows), ifopt::BoundSmallerZero); + bounds_ = std::vector(static_cast(rows), BoundSmallerZero); std::call_once(init_flag_, &DiscreteCollisionConstraintD::init, this); @@ -239,7 +239,7 @@ Eigen::VectorXd DiscreteCollisionConstraintD::GetValues() const } // Set the limits on the constraint values -std::vector DiscreteCollisionConstraintD::GetBounds() const { return bounds_; } +std::vector DiscreteCollisionConstraintD::GetBounds() const { return bounds_; } void DiscreteCollisionConstraintD::FillJacobianBlock(std::string var_set, Jacobian& jac_block) const { @@ -285,7 +285,7 @@ void DiscreteCollisionConstraintD::FillJacobianBlock(std::string var_set, Jacobi assert(GetRows() == (i - 1)); } -void DiscreteCollisionConstraintD::SetBounds(const std::vector& bounds) +void DiscreteCollisionConstraintD::SetBounds(const std::vector& bounds) { assert(bounds.size() == GetRows()); bounds_ = bounds; diff --git a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp index fed810010..76c84132e 100644 --- a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp @@ -44,7 +44,7 @@ void IfoptQPProblem::addConstraintSet(std::shared_ptr /*dyn_constraint_set*/) + IfoptQPProblem::addDynamicConstraintSet(std::shared_ptr /*dyn_constraint_set*/) { throw std::runtime_error("IfoptQPProblem, dynamic constraint sets are not supported"); } diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index 04a5aa5dc..65cd7718f 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -6,7 +6,7 @@ #include #include -#include +#include #include #include @@ -41,7 +41,8 @@ namespace trajopt_sqp struct TrajOptQPProblem::Implementation { Implementation() - : squared_costs_("squared-cost-terms", false, false) + : constraints_("constraint-terms", false, false) + , squared_costs_("squared-cost-terms", false, false) , hinge_costs_("hinge-cost-terms", false, false) , abs_costs_("abs-cost-terms", false, false) , hinge_constraints_("hinge-constraint-sets", false, false) @@ -57,9 +58,11 @@ struct TrajOptQPProblem::Implementation // These 
will never change size /////////////////////////////// Eigen::VectorXd constraint_merit_coeff_; - std::vector constraints_; - std::vector> constraint_types_; + trajopt_ifopt::Composite constraints_; + std::vector constraint_types_; std::vector constraint_names_; + Eigen::Index num_constraint_qp_vars_{ 0 }; + Eigen::Index num_constraint_qp_cnts_{ 0 }; std::vector dyn_constraints_; std::vector> dyn_constraint_types_; @@ -74,6 +77,8 @@ struct TrajOptQPProblem::Implementation std::vector cost_names_; // Cached bounds (assumed static over SQP iterations) + std::vector constraint_bounds_; + std::vector squared_cost_bounds_; std::vector abs_cost_bounds_; std::vector hinge_cost_bounds_; @@ -221,7 +226,7 @@ void TrajOptQPProblem::Implementation::addVariableSet(std::shared_ptr constraint_set) { constraint_set->LinkWithVariables(variables_); - constraints_.push_back(std::move(constraint_set)); + constraints_.AddComponent(std::move(constraint_set)); initialized_ = false; } @@ -288,29 +293,30 @@ void TrajOptQPProblem::Implementation::update() return; for (auto& dyn_cnt : dyn_constraints_) - dyn_cnt->update(); + dyn_cnt->Update(); // Local counts to avoid repeated virtual / composite queries const Eigen::Index n_nlp_vars = variables_->GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); + num_nlp_cnts_ = constraints_.GetRows(); + // Hinge cost adds a variable and an inequality constraint (→ 2 constraints) // Absolute cost adds two variables and an equality constraint (→ 3 constraints) - num_qp_vars_ = n_nlp_vars + n_hinge + (2L * n_abs); - num_qp_cnts_ = n_nlp_vars + (2L * n_hinge) + (3L * n_abs); + num_qp_vars_ = num_constraint_qp_vars_ + n_nlp_vars + n_hinge + (2L * n_abs); + num_qp_cnts_ = num_nlp_cnts_ + num_constraint_qp_cnts_ + n_nlp_vars + (2L * n_hinge) + (3L * n_abs); //////////////////////////////////////////////////////// - // Get NLP bounds and detect constraint type + // Get NLP bounds and detect dynamic constraint type //////////////////////////////////////////////////////// - num_nlp_cnts_ = 0; - for (std::size_t c = 0; c < constraints_.size(); ++c) + for (std::size_t c = 0; c < dyn_constraints_.size(); ++c) { - const auto& cnt = constraints_[c]; + const auto& cnt = dyn_constraints_[c]; num_nlp_cnts_ += cnt->GetRows(); num_qp_cnts_ += cnt->GetRows(); - constraint_names_[c] = cnt->GetName(); + constraint_names_[static_cast(constraints_.GetRows()) + c] = cnt->GetName(); Eigen::VectorXd nlp_bounds_l(cnt->GetRows()); Eigen::VectorXd nlp_bounds_u(cnt->GetRows()); @@ -321,43 +327,6 @@ void TrajOptQPProblem::Implementation::update() nlp_bounds_u[i] = cnt_bounds[static_cast(i)].upper; } - const Eigen::VectorXd nlp_bounds_diff = nlp_bounds_u - nlp_bounds_l; - std::vector& cnt_types = constraint_types_[c]; - cnt_types.resize(static_cast(cnt->GetRows())); - - for (std::size_t i = 0; i < static_cast(nlp_bounds_diff.size()); ++i) - { - if (std::abs(nlp_bounds_diff[static_cast(i)]) < 1e-3) - { - cnt_types[i] = ConstraintType::EQ; - num_qp_vars_ += 2; // L1 slack pair - num_qp_cnts_ += 2; - } - else - { - cnt_types[i] = ConstraintType::INEQ; - num_qp_vars_ += 1; // hinge slack - num_qp_cnts_ += 1; - } - } - } - - for (std::size_t c = 0; c < dyn_constraints_.size(); ++c) - { - const auto& cnt = dyn_constraints_[c]; - num_nlp_cnts_ += cnt->GetRows(); - num_qp_cnts_ += cnt->GetRows(); - constraint_names_[constraints_.size() + c] = cnt->GetName(); - Eigen::VectorXd nlp_bounds_l(cnt->GetRows()); - Eigen::VectorXd nlp_bounds_u(cnt->GetRows()); - - 
const std::vector cnt_bounds = cnt->GetBounds(); - for (Eigen::Index i = 0; i < cnt->GetRows(); ++i) - { - nlp_bounds_l[i] = cnt_bounds[static_cast(i)].lower_; - nlp_bounds_u[i] = cnt_bounds[static_cast(i)].upper_; - } - const Eigen::VectorXd nlp_bounds_diff = nlp_bounds_u - nlp_bounds_l; std::vector& cnt_types = dyn_constraint_types_[c]; cnt_types.resize(static_cast(cnt->GetRows())); @@ -400,11 +369,20 @@ void TrajOptQPProblem::Implementation::setup() box_size_ = Eigen::VectorXd::Constant(n_nlp_vars, 1e-1); constraint_merit_coeff_ = - Eigen::VectorXd::Constant(static_cast(constraints_.size() + dyn_constraints_.size()), 10.0); - constraint_types_.resize(constraints_.size()); - constraint_names_.resize(constraints_.size() + dyn_constraints_.size()); + Eigen::VectorXd::Constant(constraints_.GetRows() + static_cast(dyn_constraints_.size()), 10.0); + constraint_types_.resize(static_cast(constraints_.GetRows())); + constraint_names_.resize(static_cast(constraints_.GetRows()) + dyn_constraints_.size()); dyn_constraint_types_.resize(dyn_constraints_.size()); + // Get NLP Cost and Constraint Names for Debug Print + const auto& cnt_components = constraints_.GetComponents(); + for (std::size_t i = 0; i < cnt_components.size(); ++i) + { + const auto& cnt = cnt_components[i]; + for (Eigen::Index j = 0; j < cnt->GetRows(); ++j) + constraint_names_[i] = cnt->GetName() + "_" + std::to_string(j); + } + // Reset and reserve name buffers (avoid accumulation across multiple setup() calls) cost_names_.clear(); cost_names_.reserve(static_cast(n_nlp_costs)); @@ -445,6 +423,40 @@ void TrajOptQPProblem::Implementation::setup() } } + // Get NLP bounds and detect constraint type + const Eigen::Index n_nlp_cnts = constraints_.GetRows(); + constraint_bounds_ = constraints_.GetBounds(); + num_constraint_qp_vars_ = 0; + num_constraint_qp_cnts_ = n_nlp_cnts; + + Eigen::VectorXd nlp_bounds_l(n_nlp_cnts); + Eigen::VectorXd nlp_bounds_u(n_nlp_cnts); + + for (Eigen::Index i = 0; i < n_nlp_cnts; ++i) + { + nlp_bounds_l[i] = constraint_bounds_[static_cast(i)].lower; + nlp_bounds_u[i] = constraint_bounds_[static_cast(i)].upper; + } + + const Eigen::VectorXd nlp_bounds_diff = nlp_bounds_u - nlp_bounds_l; + constraint_types_.resize(static_cast(n_nlp_cnts)); + + for (std::size_t i = 0; i < static_cast(nlp_bounds_diff.size()); ++i) + { + if (std::abs(nlp_bounds_diff[static_cast(i)]) < 1e-3) + { + constraint_types_[i] = ConstraintType::EQ; + num_constraint_qp_vars_ += 2; // L1 slack pair + num_constraint_qp_cnts_ += 2; + } + else + { + constraint_types_[i] = ConstraintType::INEQ; + num_constraint_qp_vars_ += 1; // hinge slack + num_constraint_qp_cnts_ += 1; + } + } + // Cache flattened bounds for costs (used in exact cost evaluation) squared_cost_bounds_ = squared_costs_.GetBounds(); abs_cost_bounds_ = abs_costs_.GetBounds(); @@ -614,19 +626,16 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen::Ref& var_vals) { Eigen::Index row_index = hinge_constraints_.GetRows() + abs_costs_.GetRows(); - Eigen::VectorXd violations(constraints_.size() + dyn_constraints_.size()); - for (std::size_t i = 0; i < constraints_.size(); ++i) + Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_constraints_.size())); { - const auto& cnt = constraints_[i]; - // NOLINTNEXTLINE - Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, cnt->GetRows(), getNumNLPVars()) * + Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, constraints_.GetRows(), getNumNLPVars()) * 
var_vals.head(getNumNLPVars()); // NOLINT - const Eigen::VectorXd constraint_value = constraint_constant_.middleRows(row_index, cnt->GetRows()) + result_lin; + const Eigen::VectorXd constraint_value = + constraint_constant_.middleRows(row_index, constraints_.GetRows()) + result_lin; - violations(static_cast(i)) = - trajopt_ifopt::calcBoundsViolations(constraint_value, cnt->GetBounds()).sum(); - row_index += cnt->GetRows(); + violations.head(constraints_.GetRows()) = trajopt_ifopt::calcBoundsViolations(constraint_value, constraint_bounds_); + row_index += constraints_.GetRows(); } for (std::size_t i = 0; i < dyn_constraints_.size(); ++i) @@ -638,10 +647,11 @@ TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen var_vals.head(getNumNLPVars()); // NOLINT const Eigen::VectorXd constraint_value = constraint_constant_.middleRows(row_index, cnt->GetRows()) + result_lin; - violations(static_cast(constraints_.size() + i)) = + violations(constraints_.GetRows() + static_cast(i)) = trajopt_ifopt::calcBoundsViolations(constraint_value, cnt->GetBounds()).sum(); row_index += cnt->GetRows(); } + return violations; } @@ -650,19 +660,17 @@ TrajOptQPProblem::Implementation::evaluateExactConstraintViolations(const Eigen: { setVariables(var_vals.data()); - Eigen::VectorXd violations(constraints_.size() + dyn_constraints_.size()); - for (std::size_t i = 0; i < constraints_.size(); ++i) + Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_constraints_.size())); { - const auto& cnt = constraints_[i]; - const Eigen::VectorXd cnt_vals = cnt->GetValues(); - violations(static_cast(i)) = trajopt_ifopt::calcBoundsViolations(cnt_vals, cnt->GetBounds()).sum(); + const Eigen::VectorXd cnt_vals = constraints_.GetValues(); + violations.head(constraints_.GetRows()) = trajopt_ifopt::calcBoundsViolations(cnt_vals, constraint_bounds_); } for (std::size_t i = 0; i < dyn_constraints_.size(); ++i) { const auto& cnt = dyn_constraints_[i]; const Eigen::VectorXd cnt_vals = cnt->GetValues(); - violations(static_cast(constraints_.size() + i)) = + violations(constraints_.GetRows() + static_cast(i)) = trajopt_ifopt::calcBoundsViolations(cnt_vals, cnt->GetBounds()).sum(); } @@ -684,7 +692,7 @@ void TrajOptQPProblem::Implementation::setBoxSize(const Eigen::Ref& merit_coeff) { - assert(merit_coeff.size() == constraints_.size() + dyn_constraints_.size()); + assert(merit_coeff.size() == static_cast(constraints_.GetRows()) + dyn_constraints_.size()); constraint_merit_coeff_ = merit_coeff; } @@ -697,9 +705,8 @@ void TrajOptQPProblem::Implementation::print() const std::cout << "Num QP Vars: " << num_qp_vars_ << '\n'; std::cout << "Num NLP Constraints: " << num_qp_cnts_ << '\n'; std::cout << "Detected Constraint Type: "; - for (const auto& cnt_set : constraint_types_) - for (const auto& cnt_type : cnt_set) - std::cout << static_cast(cnt_type) << ", "; + for (const auto& cnt_type : constraint_types_) + std::cout << static_cast(cnt_type) << ", "; for (const auto& cnt_set : dyn_constraint_types_) for (const auto& cnt_type : cnt_set) @@ -846,17 +853,14 @@ void TrajOptQPProblem::Implementation::convexifyCosts() for (Eigen::Index i = 0; i < constraint_types_.size(); i++) { - for (const auto& constraint_type : constraint_types_[static_cast(i)]) + if (constraint_types_[static_cast(i)] == ConstraintType::EQ) { - if (constraint_type == ConstraintType::EQ) - { - gradient_[current_var_index++] = constraint_merit_coeff_[i]; - gradient_[current_var_index++] = constraint_merit_coeff_[i]; - } - else - { - 
gradient_[current_var_index++] = constraint_merit_coeff_[i]; - } + gradient_[current_var_index++] = constraint_merit_coeff_[i]; + gradient_[current_var_index++] = constraint_merit_coeff_[i]; + } + else + { + gradient_[current_var_index++] = constraint_merit_coeff_[i]; } } @@ -880,11 +884,12 @@ void TrajOptQPProblem::Implementation::convexifyCosts() void TrajOptQPProblem::Implementation::linearizeConstraints() { + const SparseMatrix nlp_cnt_jac = constraints_.GetJacobian(); const SparseMatrix hinge_cnt_jac = hinge_constraints_.GetJacobian(); const SparseMatrix abs_cnt_jac = abs_constraints_.GetJacobian(); std::vector> triplets; - const Eigen::Index nnz_base = hinge_cnt_jac.nonZeros() + abs_cnt_jac.nonZeros(); + const Eigen::Index nnz_base = nlp_cnt_jac.nonZeros() + hinge_cnt_jac.nonZeros() + abs_cnt_jac.nonZeros(); // Rough but closer than *3 triplets.reserve(static_cast(nnz_base + num_qp_vars_ + // diag hinge_costs_.GetRows() + (2L * abs_costs_.GetRows()) + @@ -903,16 +908,9 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() // nlp constraints (shift again) current_row_index += abs_constraints_.GetRows(); - - for (const auto& cnt : constraints_) - { - const SparseMatrix nlp_cnt_jac = cnt->GetJacobian(); - for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) - for (SparseMatrix::InnerIterator it(nlp_cnt_jac, k); it; ++it) - triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); - - current_row_index += cnt->GetRows(); - } + for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) + for (SparseMatrix::InnerIterator it(nlp_cnt_jac, k); it; ++it) + triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); for (const auto& cnt : dyn_constraints_) { @@ -939,21 +937,18 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() // constraint slack vars current_row_index += abs_costs_.GetRows(); - for (const auto& entry : constraint_types_) + for (const auto& constraint_type : constraint_types_) { - for (const auto& constraint_type : entry) + if (constraint_type == ConstraintType::EQ) { - if (constraint_type == ConstraintType::EQ) - { - triplets.emplace_back(current_row_index, current_col_index++, 1.0); - triplets.emplace_back(current_row_index, current_col_index++, -1.0); - } - else - { - triplets.emplace_back(current_row_index, current_col_index++, -1.0); - } - current_row_index++; + triplets.emplace_back(current_row_index, current_col_index++, 1.0); + triplets.emplace_back(current_row_index, current_col_index++, -1.0); + } + else + { + triplets.emplace_back(current_row_index, current_col_index++, -1.0); } + current_row_index++; } for (const auto& entry : dyn_constraint_types_) @@ -1048,9 +1043,33 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() row += n_abs; } - if (num_nlp_cnts_ > 0) + if (constraints_.GetRows() > 0) + { + const Eigen::VectorXd cnt_initial_value = constraints_.GetValues(); + + // In the case of a QP problem the costs and constraints are represented as + // quadratic functions is f(x) = a + b * x + c * x^2. + // Currently for constraints we do not leverage the Hessian so the quadratic + // function is f(x) = a + b * x + // When convexifying the function it need to produce the same constraint values at the values used to calculate + // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value. 
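+    // (For example, if cnt_initial_value = 0.2 and b * x_initial = 0.5, the constant must be a = -0.3.)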
+ // Therefore a = cnt_initial_value - b * x + // where: b = jac (calculated below) + // x = x_initial + // a = quadratic constant (constraint_constant_) + // + // Note: This is not used by the QP solver directly but by the Trust Regions Solver + // to calculate the merit of the solve. + + // The block excludes the slack variables + constraint_constant_.segment(row, constraints_.GetRows()) = + cnt_initial_value - lin.segment(row, constraints_.GetRows()); + row += constraints_.GetRows(); + } + + if (!dyn_constraint_types_.empty()) { - for (const auto& cnt : constraints_) + for (const auto& cnt : dyn_constraints_) { const Eigen::VectorXd cnt_initial_value = cnt->GetValues(); @@ -1073,29 +1092,6 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() row += cnt->GetRows(); } } - - for (const auto& cnt : dyn_constraints_) - { - const Eigen::VectorXd cnt_initial_value = cnt->GetValues(); - - // In the case of a QP problem the costs and constraints are represented as - // quadratic functions is f(x) = a + b * x + c * x^2. - // Currently for constraints we do not leverage the Hessian so the quadratic - // function is f(x) = a + b * x - // When convexifying the function it need to produce the same constraint values at the values used to calculate - // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value. - // Therefore a = cnt_initial_value - b * x - // where: b = jac (calculated below) - // x = x_initial - // a = quadratic constant (constraint_constant_) - // - // Note: This is not used by the QP solver directly but by the Trust Regions Solver - // to calculate the merit of the solve. - - // The block excludes the slack variables - constraint_constant_.segment(row, cnt->GetRows()) = cnt_initial_value - lin.segment(row, cnt->GetRows()); - row += cnt->GetRows(); - } } void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() @@ -1130,17 +1126,13 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() row += n_abs; // NLP constraint bounds - for (const auto& cnt : constraints_) + for (Eigen::Index j = 0; j < constraint_bounds_.size(); ++j) { - const auto cnt_bounds = cnt->GetBounds(); - for (Eigen::Index j = 0; j < cnt_bounds.size(); ++j) - { - const auto& b = cnt_bounds[static_cast(j)]; - cnt_bound_lower[row + j] = b.lower_; - cnt_bound_upper[row + j] = b.upper_; - } - row += cnt->GetRows(); + const auto& b = constraint_bounds_[static_cast(j)]; + cnt_bound_lower[row + j] = b.lower; + cnt_bound_upper[row + j] = b.upper; } + row += constraints_.GetRows(); for (const auto& cnt : dyn_constraints_) { @@ -1212,22 +1204,19 @@ void TrajOptQPProblem::Implementation::updateSlackVariableBounds() bounds_upper_[current_cnt_index++] = double(INFINITY); } - for (const auto& entry : constraint_types_) + for (const auto& constraint_type : constraint_types_) { - for (const auto& constraint_type : entry) + if (constraint_type == ConstraintType::EQ) { - if (constraint_type == ConstraintType::EQ) - { - bounds_lower_[current_cnt_index] = 0; - bounds_upper_[current_cnt_index++] = double(INFINITY); - bounds_lower_[current_cnt_index] = 0; - bounds_upper_[current_cnt_index++] = double(INFINITY); - } - else - { - bounds_lower_[current_cnt_index] = 0; - bounds_upper_[current_cnt_index++] = double(INFINITY); - } + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + } + else + { + bounds_lower_[current_cnt_index] = 0; + 
bounds_upper_[current_cnt_index++] = double(INFINITY); } } @@ -1339,14 +1328,17 @@ Eigen::VectorXd TrajOptQPProblem::getBoxSize() const { return std::as_const(*impl_).print(); } Eigen::Index TrajOptQPProblem::getNumNLPVars() const { return std::as_const(*impl_).getNumNLPVars(); } + Eigen::Index TrajOptQPProblem::getNumNLPConstraints() const { - return static_cast(std::as_const(*impl_).constraints_.size()); + const auto& base = std::as_const(*impl_); + return (base.constraints_.GetRows() + static_cast(base.dyn_constraints_.size())); } Eigen::Index TrajOptQPProblem::getNumNLPCosts() const { return std::as_const(*impl_).getNumNLPCosts(); } Eigen::Index TrajOptQPProblem::getNumQPVars() const { return std::as_const(*impl_).num_qp_vars_; } + Eigen::Index TrajOptQPProblem::getNumQPConstraints() const { return std::as_const(*impl_).num_qp_cnts_; From faa7a68a241af421500909125ccc1700ed100a0b Mon Sep 17 00:00:00 2001 From: Levi Armstrong Date: Thu, 18 Dec 2025 11:29:18 -0600 Subject: [PATCH 3/7] fixup --- .../include/trajopt_ifopt/core/composite.h | 3 +- .../include/trajopt_ifopt/core/eigen_types.h | 40 +++++++++++++++++++ .../include/trajopt_ifopt/utils/ifopt_utils.h | 7 +--- .../include/trajopt_sqp/eigen_types.h | 35 ---------------- .../include/trajopt_sqp/expressions.h | 16 ++++---- .../include/trajopt_sqp/ifopt_qp_problem.h | 8 ++-- .../include/trajopt_sqp/osqp_eigen_solver.h | 4 +- .../include/trajopt_sqp/qp_problem.h | 11 ++--- .../include/trajopt_sqp/qp_solver.h | 6 +-- .../include/trajopt_sqp/trajopt_qp_problem.h | 7 ++-- .../trajopt_sqp/include/trajopt_sqp/types.h | 2 +- .../trajopt_sqp/src/expressions.cpp | 12 +++--- .../trajopt_sqp/src/ifopt_qp_problem.cpp | 8 ++-- .../trajopt_sqp/src/osqp_eigen_solver.cpp | 8 ++-- .../trajopt_sqp/src/trajopt_qp_problem.cpp | 30 +++++++------- 15 files changed, 96 insertions(+), 101 deletions(-) create mode 100644 trajopt_ifopt/include/trajopt_ifopt/core/eigen_types.h delete mode 100644 trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/eigen_types.h diff --git a/trajopt_ifopt/include/trajopt_ifopt/core/composite.h b/trajopt_ifopt/include/trajopt_ifopt/core/composite.h index 327294cba..d28a642b1 100644 --- a/trajopt_ifopt/include/trajopt_ifopt/core/composite.h +++ b/trajopt_ifopt/include/trajopt_ifopt/core/composite.h @@ -38,11 +38,10 @@ OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. #include #include +#include namespace trajopt_ifopt { -using Jacobian = Eigen::SparseMatrix; - /** * @brief Interface representing either Variable, Cost or Constraint. * diff --git a/trajopt_ifopt/include/trajopt_ifopt/core/eigen_types.h b/trajopt_ifopt/include/trajopt_ifopt/core/eigen_types.h new file mode 100644 index 000000000..f901f6432 --- /dev/null +++ b/trajopt_ifopt/include/trajopt_ifopt/core/eigen_types.h @@ -0,0 +1,40 @@ +/****************************************************************************** +Copyright (c) 2017, Alexander W Winkler. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +* Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +* Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation +and/or other materials provided with the distribution. 
+ +* Neither the name of the copyright holder nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +******************************************************************************/ + +#ifndef TRAJOPT_IFOPT_CORE_EIGEN_TYPES_H +#define TRAJOPT_IFOPT_CORE_EIGEN_TYPES_H + +#include + +namespace trajopt_ifopt +{ +using Jacobian = Eigen::SparseMatrix; +} // namespace trajopt_ifopt + +#endif // TRAJOPT_IFOPT_CORE_EIGEN_TYPES_H diff --git a/trajopt_ifopt/include/trajopt_ifopt/utils/ifopt_utils.h b/trajopt_ifopt/include/trajopt_ifopt/utils/ifopt_utils.h index b53643f4e..e8da4c039 100644 --- a/trajopt_ifopt/include/trajopt_ifopt/utils/ifopt_utils.h +++ b/trajopt_ifopt/include/trajopt_ifopt/utils/ifopt_utils.h @@ -24,15 +24,10 @@ #ifndef TRAJOPT_IFOPT_IFOPT_UTILS_H #define TRAJOPT_IFOPT_IFOPT_UTILS_H -#include -TRAJOPT_IGNORE_WARNINGS_PUSH -#include -TRAJOPT_IGNORE_WARNINGS_POP +#include namespace trajopt_ifopt { -using Jacobian = Eigen::SparseMatrix; - struct Bounds; class Component; class ConstraintSet; diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/eigen_types.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/eigen_types.h deleted file mode 100644 index 439148551..000000000 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/eigen_types.h +++ /dev/null @@ -1,35 +0,0 @@ -/** - * @file eigen_types.h - * @brief Contains eigen types for the trust region sqp solver - * - * @author Matthew Powelson - * @date May 18, 2020 - * - * @copyright Copyright (c) 2020, Southwest Research Institute - * - * @par License - * Software License Agreement (Apache License) - * @par - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * @par - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ -#ifndef TRAJOPT_SQP_EIGEN_TYPES_H -#define TRAJOPT_SQP_EIGEN_TYPES_H - -#include - -namespace trajopt_sqp -{ -using SparseMatrix = Eigen::SparseMatrix; -using SparseVector = Eigen::SparseVector; -} // namespace trajopt_sqp - -#endif // TRAJOPT_SQP_EIGEN_TYPES_H diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/expressions.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/expressions.h index 3d6164038..fcdb74118 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/expressions.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/expressions.h @@ -3,7 +3,7 @@ #include -#include +#include namespace trajopt_sqp { @@ -57,7 +57,7 @@ struct AffExprs : Exprs * Each row corresponds to a single expression, and each column corresponds to a * decision variable. Size: (num_exprs × num_vars). */ - SparseMatrix linear_coeffs; + trajopt_ifopt::Jacobian linear_coeffs; /** * @brief Evaluate the affine expressions at the given decision vector. @@ -124,7 +124,7 @@ struct QuadExprs : Exprs * * Dimensions: (num_expressions × num_vars). */ - SparseMatrix linear_coeffs; + trajopt_ifopt::Jacobian linear_coeffs; /** * @brief Quadratic coefficient matrices \f$Q_i\f for each expression. @@ -132,7 +132,7 @@ struct QuadExprs : Exprs * expression \f$f_i(x)\f. If a given expression is purely affine, * the corresponding matrix may be empty (zero non-zeros). */ - std::vector quadratic_coeffs; + std::vector quadratic_coeffs; /** * @brief Aggregated objective linear coefficients. @@ -159,7 +159,7 @@ struct QuadExprs : Exprs * * Dimensions: (num_vars × num_vars). */ - SparseMatrix objective_quadratic_coeffs; + trajopt_ifopt::Jacobian objective_quadratic_coeffs; /** * @brief Evaluate all quadratic expressions at the given decision vector. @@ -220,7 +220,7 @@ struct QuadExprs : Exprs * @return Affine expressions representing the local linear approximation @f$\hat{f}(x)@f$. */ AffExprs createAffExprs(const Eigen::Ref& func_error, - const Eigen::Ref& func_jacobian, + const Eigen::Ref& func_jacobian, const Eigen::Ref& x); /** @@ -294,8 +294,8 @@ AffExprs createAffExprs(const Eigen::Ref& func_error, * @return Quadratic expressions representing the local second-order approximation \f$\hat{f}(x)\f$. 
*/ QuadExprs createQuadExprs(const Eigen::Ref& func_errors, - const Eigen::Ref& func_jacobian, - const std::vector& func_hessians, + const Eigen::Ref& func_jacobian, + const std::vector& func_hessians, const Eigen::Ref& x); /** diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h index 60f8cef67..f9fa92ea3 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h @@ -96,10 +96,10 @@ class IfoptQPProblem : public QPProblem Eigen::Ref getBoxSize() override { return box_size_; } Eigen::Ref getConstraintMeritCoeff() override { return constraint_merit_coeff_; } - Eigen::Ref getHessian() override { return hessian_; } + Eigen::Ref getHessian() override { return hessian_; } Eigen::Ref getGradient() override { return gradient_; } - Eigen::Ref getConstraintMatrix() override { return constraint_matrix_; } + Eigen::Ref getConstraintMatrix() override { return constraint_matrix_; } Eigen::Ref getBoundsLower() override { return bounds_lower_; } Eigen::Ref getBoundsUpper() override { return bounds_upper_; } @@ -120,11 +120,11 @@ class IfoptQPProblem : public QPProblem Eigen::VectorXd box_size_; Eigen::VectorXd constraint_merit_coeff_; - SparseMatrix hessian_; + trajopt_ifopt::Jacobian hessian_; Eigen::VectorXd gradient_; Eigen::VectorXd cost_constant_; - SparseMatrix constraint_matrix_; + trajopt_ifopt::Jacobian constraint_matrix_; Eigen::VectorXd bounds_lower_; Eigen::VectorXd bounds_upper_; // This should be the center of the bounds diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/osqp_eigen_solver.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/osqp_eigen_solver.h index 8da6cf08c..6bae760f9 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/osqp_eigen_solver.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/osqp_eigen_solver.h @@ -64,7 +64,7 @@ class OSQPEigenSolver : public QPSolver Eigen::VectorXd getSolution() override; - bool updateHessianMatrix(const SparseMatrix& hessian) override; + bool updateHessianMatrix(const trajopt_ifopt::Jacobian& hessian) override; bool updateGradient(const Eigen::Ref& gradient) override; @@ -75,7 +75,7 @@ class OSQPEigenSolver : public QPSolver bool updateBounds(const Eigen::Ref& lowerBound, const Eigen::Ref& upperBound) override; - bool updateLinearConstraintsMatrix(const SparseMatrix& linearConstraintsMatrix) override; + bool updateLinearConstraintsMatrix(const trajopt_ifopt::Jacobian& linearConstraintsMatrix) override; QPSolverStatus getSolverStatus() const override { return solver_status_; } diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h index 70b10f3ee..32bd788fa 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h @@ -2,14 +2,9 @@ #define TRAJOPT_SQP_QP_PROBLEM_BASE_H #include -#include +#include #include -namespace trajopt_ifopt -{ -class DynamicConstraintSet; -} - namespace trajopt_sqp { enum class CostPenaltyType : std::uint8_t; @@ -175,10 +170,10 @@ class QPProblem virtual Eigen::Ref getBoxSize() = 0; virtual Eigen::Ref getConstraintMeritCoeff() = 0; - virtual Eigen::Ref getHessian() = 0; + virtual Eigen::Ref getHessian() = 0; virtual Eigen::Ref getGradient() = 0; - virtual Eigen::Ref getConstraintMatrix() = 0; + virtual Eigen::Ref 
getConstraintMatrix() = 0; virtual Eigen::Ref getBoundsLower() = 0; virtual Eigen::Ref getBoundsUpper() = 0; }; diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_solver.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_solver.h index c1510af8c..712006ea3 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_solver.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_solver.h @@ -27,7 +27,7 @@ #define TRAJOPT_SQP_INCLUDE_QP_SOLVER_H_ #include -#include +#include namespace trajopt_sqp { @@ -89,7 +89,7 @@ class QPSolver * @param hessian The QP hessian. Should be n_vars x n_vars * @return true if successful */ - virtual bool updateHessianMatrix(const SparseMatrix& hessian) = 0; + virtual bool updateHessianMatrix(const trajopt_ifopt::Jacobian& hessian) = 0; /** * @brief Updates the cost gradient @@ -126,7 +126,7 @@ class QPSolver * @param linearConstraintsMatrix Input constraint matrix * @return true if successful */ - virtual bool updateLinearConstraintsMatrix(const SparseMatrix& linearConstraintsMatrix) = 0; + virtual bool updateLinearConstraintsMatrix(const trajopt_ifopt::Jacobian& linearConstraintsMatrix) = 0; /** * @brief Returns the solver status diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h index 4577f7e07..60a9c9661 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h @@ -1,9 +1,10 @@ #ifndef TRAJOPT_SQP_TRAJOPT_QP_PROBLEM_H #define TRAJOPT_SQP_TRAJOPT_QP_PROBLEM_H -#include #include +#include + namespace trajopt_sqp { /** @brief Converts a general NLP into a convexified QP that can be solved by a QP solver */ @@ -74,10 +75,10 @@ class TrajOptQPProblem : public QPProblem Eigen::Ref getBoxSize() override; Eigen::Ref getConstraintMeritCoeff() override; - Eigen::Ref getHessian() override; + Eigen::Ref getHessian() override; Eigen::Ref getGradient() override; - Eigen::Ref getConstraintMatrix() override; + Eigen::Ref getConstraintMatrix() override; Eigen::Ref getBoundsLower() override; Eigen::Ref getBoundsUpper() override; diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/types.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/types.h index 785e16513..1cc3ebe6a 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/types.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/types.h @@ -24,7 +24,7 @@ #ifndef TRAJOPT_SQP_TYPES_H_ #define TRAJOPT_SQP_TYPES_H_ -#include +#include namespace trajopt_sqp { diff --git a/trajopt_optimizers/trajopt_sqp/src/expressions.cpp b/trajopt_optimizers/trajopt_sqp/src/expressions.cpp index 39cc1efbe..e3a8004d0 100644 --- a/trajopt_optimizers/trajopt_sqp/src/expressions.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/expressions.cpp @@ -35,7 +35,7 @@ Eigen::VectorXd QuadExprs::values(const Eigen::Ref& x) co for (Eigen::Index i = 0; i < n; ++i) { - const SparseMatrix& Q = quadratic_coeffs[static_cast(i)]; + const trajopt_ifopt::Jacobian& Q = quadratic_coeffs[static_cast(i)]; if (Q.nonZeros() == 0) continue; @@ -46,7 +46,7 @@ Eigen::VectorXd QuadExprs::values(const Eigen::Ref& x) co } AffExprs createAffExprs(const Eigen::Ref& func_error, - const Eigen::Ref& func_jacobian, + const Eigen::Ref& func_jacobian, const Eigen::Ref& x) { AffExprs aff_expr; @@ -60,8 +60,8 @@ AffExprs createAffExprs(const Eigen::Ref& func_error, } QuadExprs createQuadExprs(const Eigen::Ref& 
func_errors, - const Eigen::Ref& func_jacobian, - const std::vector& func_hessians, + const Eigen::Ref& func_jacobian, + const std::vector& func_hessians, const Eigen::Ref& x) { const Eigen::Index num_cost = func_errors.rows(); @@ -121,8 +121,8 @@ QuadExprs squareAffExprs(const AffExprs& aff_expr) // objective_quadratic_coeffs will be the sum of per-equation Q_i for (Eigen::Index i = 0; i < aff_expr.constants.rows(); ++i) { - auto eq_affexpr_coeffs = aff_expr.linear_coeffs.row(i); // b_iᵀ - const SparseMatrix eq_quadexpr_coeffs = eq_affexpr_coeffs.transpose() * eq_affexpr_coeffs; // b_i b_iᵀ + auto eq_affexpr_coeffs = aff_expr.linear_coeffs.row(i); // b_iᵀ + const trajopt_ifopt::Jacobian eq_quadexpr_coeffs = eq_affexpr_coeffs.transpose() * eq_affexpr_coeffs; // b_i b_iᵀ if (eq_quadexpr_coeffs.nonZeros() > 0) { diff --git a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp index 76c84132e..a5aea4c0a 100644 --- a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp @@ -181,7 +181,7 @@ void IfoptQPProblem::updateGradient() // Set the gradient of the NLP costs //////////////////////////////////////////////////////// gradient_ = Eigen::VectorXd::Zero(num_qp_vars_); - const SparseMatrix cost_jac = nlp_->GetJacobianOfCosts(); + const trajopt_ifopt::Jacobian cost_jac = nlp_->GetJacobianOfCosts(); /** * @note See CostFromFunc::convex in modeling_utils.cpp. Once Hessian has been implemented * @@ -253,7 +253,7 @@ void IfoptQPProblem::updateGradient() void IfoptQPProblem::linearizeConstraints() { - const SparseMatrix jac = nlp_->GetJacobianOfConstraints(); + const trajopt_ifopt::Jacobian jac = nlp_->GetJacobianOfConstraints(); // Create triplet list of nonzero constraints using T = Eigen::Triplet; @@ -263,7 +263,7 @@ void IfoptQPProblem::linearizeConstraints() // Add jacobian to triplet list for (int k = 0; k < jac.outerSize(); ++k) // NOLINT { - for (SparseMatrix::InnerIterator it(jac, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(jac, k); it; ++it) { tripletList.emplace_back(it.row(), it.col(), it.value()); } @@ -351,7 +351,7 @@ void IfoptQPProblem::updateConstraintsConstantExpression() // to calculate the merit of the solve. 
// The block excludes the slack variables - const SparseMatrix jac = constraint_matrix_.block(0, 0, num_nlp_cnts_, num_nlp_vars_); + const trajopt_ifopt::Jacobian jac = constraint_matrix_.block(0, 0, num_nlp_cnts_, num_nlp_vars_); constraint_constant_ = (cnt_initial_value - jac * x_initial); } diff --git a/trajopt_optimizers/trajopt_sqp/src/osqp_eigen_solver.cpp b/trajopt_optimizers/trajopt_sqp/src/osqp_eigen_solver.cpp index 38eab2fb7..e64b378b9 100644 --- a/trajopt_optimizers/trajopt_sqp/src/osqp_eigen_solver.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/osqp_eigen_solver.cpp @@ -203,11 +203,11 @@ bool OSQPEigenSolver::solve() Eigen::VectorXd OSQPEigenSolver::getSolution() { return solver_->getSolution(); } -bool OSQPEigenSolver::updateHessianMatrix(const SparseMatrix& hessian) +bool OSQPEigenSolver::updateHessianMatrix(const trajopt_ifopt::Jacobian& hessian) { // Clean up values close to 0 // Also multiply by 2 because OSQP is multiplying by (1/2) for the objective fuction - const SparseMatrix cleaned = 2.0 * hessian.pruned(1e-7, 1); // Any value < 1e-7 will be removed + const trajopt_ifopt::Jacobian cleaned = 2.0 * hessian.pruned(1e-7, 1); // Any value < 1e-7 will be removed if (solver_->isInitialized()) return solver_->updateHessianMatrix(cleaned); @@ -252,13 +252,13 @@ bool OSQPEigenSolver::updateBounds(const Eigen::Ref& lowe return success; } -bool OSQPEigenSolver::updateLinearConstraintsMatrix(const SparseMatrix& linearConstraintsMatrix) +bool OSQPEigenSolver::updateLinearConstraintsMatrix(const trajopt_ifopt::Jacobian& linearConstraintsMatrix) { assert(num_cnts_ == linearConstraintsMatrix.rows()); assert(num_vars_ == linearConstraintsMatrix.cols()); solver_->data()->clearLinearConstraintsMatrix(); - const SparseMatrix cleaned = linearConstraintsMatrix.pruned(1e-7, 1); // Any value < 1e-7 will be removed + const trajopt_ifopt::Jacobian cleaned = linearConstraintsMatrix.pruned(1e-7, 1); // Any value < 1e-7 will be removed if (solver_->isInitialized()) return solver_->updateLinearConstraintsMatrix(cleaned); diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index 65cd7718f..21d1dc93a 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -101,12 +101,12 @@ struct TrajOptQPProblem::Implementation Eigen::Index num_qp_cnts_{ 0 }; // These objects are computed in the convexifyCosts() method - SparseMatrix hessian_; + trajopt_ifopt::Jacobian hessian_; Eigen::VectorXd gradient_; QuadExprs squared_objective_nlp_; // This object is computed in the linearizeConstraints() method - SparseMatrix constraint_matrix_; + trajopt_ifopt::Jacobian constraint_matrix_; // This object is computed in the updateConstraintsConstantExpression() method Eigen::VectorXd constraint_constant_; // This should be the center of the bounds @@ -770,7 +770,7 @@ void TrajOptQPProblem::Implementation::convexifyCosts() if (squared_costs_.GetRows() > 0) { squared_objective_nlp_ = QuadExprs(squared_costs_.GetRows(), getNumNLPVars()); - const SparseMatrix cnt_jac = squared_costs_.GetJacobian(); + const trajopt_ifopt::Jacobian cnt_jac = squared_costs_.GetJacobian(); const Eigen::VectorXd cnt_vals = squared_costs_.GetValues(); // This is not correct should pass the value to createAffExprs then use bound to which could change the sign of the @@ -798,7 +798,7 @@ void TrajOptQPProblem::Implementation::convexifyCosts() // Add jacobian to triplet list for (int k = 0; k < 
cost_quad_expr.linear_coeffs.outerSize(); ++k) { - for (SparseMatrix::InnerIterator it(cost_quad_expr.linear_coeffs, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(cost_quad_expr.linear_coeffs, k); it; ++it) grad_triplet_list.emplace_back(it.row(), it.col(), it.value()); } } @@ -822,7 +822,7 @@ void TrajOptQPProblem::Implementation::convexifyCosts() hessian_.reserve(squared_objective_nlp_.objective_quadratic_coeffs.nonZeros()); for (int k = 0; k < squared_objective_nlp_.objective_quadratic_coeffs.outerSize(); ++k) { - for (SparseMatrix::InnerIterator it(squared_objective_nlp_.objective_quadratic_coeffs, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(squared_objective_nlp_.objective_quadratic_coeffs, k); it; ++it) hessian_.coeffRef(it.row(), it.col()) += it.value(); } } @@ -884,9 +884,9 @@ void TrajOptQPProblem::Implementation::convexifyCosts() void TrajOptQPProblem::Implementation::linearizeConstraints() { - const SparseMatrix nlp_cnt_jac = constraints_.GetJacobian(); - const SparseMatrix hinge_cnt_jac = hinge_constraints_.GetJacobian(); - const SparseMatrix abs_cnt_jac = abs_constraints_.GetJacobian(); + const trajopt_ifopt::Jacobian nlp_cnt_jac = constraints_.GetJacobian(); + const trajopt_ifopt::Jacobian hinge_cnt_jac = hinge_constraints_.GetJacobian(); + const trajopt_ifopt::Jacobian abs_cnt_jac = abs_constraints_.GetJacobian(); std::vector> triplets; const Eigen::Index nnz_base = nlp_cnt_jac.nonZeros() + hinge_cnt_jac.nonZeros() + abs_cnt_jac.nonZeros(); @@ -897,26 +897,26 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() // hinge constraints for (int k = 0; k < hinge_cnt_jac.outerSize(); ++k) - for (SparseMatrix::InnerIterator it(hinge_cnt_jac, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(hinge_cnt_jac, k); it; ++it) triplets.emplace_back(it.row(), it.col(), it.value()); // abs constraints (shifted rows) Eigen::Index current_row_index = hinge_constraints_.GetRows(); for (int k = 0; k < abs_cnt_jac.outerSize(); ++k) - for (SparseMatrix::InnerIterator it(abs_cnt_jac, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(abs_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); // nlp constraints (shift again) current_row_index += abs_constraints_.GetRows(); for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) - for (SparseMatrix::InnerIterator it(nlp_cnt_jac, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); for (const auto& cnt : dyn_constraints_) { - const SparseMatrix nlp_cnt_jac = cnt->GetJacobian(); + const trajopt_ifopt::Jacobian nlp_cnt_jac = cnt->GetJacobian(); for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) - for (SparseMatrix::InnerIterator it(nlp_cnt_jac, k); it; ++it) + for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); current_row_index += cnt->GetRows(); @@ -1356,10 +1356,10 @@ const std::vector& TrajOptQPProblem::getNLPCostNames() const Eigen::Ref TrajOptQPProblem::getBoxSize() { return impl_->box_size_; } Eigen::Ref TrajOptQPProblem::getConstraintMeritCoeff() { return impl_->constraint_merit_coeff_; } -Eigen::Ref TrajOptQPProblem::getHessian() { return impl_->hessian_; } +Eigen::Ref TrajOptQPProblem::getHessian() { return impl_->hessian_; } Eigen::Ref TrajOptQPProblem::getGradient() { return impl_->gradient_; } -Eigen::Ref 
TrajOptQPProblem::getConstraintMatrix() { return impl_->constraint_matrix_; }
+Eigen::Ref TrajOptQPProblem::getConstraintMatrix() { return impl_->constraint_matrix_; }
 
 Eigen::Ref TrajOptQPProblem::getBoundsLower() { return impl_->bounds_lower_; }
 Eigen::Ref TrajOptQPProblem::getBoundsUpper() { return impl_->bounds_upper_; }

From a66ac114173a9e4f00a9b2d41cc6a7f5bc5b99e0 Mon Sep 17 00:00:00 2001
From: Levi Armstrong 
Date: Thu, 18 Dec 2025 14:32:58 -0600
Subject: [PATCH 4/7] fixup

---
 .../include/trajopt_ifopt/core/composite.h    |   3 +
 trajopt_ifopt/src/core/composite.cpp          |   2 +
 .../include/trajopt_sqp/ifopt_qp_problem.h    |   3 +
 .../include/trajopt_sqp/qp_problem.h          |  11 +
 .../include/trajopt_sqp/trajopt_qp_problem.h  |   3 +
 .../trajopt_sqp/src/ifopt_qp_problem.cpp      |   6 +
 .../trajopt_sqp/src/trajopt_qp_problem.cpp    | 305 ++++++++++--------
 7 files changed, 194 insertions(+), 139 deletions(-)

diff --git a/trajopt_ifopt/include/trajopt_ifopt/core/composite.h b/trajopt_ifopt/include/trajopt_ifopt/core/composite.h
index d28a642b1..ac60cd78f 100644
--- a/trajopt_ifopt/include/trajopt_ifopt/core/composite.h
+++ b/trajopt_ifopt/include/trajopt_ifopt/core/composite.h
@@ -218,6 +218,9 @@ class Composite : public Component
    */
   const ComponentVec& GetComponents() const;
 
+  /** @brief Returns true if no components have been added */
+  bool Empty() const;
+
 private:
   ComponentVec components_;
   bool is_cost_{ false };
diff --git a/trajopt_ifopt/src/core/composite.cpp b/trajopt_ifopt/src/core/composite.cpp
index 5623e64b6..267fe38d3 100644
--- a/trajopt_ifopt/src/core/composite.cpp
+++ b/trajopt_ifopt/src/core/composite.cpp
@@ -197,6 +197,8 @@ std::vector Composite::GetBounds() const
 
 const Composite::ComponentVec& Composite::GetComponents() const { return components_; }
 
+bool Composite::Empty() const { return components_.empty(); }
+
 void Composite::PrintAll() const
 {
   int index = 0;
diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h
index f9fa92ea3..57df62697 100644
--- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h
+++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/ifopt_qp_problem.h
@@ -49,6 +49,9 @@ class IfoptQPProblem : public QPProblem
 
   void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) override;
 
+  void addDynamicCostSet(std::shared_ptr dyn_constraint_set,
+                         CostPenaltyType penalty_type) override;
+
   void setup() override;
 
   void setVariables(const double* x) override;
diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h
index 32bd788fa..8c16c4019 100644
--- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h
+++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/qp_problem.h
@@ -59,6 +59,17 @@ class QPProblem
   virtual void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) = 0;
 
+  /**
+   * @brief Add a dynamic cost term to the problem.
+   * @param dyn_constraint_set The dynamic constraint set to be evaluated as a cost using the given penalty type.
+   *
+   * This function can be called multiple times if the cost function is
+   * composed of different cost terms. It makes sure the overall value and
+   * gradient consider each individual cost.
+   */
+  virtual void addDynamicCostSet(std::shared_ptr dyn_constraint_set,
+                                 CostPenaltyType penalty_type) = 0;
+
   /**
    * @brief This setups the QP problems based on the constraints and cost sets added to the problem.
* @details This must be called after all constraints and costs have been added to the problem diff --git a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h index 60a9c9661..3dd69decd 100644 --- a/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h +++ b/trajopt_optimizers/trajopt_sqp/include/trajopt_sqp/trajopt_qp_problem.h @@ -29,6 +29,9 @@ class TrajOptQPProblem : public QPProblem void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type) override; + void addDynamicCostSet(std::shared_ptr dyn_constraint_set, + CostPenaltyType penalty_type) override; + void setup() override; void setVariables(const double* x) override; diff --git a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp index a5aea4c0a..2e188f66a 100644 --- a/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/ifopt_qp_problem.cpp @@ -75,6 +75,12 @@ void IfoptQPProblem::addCostSet(std::shared_ptr co } } +void IfoptQPProblem::addDynamicCostSet(std::shared_ptr /*dyn_constraint_set*/, + CostPenaltyType /*penalty_type*/) +{ + throw std::runtime_error("IfoptQPProblem, dynamic cost sets are not supported"); +} + void IfoptQPProblem::setup() { num_nlp_vars_ = nlp_->GetNumberOfOptimizationVariables(); diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index 21d1dc93a..a4ce6f61f 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -47,6 +47,7 @@ struct TrajOptQPProblem::Implementation , abs_costs_("abs-cost-terms", false, false) , hinge_constraints_("hinge-constraint-sets", false, false) , abs_constraints_("abs-constraint-sets", false, false) + , dyn_constraints_("dyn_constraint-terms", false, true) { variables_ = std::make_shared("variable-sets", false, false); } @@ -64,9 +65,6 @@ struct TrajOptQPProblem::Implementation Eigen::Index num_constraint_qp_vars_{ 0 }; Eigen::Index num_constraint_qp_cnts_{ 0 }; - std::vector dyn_constraints_; - std::vector> dyn_constraint_types_; - trajopt_ifopt::Composite squared_costs_; trajopt_ifopt::Composite hinge_costs_; trajopt_ifopt::Composite abs_costs_; @@ -95,6 +93,10 @@ struct TrajOptQPProblem::Implementation // These objects can changes size if dynamic constraints are present //////////////////////////////////////////////////////////////////// + trajopt_ifopt::Composite dyn_constraints_; + std::vector dyn_constraint_types_; + std::vector dyn_constraint_bounds_; + // These quantities are computed in the update() method Eigen::Index num_nlp_cnts_{ 0 }; Eigen::Index num_qp_vars_{ 0 }; @@ -120,10 +122,13 @@ struct TrajOptQPProblem::Implementation void addConstraintSet(std::shared_ptr constraint_set); - void addDynamicConstraintSet(const std::shared_ptr& dyn_constraint_set); + void addDynamicConstraintSet(std::shared_ptr dyn_constraint_set); void addCostSet(std::shared_ptr constraint_set, CostPenaltyType penalty_type); + void addDynamicCostSet(std::shared_ptr dyn_constraint_set, + CostPenaltyType penalty_type); + void setup(); void update(); @@ -231,10 +236,10 @@ void TrajOptQPProblem::Implementation::addConstraintSet(std::shared_ptr& dyn_constraint_set) + std::shared_ptr dyn_constraint_set) { dyn_constraint_set->LinkWithVariables(variables_); - dyn_constraints_.push_back(dyn_constraint_set); + 
dyn_constraints_.AddComponent(std::move(dyn_constraint_set)); initialized_ = false; } @@ -287,20 +292,71 @@ void TrajOptQPProblem::Implementation::addCostSet(std::shared_ptr /*dyn_constraint_set*/, + CostPenaltyType /*penalty_type*/) +{ + // dyn_constraint_set->LinkWithVariables(variables_); + // const std::vector cost_bounds = dyn_constraint_set->GetBounds(); + // switch (penalty_type) + // { + // case CostPenaltyType::SQUARED: + // { + // for (const auto& bound : cost_bounds) + // { + // if (!trajopt_ifopt::isBoundsEquality(bound)) + // throw std::runtime_error("TrajOpt Ifopt squared cost must have equality bounds!"); + // } + + // squared_costs_.AddComponent(std::move(dyn_constraint_set)); + // break; + // } + // case CostPenaltyType::ABSOLUTE: + // { + // for (const auto& bound : cost_bounds) + // { + // if (!trajopt_ifopt::isBoundsEquality(bound)) + // throw std::runtime_error("TrajOpt Ifopt absolute cost must have equality bounds!"); + // } + + // abs_costs_.AddComponent(std::move(dyn_constraint_set)); + // break; + // } + // case CostPenaltyType::HINGE: + // { + // for (const auto& bound : cost_bounds) + // { + // if (!trajopt_ifopt::isBoundsInEquality(bound)) + // throw std::runtime_error("TrajOpt Ifopt hinge cost must have inequality bounds!"); + // } + + // hinge_costs_.AddComponent(std::move(dyn_constraint_set)); + // break; + // } + // default: + // { + // throw std::runtime_error("Unsupport CostPenaltyType!"); + // } + // } +} + void TrajOptQPProblem::Implementation::update() { - if (initialized_ && dyn_constraints_.empty()) + if (initialized_ && dyn_constraints_.Empty()) return; - for (auto& dyn_cnt : dyn_constraints_) - dyn_cnt->Update(); + int num_nlp_dyn_cnts = dyn_constraints_.Update(); + dyn_constraint_bounds_ = dyn_constraints_.GetBounds(); + dyn_constraint_types_.resize(static_cast(num_nlp_dyn_cnts)); + assert(dyn_constraint_bounds_.size() == static_cast(num_nlp_dyn_cnts)); // Local counts to avoid repeated virtual / composite queries const Eigen::Index n_nlp_vars = variables_->GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); - num_nlp_cnts_ = constraints_.GetRows(); + num_nlp_cnts_ = constraints_.GetRows() + num_nlp_dyn_cnts; // Hinge cost adds a variable and an inequality constraint (→ 2 constraints) // Absolute cost adds two variables and an equality constraint (→ 3 constraints) @@ -310,41 +366,21 @@ void TrajOptQPProblem::Implementation::update() //////////////////////////////////////////////////////// // Get NLP bounds and detect dynamic constraint type //////////////////////////////////////////////////////// - - for (std::size_t c = 0; c < dyn_constraints_.size(); ++c) + for (std::size_t i = 0; i < num_nlp_dyn_cnts; ++i) { - const auto& cnt = dyn_constraints_[c]; - num_nlp_cnts_ += cnt->GetRows(); - num_qp_cnts_ += cnt->GetRows(); - constraint_names_[static_cast(constraints_.GetRows()) + c] = cnt->GetName(); - Eigen::VectorXd nlp_bounds_l(cnt->GetRows()); - Eigen::VectorXd nlp_bounds_u(cnt->GetRows()); - - const std::vector cnt_bounds = cnt->GetBounds(); - for (Eigen::Index i = 0; i < cnt->GetRows(); ++i) + const auto& b = dyn_constraint_bounds_[i]; + const double diff = b.upper - b.lower; + if (std::abs(diff) < 1e-3) { - nlp_bounds_l[i] = cnt_bounds[static_cast(i)].lower; - nlp_bounds_u[i] = cnt_bounds[static_cast(i)].upper; + dyn_constraint_types_[i] = ConstraintType::EQ; + num_qp_vars_ += 2; // L1 slack pair + num_qp_cnts_ += 2; } - - const Eigen::VectorXd nlp_bounds_diff = nlp_bounds_u - 
nlp_bounds_l; - std::vector& cnt_types = dyn_constraint_types_[c]; - cnt_types.resize(static_cast(cnt->GetRows())); - - for (std::size_t i = 0; i < static_cast(nlp_bounds_diff.size()); ++i) + else { - if (std::abs(nlp_bounds_diff[static_cast(i)]) < 1e-3) - { - cnt_types[i] = ConstraintType::EQ; - num_qp_vars_ += 2; // L1 slack pair - num_qp_cnts_ += 2; - } - else - { - cnt_types[i] = ConstraintType::INEQ; - num_qp_vars_ += 1; // hinge slack - num_qp_cnts_ += 1; - } + dyn_constraint_types_[i] = ConstraintType::INEQ; + num_qp_vars_ += 1; // hinge slack + num_qp_cnts_ += 1; } } @@ -360,6 +396,7 @@ void TrajOptQPProblem::Implementation::setup() // Local counts to avoid repeated virtual / composite queries const Eigen::Index n_nlp_vars = getNumNLPVars(); const Eigen::Index n_nlp_costs = getNumNLPCosts(); + const Eigen::Index n_nlp_cnts = constraints_.GetRows(); hinge_constraints_.ClearComponents(); abs_constraints_.ClearComponents(); @@ -369,10 +406,9 @@ void TrajOptQPProblem::Implementation::setup() box_size_ = Eigen::VectorXd::Constant(n_nlp_vars, 1e-1); constraint_merit_coeff_ = - Eigen::VectorXd::Constant(constraints_.GetRows() + static_cast(dyn_constraints_.size()), 10.0); - constraint_types_.resize(static_cast(constraints_.GetRows())); - constraint_names_.resize(static_cast(constraints_.GetRows()) + dyn_constraints_.size()); - dyn_constraint_types_.resize(dyn_constraints_.size()); + Eigen::VectorXd::Constant(n_nlp_cnts + static_cast(dyn_constraints_.GetComponents().size()), 10.0); + constraint_types_.resize(static_cast(n_nlp_cnts)); + constraint_names_.resize(static_cast(n_nlp_cnts) + dyn_constraints_.GetComponents().size()); // Get NLP Cost and Constraint Names for Debug Print const auto& cnt_components = constraints_.GetComponents(); @@ -383,6 +419,10 @@ void TrajOptQPProblem::Implementation::setup() constraint_names_[i] = cnt->GetName() + "_" + std::to_string(j); } + const auto& dyn_cnt_components = dyn_constraints_.GetComponents(); + for (std::size_t i = 0; i < dyn_cnt_components.size(); ++i) + constraint_names_[static_cast(n_nlp_cnts) + i] = dyn_cnt_components[i]->GetName(); + // Reset and reserve name buffers (avoid accumulation across multiple setup() calls) cost_names_.clear(); cost_names_.reserve(static_cast(n_nlp_costs)); @@ -424,26 +464,16 @@ void TrajOptQPProblem::Implementation::setup() } // Get NLP bounds and detect constraint type - const Eigen::Index n_nlp_cnts = constraints_.GetRows(); constraint_bounds_ = constraints_.GetBounds(); num_constraint_qp_vars_ = 0; num_constraint_qp_cnts_ = n_nlp_cnts; - Eigen::VectorXd nlp_bounds_l(n_nlp_cnts); - Eigen::VectorXd nlp_bounds_u(n_nlp_cnts); - - for (Eigen::Index i = 0; i < n_nlp_cnts; ++i) - { - nlp_bounds_l[i] = constraint_bounds_[static_cast(i)].lower; - nlp_bounds_u[i] = constraint_bounds_[static_cast(i)].upper; - } - - const Eigen::VectorXd nlp_bounds_diff = nlp_bounds_u - nlp_bounds_l; constraint_types_.resize(static_cast(n_nlp_cnts)); - - for (std::size_t i = 0; i < static_cast(nlp_bounds_diff.size()); ++i) + for (std::size_t i = 0; i < n_nlp_cnts; ++i) { - if (std::abs(nlp_bounds_diff[static_cast(i)]) < 1e-3) + const auto& b = constraint_bounds_[i]; + const double diff = b.upper - b.lower; + if (std::abs(diff) < 1e-3) { constraint_types_[i] = ConstraintType::EQ; num_constraint_qp_vars_ += 2; // L1 slack pair @@ -626,7 +656,9 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen::Ref& var_vals) { Eigen::Index row_index = hinge_constraints_.GetRows() + abs_costs_.GetRows(); - 
Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_constraints_.size())); + const auto& dyn_components = dyn_constraints_.GetComponents(); + + Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_components.size())); { // NOLINTNEXTLINE Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, constraints_.GetRows(), getNumNLPVars()) * @@ -638,9 +670,9 @@ TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen row_index += constraints_.GetRows(); } - for (std::size_t i = 0; i < dyn_constraints_.size(); ++i) + for (std::size_t i = 0; i < dyn_components.size(); ++i) { - const auto& cnt = dyn_constraints_[i]; + const auto& cnt = dyn_components[i]; // NOLINTNEXTLINE Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, cnt->GetRows(), getNumNLPVars()) * @@ -660,15 +692,17 @@ TrajOptQPProblem::Implementation::evaluateExactConstraintViolations(const Eigen: { setVariables(var_vals.data()); - Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_constraints_.size())); + const auto& dyn_components = dyn_constraints_.GetComponents(); + + Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_components.size())); { const Eigen::VectorXd cnt_vals = constraints_.GetValues(); violations.head(constraints_.GetRows()) = trajopt_ifopt::calcBoundsViolations(cnt_vals, constraint_bounds_); } - for (std::size_t i = 0; i < dyn_constraints_.size(); ++i) + for (std::size_t i = 0; i < dyn_components.size(); ++i) { - const auto& cnt = dyn_constraints_[i]; + const auto& cnt = dyn_components[i]; const Eigen::VectorXd cnt_vals = cnt->GetValues(); violations(constraints_.GetRows() + static_cast(i)) = trajopt_ifopt::calcBoundsViolations(cnt_vals, cnt->GetBounds()).sum(); @@ -692,7 +726,8 @@ void TrajOptQPProblem::Implementation::setBoxSize(const Eigen::Ref& merit_coeff) { - assert(merit_coeff.size() == static_cast(constraints_.GetRows()) + dyn_constraints_.size()); + assert(merit_coeff.size() == + static_cast(constraints_.GetRows()) + dyn_constraints_.GetComponents().size()); constraint_merit_coeff_ = merit_coeff; } @@ -708,9 +743,8 @@ void TrajOptQPProblem::Implementation::print() const for (const auto& cnt_type : constraint_types_) std::cout << static_cast(cnt_type) << ", "; - for (const auto& cnt_set : dyn_constraint_types_) - for (const auto& cnt_type : cnt_set) - std::cout << static_cast(cnt_type) << ", "; + for (const auto& cnt_type : dyn_constraint_types_) + std::cout << static_cast(cnt_type) << ", "; std::cout << '\n'; std::cout << "Box Size: " << box_size_.transpose().format(format) << '\n'; // NOLINT @@ -867,17 +901,14 @@ void TrajOptQPProblem::Implementation::convexifyCosts() auto cnt_offset = static_cast(constraint_types_.size()); for (Eigen::Index i = 0; i < dyn_constraint_types_.size(); i++) { - for (const auto& constraint_type : dyn_constraint_types_[static_cast(i)]) + if (dyn_constraint_types_[static_cast(i)] == ConstraintType::EQ) { - if (constraint_type == ConstraintType::EQ) - { - gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; - gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; - } - else - { - gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; - } + gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; + gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; + } + else + { + gradient_[current_var_index++] = constraint_merit_coeff_[cnt_offset + i]; } } } @@ -885,6 +916,7 @@ void 
TrajOptQPProblem::Implementation::convexifyCosts()
 void TrajOptQPProblem::Implementation::linearizeConstraints()
 {
   const trajopt_ifopt::Jacobian nlp_cnt_jac = constraints_.GetJacobian();
+  const trajopt_ifopt::Jacobian nlp_dyn_cnt_jac = dyn_constraints_.GetJacobian();
   const trajopt_ifopt::Jacobian hinge_cnt_jac = hinge_constraints_.GetJacobian();
   const trajopt_ifopt::Jacobian abs_cnt_jac = abs_constraints_.GetJacobian();
 
@@ -912,15 +944,11 @@ void TrajOptQPProblem::Implementation::linearizeConstraints()
     for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it)
       triplets.emplace_back(current_row_index + it.row(), it.col(), it.value());
 
-  for (const auto& cnt : dyn_constraints_)
-  {
-    const trajopt_ifopt::Jacobian nlp_cnt_jac = cnt->GetJacobian();
-    for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k)
-      for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it)
-        triplets.emplace_back(current_row_index + it.row(), it.col(), it.value());
-
-    current_row_index += cnt->GetRows();
-  }
+  // nlp dynamic constraints (shift again)
+  current_row_index += constraints_.GetRows();
+  for (int k = 0; k < nlp_dyn_cnt_jac.outerSize(); ++k)
+    for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_dyn_cnt_jac, k); it; ++it)
+      triplets.emplace_back(current_row_index + it.row(), it.col(), it.value());
 
   // hinge slack vars
   Eigen::Index current_col_index = getNumNLPVars();
@@ -951,21 +979,18 @@ void TrajOptQPProblem::Implementation::linearizeConstraints()
       current_row_index++;
     }
 
-  for (const auto& entry : dyn_constraint_types_)
+  for (const auto& constraint_type : dyn_constraint_types_)
   {
-    for (const auto& constraint_type : entry)
+    if (constraint_type == ConstraintType::EQ)
     {
-      if (constraint_type == ConstraintType::EQ)
-      {
-        triplets.emplace_back(current_row_index, current_col_index++, 1.0);
-        triplets.emplace_back(current_row_index, current_col_index++, -1.0);
-      }
-      else
-      {
-        triplets.emplace_back(current_row_index, current_col_index++, -1.0);
-      }
-      current_row_index++;
+      triplets.emplace_back(current_row_index, current_col_index++, 1.0);
+      triplets.emplace_back(current_row_index, current_col_index++, -1.0);
     }
+    else
+    {
+      triplets.emplace_back(current_row_index, current_col_index++, -1.0);
+    }
+    current_row_index++;
   }
 
   // Add a diagonal matrix for the variable limits (including slack variables since the merit coeff is only applied in
@@ -1067,30 +1092,28 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression()
     row += constraints_.GetRows();
   }
 
-  if (!dyn_constraint_types_.empty())
+  if (dyn_constraints_.GetRows() > 0)
   {
-    for (const auto& cnt : dyn_constraints_)
-    {
-      const Eigen::VectorXd cnt_initial_value = cnt->GetValues();
-
-      // In the case of a QP problem the costs and constraints are represented as
-      // quadratic functions is f(x) = a + b * x + c * x^2.
-      // Currently for constraints we do not leverage the Hessian so the quadratic
-      // function is f(x) = a + b * x
-      // When convexifying the function it need to produce the same constraint values at the values used to calculate
-      // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value.
-      // Therefore a = cnt_initial_value - b * x
-      // where: b = jac (calculated below)
-      //        x = x_initial
-      //        a = quadratic constant (constraint_constant_)
-      //
-      // Note: This is not used by the QP solver directly but by the Trust Regions Solver
-      // to calculate the merit of the solve.
- - // The block excludes the slack variables - constraint_constant_.segment(row, cnt->GetRows()) = cnt_initial_value - lin.segment(row, cnt->GetRows()); - row += cnt->GetRows(); - } + const Eigen::VectorXd cnt_initial_value = dyn_constraints_.GetValues(); + + // In the case of a QP problem the costs and constraints are represented as + // quadratic functions is f(x) = a + b * x + c * x^2. + // Currently for constraints we do not leverage the Hessian so the quadratic + // function is f(x) = a + b * x + // When convexifying the function it need to produce the same constraint values at the values used to calculate + // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value. + // Therefore a = cnt_initial_value - b * x + // where: b = jac (calculated below) + // x = x_initial + // a = quadratic constant (constraint_constant_) + // + // Note: This is not used by the QP solver directly but by the Trust Regions Solver + // to calculate the merit of the solve. + + // The block excludes the slack variables + constraint_constant_.segment(row, dyn_constraints_.GetRows()) = + cnt_initial_value - lin.segment(row, dyn_constraints_.GetRows()); + // row += dyn_constraints_.GetRows(); } } @@ -1134,7 +1157,8 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() } row += constraints_.GetRows(); - for (const auto& cnt : dyn_constraints_) + /** @todo Cache the bounds for dynamic constraints in the Update method */ + for (const auto& cnt : dyn_constraints_.GetComponents()) { const auto cnt_bounds = cnt->GetBounds(); for (Eigen::Index j = 0; j < cnt_bounds.size(); ++j) @@ -1220,22 +1244,19 @@ void TrajOptQPProblem::Implementation::updateSlackVariableBounds() } } - for (const auto& entry : dyn_constraint_types_) + for (const auto& constraint_type : dyn_constraint_types_) { - for (const auto& constraint_type : entry) + if (constraint_type == ConstraintType::EQ) { - if (constraint_type == ConstraintType::EQ) - { - bounds_lower_[current_cnt_index] = 0; - bounds_upper_[current_cnt_index++] = double(INFINITY); - bounds_lower_[current_cnt_index] = 0; - bounds_upper_[current_cnt_index++] = double(INFINITY); - } - else - { - bounds_lower_[current_cnt_index] = 0; - bounds_upper_[current_cnt_index++] = double(INFINITY); - } + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + } + else + { + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); } } } @@ -1265,6 +1286,12 @@ void TrajOptQPProblem::addCostSet(std::shared_ptr impl_->addCostSet(std::move(constraint_set), penalty_type); } +void TrajOptQPProblem::addDynamicCostSet(std::shared_ptr dyn_constraint_set, + CostPenaltyType penalty_type) +{ + impl_->addDynamicCostSet(std::move(dyn_constraint_set), penalty_type); +} + void TrajOptQPProblem::setup() { impl_->setup(); } void TrajOptQPProblem::setVariables(const double* x) { impl_->setVariables(x); } @@ -1332,7 +1359,7 @@ Eigen::Index TrajOptQPProblem::getNumNLPVars() const { return std::as_const(*impl_); - return (base.constraints_.GetRows() + static_cast(base.dyn_constraints_.size())); + return (base.constraints_.GetRows() + static_cast(base.dyn_constraints_.GetComponents().size())); } Eigen::Index TrajOptQPProblem::getNumNLPCosts() const { return std::as_const(*impl_).getNumNLPCosts(); } From 99a77ef5713c0c9b63eaa81d64642c53702a9767 Mon Sep 17 00:00:00 2001 From: Levi Armstrong Date: Thu, 18 Dec 2025 16:51:30 -0600 
Subject: [PATCH 5/7] fixup --- .../trajopt_sqp/src/trajopt_qp_problem.cpp | 293 +++++++++++------- 1 file changed, 177 insertions(+), 116 deletions(-) diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index a4ce6f61f..fbe96b575 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -45,9 +45,10 @@ struct TrajOptQPProblem::Implementation , squared_costs_("squared-cost-terms", false, false) , hinge_costs_("hinge-cost-terms", false, false) , abs_costs_("abs-cost-terms", false, false) - , hinge_constraints_("hinge-constraint-sets", false, false) - , abs_constraints_("abs-constraint-sets", false, false) - , dyn_constraints_("dyn_constraint-terms", false, true) + , dyn_constraints_("dyn-constraint-terms", false, true) + , dyn_squared_costs_("dyn-squared-cost-terms", false, true) + , dyn_hinge_costs_("dyn-hinge-cost-terms", false, true) + , dyn_abs_costs_("dyn-abs-cost-terms", false, true) { variables_ = std::make_shared("variable-sets", false, false); } @@ -68,8 +69,6 @@ struct TrajOptQPProblem::Implementation trajopt_ifopt::Composite squared_costs_; trajopt_ifopt::Composite hinge_costs_; trajopt_ifopt::Composite abs_costs_; - trajopt_ifopt::Composite hinge_constraints_; - trajopt_ifopt::Composite abs_constraints_; Eigen::VectorXd squared_costs_target_; std::vector cost_names_; @@ -81,9 +80,6 @@ struct TrajOptQPProblem::Implementation std::vector abs_cost_bounds_; std::vector hinge_cost_bounds_; - std::vector hinge_cnt_bounds_; - std::vector abs_cnt_bounds_; - std::vector var_bounds_; /** @brief Box size - constraint is set at current_val +/- box_size */ @@ -97,6 +93,10 @@ struct TrajOptQPProblem::Implementation std::vector dyn_constraint_types_; std::vector dyn_constraint_bounds_; + trajopt_ifopt::Composite dyn_squared_costs_; + trajopt_ifopt::Composite dyn_hinge_costs_; + trajopt_ifopt::Composite dyn_abs_costs_; + // These quantities are computed in the update() method Eigen::Index num_nlp_cnts_{ 0 }; Eigen::Index num_qp_vars_{ 0 }; @@ -292,99 +292,116 @@ void TrajOptQPProblem::Implementation::addCostSet(std::shared_ptr /*dyn_constraint_set*/, - CostPenaltyType /*penalty_type*/) + std::shared_ptr dyn_constraint_set, + CostPenaltyType penalty_type) { - // dyn_constraint_set->LinkWithVariables(variables_); - // const std::vector cost_bounds = dyn_constraint_set->GetBounds(); - // switch (penalty_type) - // { - // case CostPenaltyType::SQUARED: - // { - // for (const auto& bound : cost_bounds) - // { - // if (!trajopt_ifopt::isBoundsEquality(bound)) - // throw std::runtime_error("TrajOpt Ifopt squared cost must have equality bounds!"); - // } - - // squared_costs_.AddComponent(std::move(dyn_constraint_set)); - // break; - // } - // case CostPenaltyType::ABSOLUTE: - // { - // for (const auto& bound : cost_bounds) - // { - // if (!trajopt_ifopt::isBoundsEquality(bound)) - // throw std::runtime_error("TrajOpt Ifopt absolute cost must have equality bounds!"); - // } - - // abs_costs_.AddComponent(std::move(dyn_constraint_set)); - // break; - // } - // case CostPenaltyType::HINGE: - // { - // for (const auto& bound : cost_bounds) - // { - // if (!trajopt_ifopt::isBoundsInEquality(bound)) - // throw std::runtime_error("TrajOpt Ifopt hinge cost must have inequality bounds!"); - // } - - // hinge_costs_.AddComponent(std::move(dyn_constraint_set)); - // break; - // } - // default: - // { - // throw std::runtime_error("Unsupport 
CostPenaltyType!");
-  //     }
-  //   }
+  dyn_constraint_set->LinkWithVariables(variables_);
+  switch (penalty_type)
+  {
+    case CostPenaltyType::SQUARED:
+    {
+      dyn_squared_costs_.AddComponent(std::move(dyn_constraint_set));
+      break;
+    }
+    case CostPenaltyType::ABSOLUTE:
+    {
+      dyn_abs_costs_.AddComponent(std::move(dyn_constraint_set));
+      break;
+    }
+    case CostPenaltyType::HINGE:
+    {
+      dyn_hinge_costs_.AddComponent(std::move(dyn_constraint_set));
+      break;
+    }
+    default:
+    {
+      throw std::runtime_error("Unsupported CostPenaltyType!");
+    }
+  }
 }
 
 void TrajOptQPProblem::Implementation::update()
 {
-  if (initialized_ && dyn_constraints_.Empty())
+  if (initialized_ && dyn_constraints_.Empty() && dyn_squared_costs_.Empty() && dyn_abs_costs_.Empty() &&
+      dyn_hinge_costs_.Empty())
     return;
 
-  int num_nlp_dyn_cnts = dyn_constraints_.Update();
-  dyn_constraint_bounds_ = dyn_constraints_.GetBounds();
-  dyn_constraint_types_.resize(static_cast(num_nlp_dyn_cnts));
-  assert(dyn_constraint_bounds_.size() == static_cast(num_nlp_dyn_cnts));
-
   // Local counts to avoid repeated virtual / composite queries
   const Eigen::Index n_nlp_vars = variables_->GetRows();
+  const Eigen::Index n_squared = squared_costs_.GetRows();
   const Eigen::Index n_hinge = hinge_costs_.GetRows();
   const Eigen::Index n_abs = abs_costs_.GetRows();
 
-  num_nlp_cnts_ = constraints_.GetRows() + num_nlp_dyn_cnts;
+  num_nlp_cnts_ = constraints_.GetRows();
 
   // Hinge cost adds a variable and an inequality constraint (→ 2 constraints)
   // Absolute cost adds two variables and an equality constraint (→ 3 constraints)
   num_qp_vars_ = num_constraint_qp_vars_ + n_nlp_vars + n_hinge + (2L * n_abs);
   num_qp_cnts_ = num_nlp_cnts_ + num_constraint_qp_cnts_ + n_nlp_vars + (2L * n_hinge) + (3L * n_abs);
 
-  ////////////////////////////////////////////////////////
-  // Get NLP bounds and detect dynamic constraint type
-  ////////////////////////////////////////////////////////
-  for (std::size_t i = 0; i < num_nlp_dyn_cnts; ++i)
+  if (!dyn_squared_costs_.Empty())
   {
-    const auto& b = dyn_constraint_bounds_[i];
-    const double diff = b.upper - b.lower;
-    if (std::abs(diff) < 1e-3)
+    int num_dyn_squared = dyn_squared_costs_.Update();
+    Eigen::VectorXd squared_costs_target = Eigen::VectorXd::Zero(n_squared + num_dyn_squared);
+    if (n_squared > 0)
+      squared_costs_target.head(n_squared) = squared_costs_target_.head(n_squared);
+
+    auto dyn_squared_cost_bounds = dyn_squared_costs_.GetBounds();
+    for (std::size_t j = 0; j < dyn_squared_cost_bounds.size(); ++j)
     {
-      dyn_constraint_types_[i] = ConstraintType::EQ;
-      num_qp_vars_ += 2;  // L1 slack pair
-      num_qp_cnts_ += 2;
+      assert(trajopt_ifopt::isBoundsEquality(dyn_squared_cost_bounds[j]));
+      squared_costs_target(n_squared + static_cast(j)) = dyn_squared_cost_bounds[j].lower;
     }
-    else
+    squared_costs_target_ = squared_costs_target;
+  }
+
+  int n_dyn_abs{ 0 };
+  if (!dyn_abs_costs_.Empty())
+  {
+    n_dyn_abs = dyn_abs_costs_.Update();
+
+    num_qp_vars_ += (2L * n_dyn_abs);
+    num_qp_cnts_ += (3L * n_dyn_abs);
+  }
+
+  int n_dyn_hinge{ 0 };
+  if (!dyn_hinge_costs_.Empty())
+  {
+    n_dyn_hinge = dyn_hinge_costs_.Update();
+
+    num_qp_vars_ += n_dyn_hinge;
+    num_qp_cnts_ += (2L * n_dyn_hinge);
+  }
+
+  if (!dyn_constraints_.Empty())
+  {
+    int num_nlp_dyn_cnts = dyn_constraints_.Update();
+    num_nlp_cnts_ += num_nlp_dyn_cnts;
+    dyn_constraint_bounds_ = dyn_constraints_.GetBounds();
+    dyn_constraint_types_.resize(static_cast(num_nlp_dyn_cnts));
+    assert(dyn_constraint_bounds_.size() == static_cast(num_nlp_dyn_cnts));
+
+    for (std::size_t i = 0; i < 
num_nlp_dyn_cnts; ++i) { - dyn_constraint_types_[i] = ConstraintType::INEQ; - num_qp_vars_ += 1; // hinge slack - num_qp_cnts_ += 1; + const auto& b = dyn_constraint_bounds_[i]; + const double diff = b.upper - b.lower; + if (std::abs(diff) < 1e-3) + { + dyn_constraint_types_[i] = ConstraintType::EQ; + num_qp_vars_ += 2; // L1 slack pair + num_qp_cnts_ += 2; + } + else + { + dyn_constraint_types_[i] = ConstraintType::INEQ; + num_qp_vars_ += 1; // hinge slack + num_qp_cnts_ += 1; + } } } - constraint_constant_ = Eigen::VectorXd::Zero(num_nlp_cnts_ + n_hinge + n_abs); + constraint_constant_ = Eigen::VectorXd::Zero(num_nlp_cnts_ + n_hinge + n_abs + n_dyn_hinge + n_dyn_abs); // Initialize the constraint bounds bounds_lower_ = Eigen::VectorXd::Constant(num_qp_cnts_, -double(INFINITY)); @@ -398,9 +415,6 @@ void TrajOptQPProblem::Implementation::setup() const Eigen::Index n_nlp_costs = getNumNLPCosts(); const Eigen::Index n_nlp_cnts = constraints_.GetRows(); - hinge_constraints_.ClearComponents(); - abs_constraints_.ClearComponents(); - squared_costs_target_ = Eigen::VectorXd::Zero(squared_costs_.GetRows()); box_size_ = Eigen::VectorXd::Constant(n_nlp_vars, 1e-1); @@ -439,10 +453,11 @@ void TrajOptQPProblem::Implementation::setup() } } + for (const auto& cost : dyn_squared_costs_.GetComponents()) + cost_names_.push_back(cost->GetName()); + for (const auto& cost : abs_costs_.GetComponents()) { - abs_constraints_.AddComponent(cost); - const std::vector cost_bounds = cost->GetBounds(); for (Eigen::Index j = 0; j < cost->GetRows(); ++j) { @@ -451,10 +466,11 @@ void TrajOptQPProblem::Implementation::setup() } } + for (const auto& cost : dyn_abs_costs_.GetComponents()) + cost_names_.push_back(cost->GetName()); + for (const auto& cost : hinge_costs_.GetComponents()) { - hinge_constraints_.AddComponent(cost); - const std::vector cost_bounds = cost->GetBounds(); for (Eigen::Index j = 0; j < cost->GetRows(); ++j) { @@ -463,6 +479,9 @@ void TrajOptQPProblem::Implementation::setup() } } + for (const auto& cost : dyn_hinge_costs_.GetComponents()) + cost_names_.push_back(cost->GetName()); + // Get NLP bounds and detect constraint type constraint_bounds_ = constraints_.GetBounds(); num_constraint_qp_vars_ = 0; @@ -492,10 +511,6 @@ void TrajOptQPProblem::Implementation::setup() abs_cost_bounds_ = abs_costs_.GetBounds(); hinge_cost_bounds_ = hinge_costs_.GetBounds(); - // Cache constraint bounds for hinge / abs constraint composites - hinge_cnt_bounds_ = hinge_constraints_.GetBounds(); - abs_cnt_bounds_ = abs_constraints_.GetBounds(); - // Cache variable bounds (used in updateNLPVariableBounds) var_bounds_ = variables_->GetBounds(); @@ -544,7 +559,10 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige const Eigen::Index n_sq = squared_costs_.GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); - const Eigen::Index total_cost = n_sq + n_hinge + n_abs; + const auto n_dyn_sq = static_cast(dyn_squared_costs_.GetComponents().size()); + const auto n_dyn_hinge = static_cast(dyn_hinge_costs_.GetComponents().size()); + const auto n_dyn_abs = static_cast(dyn_abs_costs_.GetComponents().size()); + const Eigen::Index total_cost = n_sq + n_hinge + n_abs + n_dyn_sq + n_dyn_hinge + n_dyn_abs; Eigen::VectorXd costs = Eigen::VectorXd::Zero(total_cost); Eigen::VectorXd var_block = var_vals.head(nlp_vars); @@ -552,36 +570,79 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige // Squared costs (already 
convexified into squared_objective_nlp_) if (n_sq > 0) { + /** @todo How do I update this with dyn */ costs.head(n_sq) = squared_objective_nlp_.values(var_block); assert(!(costs.head(n_sq).array() < -1e-8).any()); } + Eigen::Index cost_offset{ n_sq }; + Eigen::Index row_offset{ 0 }; + // Hinge costs if (n_hinge > 0) { const Eigen::VectorXd hinge_cnt_constant = constraint_constant_.topRows(n_hinge); - const auto hinge_cnt_jac = constraint_matrix_.block(0, 0, hinge_constraints_.GetRows(), nlp_vars); + const auto hinge_cnt_jac = constraint_matrix_.block(0, 0, hinge_costs_.GetRows(), nlp_vars); const Eigen::VectorXd hinge_convex_value = hinge_cnt_constant + hinge_cnt_jac * var_block; const Eigen::VectorXd hinge_cost = trajopt_ifopt::calcBoundsViolations(hinge_convex_value, hinge_cost_bounds_); - costs.segment(n_sq, n_hinge) = hinge_cost; - assert(!(costs.segment(n_sq, n_hinge).array() < -1e-8).any()); + costs.segment(cost_offset, n_hinge) = hinge_cost; + assert(!(hinge_cost.array() < -1e-8).any()); + + cost_offset += n_hinge; + row_offset += n_hinge; + } + + if (n_dyn_hinge > 0) + { + for (const auto& cost : dyn_hinge_costs_.GetComponents()) + { + const Eigen::VectorXd hinge_cnt_constant = constraint_constant_.segment(row_offset, cost->GetRows()); + const auto hinge_cnt_jac = constraint_matrix_.block(row_offset, 0, cost->GetRows(), nlp_vars); + + const Eigen::VectorXd hinge_convex_value = hinge_cnt_constant + hinge_cnt_jac * var_block; + const Eigen::VectorXd hinge_cost = trajopt_ifopt::calcBoundsViolations(hinge_convex_value, cost->GetBounds()); + + costs(cost_offset++) = hinge_cost.sum(); + assert(!(hinge_cost.array() < -1e-8).any()); + row_offset += cost->GetRows(); + } } // Absolute costs if (n_abs > 0) { - const Eigen::Index abs_row_offset = n_hinge; - const Eigen::VectorXd abs_cnt_constant = constraint_constant_.segment(abs_row_offset, n_abs); - const auto abs_cnt_jac = constraint_matrix_.block(abs_row_offset, 0, abs_constraints_.GetRows(), nlp_vars); + const Eigen::VectorXd abs_cnt_constant = constraint_constant_.segment(row_offset, n_abs); + const auto abs_cnt_jac = constraint_matrix_.block(row_offset, 0, abs_costs_.GetRows(), nlp_vars); const Eigen::VectorXd abs_convex_value = abs_cnt_constant + abs_cnt_jac * var_block; // calcBoundsViolations already returns |violation| const Eigen::VectorXd abs_cost = trajopt_ifopt::calcBoundsViolations(abs_convex_value, abs_cost_bounds_); - costs.segment(n_sq + n_hinge, n_abs) = abs_cost; - assert(!(costs.segment(n_sq + n_hinge, n_abs).array() < -1e-8).any()); + costs.segment(cost_offset, n_abs) = abs_cost; + assert(!(abs_cost.array() < -1e-8).any()); + + cost_offset += n_abs; + row_offset += n_abs; + } + + if (n_dyn_abs > 0) + { + for (const auto& cost : dyn_abs_costs_.GetComponents()) + { + const Eigen::VectorXd abs_cnt_constant = constraint_constant_.segment(row_offset, cost->GetRows()); + const auto abs_cnt_jac = constraint_matrix_.block(row_offset, 0, cost->GetRows(), nlp_vars); + const Eigen::VectorXd abs_convex_value = abs_cnt_constant + abs_cnt_jac * var_block; + + // calcBoundsViolations already returns |violation| + const Eigen::VectorXd abs_cost = trajopt_ifopt::calcBoundsViolations(abs_convex_value, cost->GetBounds()); + + costs(cost_offset++) = abs_cost.sum(); + assert(!(abs_cost.array() < -1e-8).any()); + + row_offset += cost->GetRows(); + } } return costs; @@ -655,7 +716,7 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateExactCosts(const Eigen Eigen::VectorXd 
TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen::Ref& var_vals) { - Eigen::Index row_index = hinge_constraints_.GetRows() + abs_costs_.GetRows(); + Eigen::Index row_index = hinge_costs_.GetRows() + abs_costs_.GetRows(); const auto& dyn_components = dyn_constraints_.GetComponents(); Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_components.size())); @@ -754,16 +815,16 @@ void TrajOptQPProblem::Implementation::print() const std::cout << "Gradient: " << gradient_.transpose().format(format) << '\n'; std::cout << "Constraint Matrix:\n" << constraint_matrix_.toDense().format(format) << '\n'; std::cout << "Constraint Lower Bounds: " - << bounds_lower_.head(num_nlp_cnts_ + hinge_constraints_.GetRows()).transpose().format(format) << '\n'; + << bounds_lower_.head(num_nlp_cnts_ + hinge_costs_.GetRows()).transpose().format(format) << '\n'; std::cout << "Constraint Upper Bounds: " - << bounds_upper_.head(num_nlp_cnts_ + hinge_constraints_.GetRows()).transpose().format(format) << '\n'; + << bounds_upper_.head(num_nlp_cnts_ + hinge_costs_.GetRows()).transpose().format(format) << '\n'; std::cout << "Variable Lower Bounds: " - << bounds_lower_.tail(bounds_lower_.rows() - num_nlp_cnts_ - hinge_constraints_.GetRows()) + << bounds_lower_.tail(bounds_lower_.rows() - num_nlp_cnts_ - hinge_costs_.GetRows()) .transpose() .format(format) << '\n'; std::cout << "Variable Upper Bounds: " - << bounds_upper_.tail(bounds_upper_.rows() - num_nlp_cnts_ - hinge_constraints_.GetRows()) + << bounds_upper_.tail(bounds_upper_.rows() - num_nlp_cnts_ - hinge_costs_.GetRows()) .transpose() .format(format) << '\n'; @@ -778,6 +839,7 @@ Eigen::Index TrajOptQPProblem::Implementation::getNumNLPVars() const { return va Eigen::Index TrajOptQPProblem::Implementation::getNumNLPCosts() const { + /** @todo This need to be updated to include dyn costs */ return (squared_costs_.GetRows() + abs_costs_.GetRows() + hinge_costs_.GetRows()); } @@ -917,8 +979,8 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() { const trajopt_ifopt::Jacobian nlp_cnt_jac = constraints_.GetJacobian(); const trajopt_ifopt::Jacobian nlp_dyn_cnt_jac = dyn_constraints_.GetJacobian(); - const trajopt_ifopt::Jacobian hinge_cnt_jac = hinge_constraints_.GetJacobian(); - const trajopt_ifopt::Jacobian abs_cnt_jac = abs_constraints_.GetJacobian(); + const trajopt_ifopt::Jacobian hinge_cnt_jac = hinge_costs_.GetJacobian(); + const trajopt_ifopt::Jacobian abs_cnt_jac = abs_costs_.GetJacobian(); std::vector> triplets; const Eigen::Index nnz_base = nlp_cnt_jac.nonZeros() + hinge_cnt_jac.nonZeros() + abs_cnt_jac.nonZeros(); @@ -933,13 +995,13 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() triplets.emplace_back(it.row(), it.col(), it.value()); // abs constraints (shifted rows) - Eigen::Index current_row_index = hinge_constraints_.GetRows(); + Eigen::Index current_row_index = hinge_costs_.GetRows(); for (int k = 0; k < abs_cnt_jac.outerSize(); ++k) for (trajopt_ifopt::Jacobian::InnerIterator it(abs_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); // nlp constraints (shift again) - current_row_index += abs_constraints_.GetRows(); + current_row_index += abs_costs_.GetRows(); for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); @@ -1008,8 +1070,8 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() 
void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() { const Eigen::Index n_nlp = getNumNLPVars(); - const Eigen::Index n_hinge = hinge_constraints_.GetRows(); - const Eigen::Index n_abs = abs_constraints_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); const Eigen::Index total_num_cnt = n_hinge + n_abs + num_nlp_cnts_; if (total_num_cnt == 0) @@ -1024,7 +1086,7 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() Eigen::Index row = 0; if (n_hinge > 0) { // Get values about which we will linearize - const Eigen::VectorXd cnt_initial_value = hinge_constraints_.GetValues(); + const Eigen::VectorXd cnt_initial_value = hinge_costs_.GetValues(); // In the case of a QP problem the costs and constraints are represented as // quadratic functions is f(x) = a + b * x + c * x^2. @@ -1047,7 +1109,7 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() if (n_abs > 0) { // Get values about which we will linearize - const Eigen::VectorXd cnt_initial_value = abs_constraints_.GetValues(); + const Eigen::VectorXd cnt_initial_value = abs_costs_.GetValues(); // In the case of a QP problem the costs and constraints are represented as // quadratic functions is f(x) = a + b * x + c * x^2. @@ -1119,8 +1181,8 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() { - const Eigen::Index n_hinge = hinge_constraints_.GetRows(); - const Eigen::Index n_abs = abs_constraints_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); const Eigen::Index total_num_cnt = n_hinge + n_abs + num_nlp_cnts_; if (total_num_cnt == 0) @@ -1133,7 +1195,7 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() // Hinge constraint bounds for (Eigen::Index i = 0; i < n_hinge; ++i) { - const auto& b = hinge_cnt_bounds_[static_cast(i)]; + const auto& b = hinge_cost_bounds_[static_cast(i)]; cnt_bound_lower[row + i] = b.lower; cnt_bound_upper[row + i] = b.upper; } @@ -1142,7 +1204,7 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() // Absolute constraint bounds for (Eigen::Index i = 0; i < n_abs; ++i) { - const auto& b = abs_cnt_bounds_[static_cast(i)]; + const auto& b = abs_cost_bounds_[static_cast(i)]; cnt_bound_lower[row + i] = b.lower; cnt_bound_upper[row + i] = b.upper; } @@ -1181,8 +1243,8 @@ void TrajOptQPProblem::Implementation::updateNLPVariableBounds() { // Equivalent to BasicTrustRegionSQP::setTrustBoxConstraints const Eigen::Index n_nlp_vars = getNumNLPVars(); - const Eigen::Index n_hinge = hinge_constraints_.GetRows(); - const Eigen::Index n_abs = abs_constraints_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); const Eigen::VectorXd x_initial = variables_->GetValues(); @@ -1211,8 +1273,7 @@ void TrajOptQPProblem::Implementation::updateNLPVariableBounds() void TrajOptQPProblem::Implementation::updateSlackVariableBounds() { - Eigen::Index current_cnt_index = - num_nlp_cnts_ + hinge_constraints_.GetRows() + abs_constraints_.GetRows() + getNumNLPVars(); + Eigen::Index current_cnt_index = num_nlp_cnts_ + hinge_costs_.GetRows() + abs_costs_.GetRows() + getNumNLPVars(); for (Eigen::Index i = 0; i < hinge_costs_.GetRows(); i++) { From 541accf509e0648e3949aad60503fe4fbae4e636 Mon Sep 17 00:00:00 2001 From: Levi Armstrong Date: Fri, 19 Dec 2025 
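updateNLPConstraintBounds() fills the stacked lower/upper vectors in the same row order used when the constraint matrix was assembled: hinge rows first, then absolute rows, then the NLP constraint rows. A simplified sketch of that pattern; the Bounds struct here is a stand-in for the bounds type used by the library:

#include <vector>
#include <Eigen/Dense>

// Illustrative bounds type; the codebase uses its own struct with 'lower' and
// 'upper' members.
struct Bounds { double lower; double upper; };

// Fill the stacked bound vectors group by group, in the same order the
// constraint rows were assembled.
void fillStackedBounds(const std::vector<Bounds>& hinge, const std::vector<Bounds>& absolute,
                       const std::vector<Bounds>& nlp,
                       Eigen::VectorXd& lower, Eigen::VectorXd& upper)
{
  Eigen::Index row = 0;
  for (const auto* group : { &hinge, &absolute, &nlp })
  {
    for (const Bounds& b : *group)
    {
      lower[row] = b.lower;
      upper[row] = b.upper;
      ++row;
    }
  }
}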
08:58:00 -0600 Subject: [PATCH 6/7] fixup --- .../trajopt_sqp/src/trajopt_qp_problem.cpp | 263 +++++++++++------- 1 file changed, 162 insertions(+), 101 deletions(-) diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index fbe96b575..d74be1024 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -91,12 +91,16 @@ struct TrajOptQPProblem::Implementation trajopt_ifopt::Composite dyn_constraints_; std::vector dyn_constraint_types_; - std::vector dyn_constraint_bounds_; trajopt_ifopt::Composite dyn_squared_costs_; trajopt_ifopt::Composite dyn_hinge_costs_; trajopt_ifopt::Composite dyn_abs_costs_; + std::vector dyn_constraint_bounds_; + std::vector dyn_squared_cost_bounds_; + std::vector dyn_abs_cost_bounds_; + std::vector dyn_hinge_cost_bounds_; + // These quantities are computed in the update() method Eigen::Index num_nlp_cnts_{ 0 }; Eigen::Index num_qp_vars_{ 0 }; @@ -347,11 +351,11 @@ void TrajOptQPProblem::Implementation::update() if (n_squared > 0) squared_costs_target.head(n_squared) = squared_costs_target_.head(n_squared); - auto dyn_squared_cost_bounds = dyn_constraints_.GetBounds(); - for (std::size_t j = 0; j < dyn_squared_cost_bounds.size(); ++j) + dyn_squared_cost_bounds_ = dyn_constraints_.GetBounds(); + for (std::size_t j = 0; j < dyn_squared_cost_bounds_.size(); ++j) { - assert(trajopt_ifopt::isBoundsEquality(dyn_squared_cost_bounds[j])); - squared_costs_target(n_squared + static_cast(j)) = dyn_squared_cost_bounds[j].lower; + assert(trajopt_ifopt::isBoundsEquality(dyn_squared_cost_bounds_[j])); + squared_costs_target(n_squared + static_cast(j)) = dyn_squared_cost_bounds_[j].lower; } squared_costs_target_ = squared_costs_target; } @@ -360,6 +364,7 @@ void TrajOptQPProblem::Implementation::update() if (!dyn_abs_costs_.Empty()) { n_dyn_abs = dyn_abs_costs_.Update(); + dyn_abs_cost_bounds_ = dyn_abs_costs_.GetBounds(); num_qp_vars_ += (2L * n_dyn_abs); num_qp_cnts_ += (3L * n_dyn_abs); @@ -369,6 +374,7 @@ void TrajOptQPProblem::Implementation::update() if (!dyn_hinge_costs_.Empty()) { int n_dyn_hinge = dyn_hinge_costs_.Update(); + dyn_hinge_cost_bounds_ = dyn_hinge_costs_.GetBounds(); num_qp_vars_ += n_dyn_hinge; num_qp_cnts_ += (2L * n_dyn_hinge); @@ -930,17 +936,35 @@ void TrajOptQPProblem::Implementation::convexifyCosts() //////////////////////////////////////////////////////// // Set the gradient of the hinge cost variables //////////////////////////////////////////////////////// - for (Eigen::Index i = 0; i < hinge_costs_.GetRows(); i++) + + if (hinge_costs_.GetRows() > 0) { - gradient_[current_var_index++] = 1; /** @todo This should be multiplied by the weight */ + /** @todo This should be multiplied by the weight */ + gradient_.segment(current_var_index, hinge_costs_.GetRows()).setConstant(1.0); + current_var_index += hinge_costs_.GetRows(); + } + + if (dyn_hinge_costs_.GetRows() > 0) + { + gradient_.segment(current_var_index, dyn_hinge_costs_.GetRows()).setConstant(1.0); + current_var_index += dyn_hinge_costs_.GetRows(); } //////////////////////////////////////////////////////// // Set the gradient of the absolute cost variables //////////////////////////////////////////////////////// - for (Eigen::Index i = 0; i < 2L * abs_costs_.GetRows(); i++) + if (abs_costs_.GetRows() > 0) { - gradient_[current_var_index++] = 1; /** @todo This should be multiplied by the weight */ + /** @todo This should be multiplied by the 
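The bookkeeping in update() reflects the usual slack reformulation of these penalties: each hinge row adds one non-negative slack (one extra QP variable and two extra rows), and each absolute row adds a slack pair s+ and s- (two variables and three rows). A small sketch of that counting; the breakdown in the comments is the natural reading of the += statements above, not something spelled out in the patch:

#include <Eigen/Core>

struct QpSizeDelta
{
  Eigen::Index extra_vars{ 0 };
  Eigen::Index extra_cnts{ 0 };
};

// Count the QP variables and constraint rows introduced by hinge and
// absolute cost rows under the slack reformulation.
QpSizeDelta sizeDeltaForCosts(Eigen::Index n_hinge_rows, Eigen::Index n_abs_rows)
{
  QpSizeDelta d;
  d.extra_vars += n_hinge_rows;      // one slack t per hinge row
  d.extra_cnts += 2 * n_hinge_rows;  // linearized row plus the bound on t
  d.extra_vars += 2 * n_abs_rows;    // slack pair (s+, s-) per absolute row
  d.extra_cnts += 3 * n_abs_rows;    // linearized row plus the two slack bounds
  return d;
}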
weight */ + gradient_.segment(current_var_index, 2L * abs_costs_.GetRows()).setConstant(1.0); + current_var_index += (2L * abs_costs_.GetRows()); + } + + if (dyn_abs_costs_.GetRows() > 0) + { + /** @todo This should be multiplied by the weight */ + gradient_.segment(current_var_index, 2L * dyn_abs_costs_.GetRows()).setConstant(1.0); + current_var_index += (2L * dyn_abs_costs_.GetRows()); } //////////////////////////////////////////////////////// @@ -978,38 +1002,56 @@ void TrajOptQPProblem::Implementation::convexifyCosts() void TrajOptQPProblem::Implementation::linearizeConstraints() { const trajopt_ifopt::Jacobian nlp_cnt_jac = constraints_.GetJacobian(); - const trajopt_ifopt::Jacobian nlp_dyn_cnt_jac = dyn_constraints_.GetJacobian(); const trajopt_ifopt::Jacobian hinge_cnt_jac = hinge_costs_.GetJacobian(); const trajopt_ifopt::Jacobian abs_cnt_jac = abs_costs_.GetJacobian(); + const trajopt_ifopt::Jacobian dyn_nlp_cnt_jac = dyn_constraints_.GetJacobian(); + const trajopt_ifopt::Jacobian dyn_hinge_cnt_jac = dyn_hinge_costs_.GetJacobian(); + const trajopt_ifopt::Jacobian dyn_abs_cnt_jac = dyn_abs_costs_.GetJacobian(); std::vector> triplets; - const Eigen::Index nnz_base = nlp_cnt_jac.nonZeros() + hinge_cnt_jac.nonZeros() + abs_cnt_jac.nonZeros(); + const Eigen::Index nnz_base = nlp_cnt_jac.nonZeros() + hinge_cnt_jac.nonZeros() + abs_cnt_jac.nonZeros() + + dyn_nlp_cnt_jac.nonZeros() + dyn_hinge_cnt_jac.nonZeros() + dyn_abs_cnt_jac.nonZeros(); // Rough but closer than *3 triplets.reserve(static_cast(nnz_base + num_qp_vars_ + // diag - hinge_costs_.GetRows() + (2L * abs_costs_.GetRows()) + + hinge_costs_.GetRows() + dyn_hinge_costs_.GetRows() + + (2L * abs_costs_.GetRows()) + (2L * dyn_abs_costs_.GetRows()) + (2L * num_nlp_cnts_))); + Eigen::Index current_row_index = 0; + // hinge constraints for (int k = 0; k < hinge_cnt_jac.outerSize(); ++k) for (trajopt_ifopt::Jacobian::InnerIterator it(hinge_cnt_jac, k); it; ++it) - triplets.emplace_back(it.row(), it.col(), it.value()); + triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); - // abs constraints (shifted rows) - Eigen::Index current_row_index = hinge_costs_.GetRows(); + // dyn hinge constraints (shifted rows) + current_row_index += hinge_costs_.GetRows(); + for (int k = 0; k < dyn_hinge_cnt_jac.outerSize(); ++k) + for (trajopt_ifopt::Jacobian::InnerIterator it(dyn_hinge_cnt_jac, k); it; ++it) + triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); + + // abs constraints (shifted again) + current_row_index += dyn_hinge_costs_.GetRows(); for (int k = 0; k < abs_cnt_jac.outerSize(); ++k) for (trajopt_ifopt::Jacobian::InnerIterator it(abs_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); - // nlp constraints (shift again) + // dyn abs constraints (shifted again) current_row_index += abs_costs_.GetRows(); + for (int k = 0; k < dyn_abs_cnt_jac.outerSize(); ++k) + for (trajopt_ifopt::Jacobian::InnerIterator it(dyn_abs_cnt_jac, k); it; ++it) + triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); + + // nlp constraints (shift again) + current_row_index += dyn_abs_costs_.GetRows(); for (int k = 0; k < nlp_cnt_jac.outerSize(); ++k) for (trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); // nlp dynamic constraints (shift again) current_row_index += constraints_.GetRows(); - for (int k = 0; k < nlp_dyn_cnt_jac.outerSize(); ++k) - for 
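The slack variables enter the QP objective linearly, so convexifyCosts() only needs to write their coefficients into the gradient; the patch uses a constant 1.0 and leaves per-row weighting as a TODO. A hedged sketch of what a weighted version could look like, where weights is a hypothetical per-slack coefficient vector, not an existing member:

#include <Eigen/Dense>

// Hypothetical weighted variant: write one objective coefficient per slack
// variable instead of the constant 1.0 currently used.
void setSlackGradient(Eigen::VectorXd& gradient, Eigen::Index start,
                      const Eigen::VectorXd& weights)
{
  gradient.segment(start, weights.size()) = weights;
}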
(trajopt_ifopt::Jacobian::InnerIterator it(nlp_cnt_jac, k); it; ++it) + for (int k = 0; k < dyn_nlp_cnt_jac.outerSize(); ++k) + for (trajopt_ifopt::Jacobian::InnerIterator it(dyn_nlp_cnt_jac, k); it; ++it) triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); // hinge slack vars @@ -1017,14 +1059,25 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() for (Eigen::Index i = 0; i < hinge_costs_.GetRows(); ++i) triplets.emplace_back(i, current_col_index++, -1.0); - // abs slack vars current_row_index = hinge_costs_.GetRows(); + for (Eigen::Index i = 0; i < dyn_hinge_costs_.GetRows(); ++i) + triplets.emplace_back(i, current_col_index++, -1.0); + + // abs slack vars + current_row_index += dyn_hinge_costs_.GetRows(); for (Eigen::Index i = 0; i < abs_costs_.GetRows(); ++i) { triplets.emplace_back(current_row_index + i, current_col_index++, 1.0); triplets.emplace_back(current_row_index + i, current_col_index++, -1.0); } + current_row_index += abs_costs_.GetRows(); + for (Eigen::Index i = 0; i < dyn_abs_costs_.GetRows(); ++i) + { + triplets.emplace_back(current_row_index + i, current_col_index++, 1.0); + triplets.emplace_back(current_row_index + i, current_col_index++, -1.0); + } + // constraint slack vars current_row_index += abs_costs_.GetRows(); for (const auto& constraint_type : constraint_types_) @@ -1070,9 +1123,13 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() { const Eigen::Index n_nlp = getNumNLPVars(); + const Eigen::Index n_cnt = constraints_.GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); - const Eigen::Index total_num_cnt = n_hinge + n_abs + num_nlp_cnts_; + const Eigen::Index n_dyn_cnt = dyn_constraints_.GetRows(); + const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); + const Eigen::Index n_dyn_abs = dyn_abs_costs_.GetRows(); + const Eigen::Index total_num_cnt = num_nlp_cnts_ + n_hinge + n_abs + n_dyn_hinge + n_dyn_abs; if (total_num_cnt == 0) return; @@ -1083,107 +1140,86 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() // One mat-vec for all constraint rows (excluding slack columns) const Eigen::VectorXd lin = constraint_matrix_.block(0, 0, total_num_cnt, n_nlp) * x_initial; + // In the case of a QP problem the costs and constraints are represented as + // quadratic functions is f(x) = a + b * x + c * x^2. + // Currently for constraints we do not leverage the Hessian so the quadratic + // function is f(x) = a + b * x + // When convexifying the function it need to produce the same constraint values at the values used to calculate + // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value. + // Therefore a = cnt_initial_value - b * x + // where: b = jac (calculated below) + // x = x_initial + // a = quadratic constant (constraint_constant_) + // + // Note: This is not used by the QP solver directly but by the Trust Regions Solver + // to calculate the merit of the solve. + Eigen::Index row = 0; if (n_hinge > 0) { // Get values about which we will linearize const Eigen::VectorXd cnt_initial_value = hinge_costs_.GetValues(); - // In the case of a QP problem the costs and constraints are represented as - // quadratic functions is f(x) = a + b * x + c * x^2. 
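The slack columns appended above follow a fixed pattern: every hinge row receives a single -1 entry for its slack, and every absolute row receives a +1/-1 pair for its two slacks. A standalone sketch of that pattern with illustrative offsets:

#include <vector>
#include <Eigen/Sparse>

// Append the slack-variable columns for a hinge block followed by an absolute
// block. Rows start at 0 for the hinge block; column numbering starts at
// first_slack_col.
void appendSlackColumns(std::vector<Eigen::Triplet<double>>& triplets,
                        Eigen::Index hinge_rows, Eigen::Index abs_rows,
                        Eigen::Index first_slack_col)
{
  Eigen::Index col = first_slack_col;
  for (Eigen::Index i = 0; i < hinge_rows; ++i)
    triplets.emplace_back(i, col++, -1.0);  // single slack t per hinge row

  const Eigen::Index abs_row_offset = hinge_rows;
  for (Eigen::Index i = 0; i < abs_rows; ++i)
  {
    triplets.emplace_back(abs_row_offset + i, col++, 1.0);   // s+
    triplets.emplace_back(abs_row_offset + i, col++, -1.0);  // s-
  }
}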
-    // Currently for constraints we do not leverage the Hessian so the quadratic
-    // function is f(x) = a + b * x
-    // When convexifying the function it need to produce the same constraint values at the values used to calculate
-    // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value.
-    // Therefore a = cnt_initial_value - b * x
-    // where: b = jac (calculated below)
-    //        x = x_initial
-    //        a = quadratic constant (constraint_constant_)
-    //
-    // Note: This is not used by the QP solver directly but by the Trust Regions Solver
-    //       to calculate the merit of the solve.
-
     // The block excludes the slack variables
     constraint_constant_.segment(row, n_hinge) = cnt_initial_value - lin.segment(row, n_hinge);
     row += n_hinge;
   }
 
+  if (n_dyn_hinge > 0)
+  {  // Get values about which we will linearize
+    const Eigen::VectorXd cnt_initial_value = dyn_hinge_costs_.GetValues();
+
+    // The block excludes the slack variables
+    constraint_constant_.segment(row, n_dyn_hinge) = cnt_initial_value - lin.segment(row, n_dyn_hinge);
+    row += n_dyn_hinge;
+  }
+
   if (n_abs > 0)
   {  // Get values about which we will linearize
     const Eigen::VectorXd cnt_initial_value = abs_costs_.GetValues();
 
-    // In the case of a QP problem the costs and constraints are represented as
-    // quadratic functions is f(x) = a + b * x + c * x^2.
-    // Currently for constraints we do not leverage the Hessian so the quadratic
-    // function is f(x) = a + b * x
-    // When convexifying the function it need to produce the same constraint values at the values used to calculate
-    // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value.
-    // Therefore a = cnt_initial_value - b * x
-    // where: b = jac (calculated below)
-    //        x = x_initial
-    //        a = quadratic constant (constraint_constant_)
-    //
-    // Note: This is not used by the QP solver directly but by the Trust Regions Solver
-    //       to calculate the merit of the solve.
-
     // The block excludes the slack variables
     constraint_constant_.segment(row, n_abs) = cnt_initial_value - lin.segment(row, n_abs);
     row += n_abs;
   }
 
-  if (constraints_.GetRows() > 0)
+  if (n_dyn_abs > 0)
+  {  // Get values about which we will linearize
+    const Eigen::VectorXd cnt_initial_value = dyn_abs_costs_.GetValues();
+
+    // The block excludes the slack variables
+    constraint_constant_.segment(row, n_dyn_abs) = cnt_initial_value - lin.segment(row, n_dyn_abs);
+    row += n_dyn_abs;
+  }
+
+  if (n_cnt > 0)
   {
     const Eigen::VectorXd cnt_initial_value = constraints_.GetValues();
 
-    // In the case of a QP problem the costs and constraints are represented as
-    // quadratic functions is f(x) = a + b * x + c * x^2.
-    // Currently for constraints we do not leverage the Hessian so the quadratic
-    // function is f(x) = a + b * x
-    // When convexifying the function it need to produce the same constraint values at the values used to calculate
-    // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value.
-    // Therefore a = cnt_initial_value - b * x
-    // where: b = jac (calculated below)
-    //        x = x_initial
-    //        a = quadratic constant (constraint_constant_)
-    //
-    // Note: This is not used by the QP solver directly but by the Trust Regions Solver
-    //       to calculate the merit of the solve.
- // The block excludes the slack variables - constraint_constant_.segment(row, constraints_.GetRows()) = - cnt_initial_value - lin.segment(row, constraints_.GetRows()); - row += constraints_.GetRows(); + constraint_constant_.segment(row, n_cnt) = cnt_initial_value - lin.segment(row, n_cnt); + row += n_cnt; } - if (dyn_constraints_.GetRows() > 0) + if (n_dyn_cnt > 0) { const Eigen::VectorXd cnt_initial_value = dyn_constraints_.GetValues(); - // In the case of a QP problem the costs and constraints are represented as - // quadratic functions is f(x) = a + b * x + c * x^2. - // Currently for constraints we do not leverage the Hessian so the quadratic - // function is f(x) = a + b * x - // When convexifying the function it need to produce the same constraint values at the values used to calculate - // the jacobian, so f(x_initial) = a + b * x = cnt_initial_value. - // Therefore a = cnt_initial_value - b * x - // where: b = jac (calculated below) - // x = x_initial - // a = quadratic constant (constraint_constant_) - // - // Note: This is not used by the QP solver directly but by the Trust Regions Solver - // to calculate the merit of the solve. - // The block excludes the slack variables - constraint_constant_.segment(row, dyn_constraints_.GetRows()) = - cnt_initial_value - lin.segment(row, dyn_constraints_.GetRows()); - // row += dyn_constraints_.GetRows(); + constraint_constant_.segment(row, n_dyn_cnt) = cnt_initial_value - lin.segment(row, n_dyn_cnt); + // row += n_dyn_cnt; } } void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() { + const Eigen::Index n_cnt = constraints_.GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); - const Eigen::Index total_num_cnt = n_hinge + n_abs + num_nlp_cnts_; + const Eigen::Index n_dyn_cnt = dyn_constraints_.GetRows(); + const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); + const Eigen::Index n_dyn_abs = dyn_abs_costs_.GetRows(); + + const Eigen::Index total_num_cnt = num_nlp_cnts_ + n_hinge + n_abs + n_dyn_hinge + n_dyn_abs; if (total_num_cnt == 0) return; @@ -1201,6 +1237,14 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() } row += n_hinge; + for (Eigen::Index i = 0; i < n_dyn_hinge; ++i) + { + const auto& b = dyn_hinge_cost_bounds_[static_cast(i)]; + cnt_bound_lower[row + i] = b.lower; + cnt_bound_upper[row + i] = b.upper; + } + row += n_dyn_hinge; + // Absolute constraint bounds for (Eigen::Index i = 0; i < n_abs; ++i) { @@ -1210,26 +1254,29 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() } row += n_abs; + for (Eigen::Index i = 0; i < n_dyn_abs; ++i) + { + const auto& b = dyn_abs_cost_bounds_[static_cast(i)]; + cnt_bound_lower[row + i] = b.lower; + cnt_bound_upper[row + i] = b.upper; + } + row += n_dyn_abs; + // NLP constraint bounds - for (Eigen::Index j = 0; j < constraint_bounds_.size(); ++j) + for (Eigen::Index j = 0; j < n_cnt; ++j) { const auto& b = constraint_bounds_[static_cast(j)]; cnt_bound_lower[row + j] = b.lower; cnt_bound_upper[row + j] = b.upper; } - row += constraints_.GetRows(); + row += n_cnt; - /** @todo Cache the bounds for dynamic constraints in the Update method */ - for (const auto& cnt : dyn_constraints_.GetComponents()) + for (Eigen::Index j = 0; j < n_dyn_cnt; ++j) { - const auto cnt_bounds = cnt->GetBounds(); - for (Eigen::Index j = 0; j < cnt_bounds.size(); ++j) - { - const auto& b = cnt_bounds[static_cast(j)]; - cnt_bound_lower[row + j] = b.lower; - cnt_bound_upper[row + j] = b.upper; - } - row += 
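All of these blocks compute the same quantity described in the consolidated comment: with the linear model f(x) ≈ a + J x required to reproduce the true values at x_initial, the constant is a = f(x_initial) - J x_initial. A dense, standalone sketch (the implementation works block-wise on the sparse constraint matrix):

#include <Eigen/Dense>

// Constant term of the linearization f(x) ≈ a + J*x about x0, chosen so the
// model matches the true values at x0.
Eigen::VectorXd linearizationConstant(const Eigen::VectorXd& f_x0,  // values at x0
                                      const Eigen::MatrixXd& J,     // Jacobian at x0
                                      const Eigen::VectorXd& x0)
{
  return f_x0 - J * x0;
}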
cnt->GetRows(); + const auto& b = dyn_constraint_bounds_[static_cast(j)]; + cnt_bound_lower[row + j] = b.lower; + cnt_bound_upper[row + j] = b.upper; + row += n_dyn_cnt; } const Eigen::VectorXd linearized_cnt_lower = cnt_bound_lower - constraint_constant_; @@ -1245,6 +1292,8 @@ void TrajOptQPProblem::Implementation::updateNLPVariableBounds() const Eigen::Index n_nlp_vars = getNumNLPVars(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); + const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); + const Eigen::Index n_dyn_abs = dyn_abs_costs_.GetRows(); const Eigen::VectorXd x_initial = variables_->GetValues(); @@ -1265,7 +1314,7 @@ void TrajOptQPProblem::Implementation::updateNLPVariableBounds() const Eigen::VectorXd var_bounds_upper_final = (x_initial.cwiseMax(var_bounds_lower + box_size_) + box_size_).cwiseMin(var_bounds_upper); - const Eigen::Index var_row_index = num_nlp_cnts_ + n_hinge + n_abs; + const Eigen::Index var_row_index = num_nlp_cnts_ + n_hinge + n_abs + n_dyn_hinge + n_dyn_abs; bounds_lower_.segment(var_row_index, n_nlp_vars) = var_bounds_lower_final; bounds_upper_.segment(var_row_index, n_nlp_vars) = var_bounds_upper_final; @@ -1273,15 +1322,27 @@ void TrajOptQPProblem::Implementation::updateNLPVariableBounds() void TrajOptQPProblem::Implementation::updateSlackVariableBounds() { - Eigen::Index current_cnt_index = num_nlp_cnts_ + hinge_costs_.GetRows() + abs_costs_.GetRows() + getNumNLPVars(); + const Eigen::Index n_nlp_vars = getNumNLPVars(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); + const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); + const Eigen::Index n_dyn_abs = dyn_abs_costs_.GetRows(); + + Eigen::Index current_cnt_index = n_nlp_vars + num_nlp_cnts_ + n_hinge + n_abs + n_dyn_hinge + n_dyn_abs; + + for (Eigen::Index i = 0; i < n_hinge; i++) + { + bounds_lower_[current_cnt_index] = 0; + bounds_upper_[current_cnt_index++] = double(INFINITY); + } - for (Eigen::Index i = 0; i < hinge_costs_.GetRows(); i++) + for (Eigen::Index i = 0; i < n_dyn_hinge; i++) { bounds_lower_[current_cnt_index] = 0; bounds_upper_[current_cnt_index++] = double(INFINITY); } - for (Eigen::Index i = 0; i < abs_costs_.GetRows(); i++) + for (Eigen::Index i = 0; i < n_abs; i++) { bounds_lower_[current_cnt_index] = 0; bounds_upper_[current_cnt_index++] = double(INFINITY); From 023e1c58edbcef73af1051618b01f6281dc8ef6d Mon Sep 17 00:00:00 2001 From: Levi Armstrong Date: Fri, 19 Dec 2025 09:36:58 -0600 Subject: [PATCH 7/7] fixup --- .../trajopt_sqp/src/trajopt_qp_problem.cpp | 227 ++++++++++++------ 1 file changed, 152 insertions(+), 75 deletions(-) diff --git a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp index d74be1024..59a8cacd6 100644 --- a/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp +++ b/trajopt_optimizers/trajopt_sqp/src/trajopt_qp_problem.cpp @@ -161,9 +161,6 @@ struct TrajOptQPProblem::Implementation void print() const; - Eigen::Index getNumNLPVars() const; - Eigen::Index getNumNLPCosts() const; - /** * @brief Helper that updates the objective function constant and linear and quadratic coefficients of the QP Problem * @details Called by convexify() @@ -417,8 +414,8 @@ void TrajOptQPProblem::Implementation::update() void TrajOptQPProblem::Implementation::setup() { // Local counts to avoid repeated virtual / composite queries - const Eigen::Index n_nlp_vars = getNumNLPVars(); - 
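Once the constant a is known, the NLP bounds l <= f(x) <= u translate into bounds on the linear part only, which is what the QP actually sees: l - a <= J x <= u - a. A minimal sketch of that shift:

#include <Eigen/Dense>

// Shift NLP constraint bounds by the linearization constant so they bound the
// linear term J*x handled by the QP solver.
void shiftBoundsByConstant(const Eigen::VectorXd& lower, const Eigen::VectorXd& upper,
                           const Eigen::VectorXd& constant,
                           Eigen::VectorXd& qp_lower, Eigen::VectorXd& qp_upper)
{
  qp_lower = lower - constant;
  qp_upper = upper - constant;
}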
const Eigen::Index n_nlp_costs = getNumNLPCosts(); + const Eigen::Index n_nlp_vars = variables_->GetRows(); + const Eigen::Index n_nlp_costs = squared_costs_.GetRows() + hinge_costs_.GetRows() + abs_costs_.GetRows(); const Eigen::Index n_nlp_cnts = constraints_.GetRows(); squared_costs_target_ = Eigen::VectorXd::Zero(squared_costs_.GetRows()); @@ -558,10 +555,7 @@ void TrajOptQPProblem::Implementation::convexify() Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eigen::Ref& var_vals) { - if (getNumNLPCosts() == 0) - return {}; - - const Eigen::Index nlp_vars = getNumNLPVars(); + const Eigen::Index n_nlp_vars = variables_->GetRows(); const Eigen::Index n_sq = squared_costs_.GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); @@ -570,8 +564,11 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige const auto n_dyn_abs = static_cast(dyn_abs_costs_.GetComponents().size()); const Eigen::Index total_cost = n_sq + n_hinge + n_abs + n_dyn_sq + n_dyn_hinge + n_dyn_abs; + if (total_cost == 0) + return {}; + Eigen::VectorXd costs = Eigen::VectorXd::Zero(total_cost); - Eigen::VectorXd var_block = var_vals.head(nlp_vars); + Eigen::VectorXd var_block = var_vals.head(n_nlp_vars); // Squared costs (already convexified into squared_objective_nlp_) if (n_sq > 0) @@ -588,7 +585,7 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige if (n_hinge > 0) { const Eigen::VectorXd hinge_cnt_constant = constraint_constant_.topRows(n_hinge); - const auto hinge_cnt_jac = constraint_matrix_.block(0, 0, hinge_costs_.GetRows(), nlp_vars); + const auto hinge_cnt_jac = constraint_matrix_.block(0, 0, hinge_costs_.GetRows(), n_nlp_vars); const Eigen::VectorXd hinge_convex_value = hinge_cnt_constant + hinge_cnt_jac * var_block; const Eigen::VectorXd hinge_cost = trajopt_ifopt::calcBoundsViolations(hinge_convex_value, hinge_cost_bounds_); @@ -605,7 +602,7 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige for (const auto& cost : dyn_hinge_costs_.GetComponents()) { const Eigen::VectorXd hinge_cnt_constant = constraint_constant_.segment(row_offset, cost->GetRows()); - const auto hinge_cnt_jac = constraint_matrix_.block(row_offset, 0, cost->GetRows(), nlp_vars); + const auto hinge_cnt_jac = constraint_matrix_.block(row_offset, 0, cost->GetRows(), n_nlp_vars); const Eigen::VectorXd hinge_convex_value = hinge_cnt_constant + hinge_cnt_jac * var_block; const Eigen::VectorXd hinge_cost = trajopt_ifopt::calcBoundsViolations(hinge_convex_value, cost->GetBounds()); @@ -620,7 +617,7 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige if (n_abs > 0) { const Eigen::VectorXd abs_cnt_constant = constraint_constant_.segment(row_offset, n_abs); - const auto abs_cnt_jac = constraint_matrix_.block(row_offset, 0, abs_costs_.GetRows(), nlp_vars); + const auto abs_cnt_jac = constraint_matrix_.block(row_offset, 0, abs_costs_.GetRows(), n_nlp_vars); const Eigen::VectorXd abs_convex_value = abs_cnt_constant + abs_cnt_jac * var_block; // calcBoundsViolations already returns |violation| @@ -638,7 +635,7 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige for (const auto& cost : dyn_abs_costs_.GetComponents()) { const Eigen::VectorXd abs_cnt_constant = constraint_constant_.segment(row_offset, cost->GetRows()); - const auto abs_cnt_jac = constraint_matrix_.block(row_offset, 0, cost->GetRows(), nlp_vars); + const auto 
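evaluateConvexCosts() reconstructs each convexified hinge or absolute block as a + J x and measures its violation of the cost bounds. A simplified dense stand-in; boundsViolation below is illustrative and is not the trajopt_ifopt::calcBoundsViolations routine:

#include <Eigen/Dense>

// Per-row distance outside [lower, upper]; zero when inside the bounds.
Eigen::VectorXd boundsViolation(const Eigen::VectorXd& values,
                                const Eigen::VectorXd& lower,
                                const Eigen::VectorXd& upper)
{
  return (lower - values).cwiseMax(0.0) + (values - upper).cwiseMax(0.0);
}

// Evaluate one convexified cost block: rebuild the linear model a + J*x and
// penalize how far it leaves its bounds.
Eigen::VectorXd convexCostBlock(const Eigen::VectorXd& constant,   // a
                                const Eigen::MatrixXd& jacobian,   // J (dense here for simplicity)
                                const Eigen::VectorXd& x,
                                const Eigen::VectorXd& lower,
                                const Eigen::VectorXd& upper)
{
  const Eigen::VectorXd convex_value = constant + jacobian * x;
  return boundsViolation(convex_value, lower, upper);
}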
abs_cnt_jac = constraint_matrix_.block(row_offset, 0, cost->GetRows(), n_nlp_vars); const Eigen::VectorXd abs_convex_value = abs_cnt_constant + abs_cnt_jac * var_block; // calcBoundsViolations already returns |violation| @@ -656,64 +653,127 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexCosts(const Eige double TrajOptQPProblem::Implementation::evaluateTotalExactCost(const Eigen::Ref& var_vals) { - if (getNumNLPCosts() == 0) // NOLINT - return 0; + const Eigen::Index n_sq = squared_costs_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); + const auto n_dyn_sq = static_cast(dyn_squared_costs_.GetComponents().size()); + const auto n_dyn_hinge = static_cast(dyn_hinge_costs_.GetComponents().size()); + const auto n_dyn_abs = static_cast(dyn_abs_costs_.GetComponents().size()); + const Eigen::Index n_total = n_sq + n_hinge + n_abs + n_dyn_sq + n_dyn_hinge + n_dyn_abs; + + if (n_total == 0) + return {}; double g{ 0 }; setVariables(var_vals.data()); - if (squared_costs_.GetRows() > 0) + if (n_sq > 0) { const Eigen::VectorXd error = trajopt_ifopt::calcBoundsViolations(squared_costs_.GetValues(), squared_cost_bounds_); assert(!(error.array() < 0.0).any()); g += error.squaredNorm(); } - if (abs_costs_.GetRows() > 0) + if (n_dyn_sq > 0) + { + const Eigen::VectorXd error = + trajopt_ifopt::calcBoundsViolations(dyn_squared_costs_.GetValues(), dyn_squared_cost_bounds_); + assert(!(error.array() < 0.0).any()); + g += error.squaredNorm(); + } + + if (n_abs > 0) { const Eigen::VectorXd error = trajopt_ifopt::calcBoundsViolations(abs_costs_.GetValues(), abs_cost_bounds_); assert(!(error.array() < 0.0).any()); g += error.sum(); } - if (hinge_costs_.GetRows() > 0) + if (n_dyn_abs > 0) + { + const Eigen::VectorXd error = trajopt_ifopt::calcBoundsViolations(dyn_abs_costs_.GetValues(), dyn_abs_cost_bounds_); + assert(!(error.array() < 0.0).any()); + g += error.sum(); + } + + if (n_hinge > 0) { const Eigen::VectorXd error = trajopt_ifopt::calcBoundsViolations(hinge_costs_.GetValues(), hinge_cost_bounds_); assert(!(error.array() < 0.0).any()); g += error.sum(); } + if (n_dyn_hinge > 0) + { + const Eigen::VectorXd error = + trajopt_ifopt::calcBoundsViolations(dyn_hinge_costs_.GetValues(), dyn_hinge_cost_bounds_); + assert(!(error.array() < 0.0).any()); + g += error.sum(); + } + return g; } Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateExactCosts(const Eigen::Ref& var_vals) { - if (getNumNLPCosts() == 0) + const Eigen::Index n_sq = squared_costs_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); + const auto n_dyn_sq = static_cast(dyn_squared_costs_.GetComponents().size()); + const auto n_dyn_hinge = static_cast(dyn_hinge_costs_.GetRows()); + const auto n_dyn_abs = static_cast(dyn_abs_costs_.GetRows()); + const Eigen::Index n_total = n_sq + n_hinge + n_abs + n_dyn_sq + n_dyn_hinge + n_dyn_abs; + + if (n_total == 0) return {}; setVariables(var_vals.data()); - Eigen::VectorXd g(getNumNLPCosts()); + Eigen::VectorXd g(n_total); Eigen::Index idx = 0; - if (squared_costs_.GetRows() > 0) + if (n_sq > 0) { const Eigen::VectorXd err = trajopt_ifopt::calcBoundsViolations(squared_costs_.GetValues(), squared_cost_bounds_); - g.segment(idx, squared_costs_.GetRows()) = err.array().square().matrix(); - idx += squared_costs_.GetRows(); + g.segment(idx, n_sq) = err.array().square().matrix(); + idx += n_sq; } - if (abs_costs_.GetRows() > 0) + if (n_dyn_sq > 0) + { + const 
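evaluateTotalExactCost() aggregates the penalty families differently: squared costs contribute the squared norm of their bound violations, while absolute and hinge costs contribute plain sums. A compact sketch of that aggregation:

#include <Eigen/Dense>

// Combine per-row bound violations into the scalar exact cost used by the
// trust-region merit computation.
double totalExactCost(const Eigen::VectorXd& squared_violations,
                      const Eigen::VectorXd& abs_violations,
                      const Eigen::VectorXd& hinge_violations)
{
  return squared_violations.squaredNorm() + abs_violations.sum() + hinge_violations.sum();
}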
Eigen::VectorXd err = + trajopt_ifopt::calcBoundsViolations(dyn_squared_costs_.GetValues(), dyn_squared_cost_bounds_); + g.segment(idx, n_dyn_sq) = err.array().square().matrix(); + idx += n_dyn_sq; + } + + if (n_abs > 0) { const Eigen::VectorXd err = trajopt_ifopt::calcBoundsViolations(abs_costs_.GetValues(), abs_cost_bounds_); - g.segment(idx, abs_costs_.GetRows()) = err; - idx += abs_costs_.GetRows(); + g.segment(idx, n_abs) = err; + idx += n_abs; } - if (hinge_costs_.GetRows() > 0) + if (n_dyn_abs > 0) + { + const Eigen::VectorXd err = trajopt_ifopt::calcBoundsViolations(dyn_abs_costs_.GetValues(), dyn_abs_cost_bounds_); + g.segment(idx, n_dyn_abs) = err; + idx += n_dyn_abs; + } + + if (n_hinge > 0) { const Eigen::VectorXd err = trajopt_ifopt::calcBoundsViolations(hinge_costs_.GetValues(), hinge_cost_bounds_); - g.segment(idx, hinge_costs_.GetRows()) = err; + g.segment(idx, n_hinge) = err; + idx += n_hinge; + } + + if (n_dyn_hinge > 0) + { + const Eigen::VectorXd err = + trajopt_ifopt::calcBoundsViolations(dyn_hinge_costs_.GetValues(), dyn_hinge_cost_bounds_); + g.segment(idx, n_dyn_hinge) = err; + // idx += n_dyn_hinge; } return g; @@ -722,19 +782,24 @@ Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateExactCosts(const Eigen Eigen::VectorXd TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen::Ref& var_vals) { - Eigen::Index row_index = hinge_costs_.GetRows() + abs_costs_.GetRows(); + const Eigen::Index n_nlp_vars = variables_->GetRows(); + const Eigen::Index n_cnt = constraints_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); + const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); + const Eigen::Index n_dyn_abs = dyn_abs_costs_.GetRows(); + + Eigen::Index row_index = n_nlp_vars + n_hinge + n_abs + n_dyn_hinge + n_dyn_abs; const auto& dyn_components = dyn_constraints_.GetComponents(); - Eigen::VectorXd violations(constraints_.GetRows() + static_cast(dyn_components.size())); + Eigen::VectorXd violations(n_cnt + static_cast(dyn_components.size())); { // NOLINTNEXTLINE - Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, constraints_.GetRows(), getNumNLPVars()) * - var_vals.head(getNumNLPVars()); // NOLINT - const Eigen::VectorXd constraint_value = - constraint_constant_.middleRows(row_index, constraints_.GetRows()) + result_lin; + Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, n_cnt, n_nlp_vars) * var_vals.head(n_nlp_vars); + const Eigen::VectorXd constraint_value = constraint_constant_.middleRows(row_index, n_cnt) + result_lin; - violations.head(constraints_.GetRows()) = trajopt_ifopt::calcBoundsViolations(constraint_value, constraint_bounds_); - row_index += constraints_.GetRows(); + violations.head(n_cnt) = trajopt_ifopt::calcBoundsViolations(constraint_value, constraint_bounds_); + row_index += n_cnt; } for (std::size_t i = 0; i < dyn_components.size(); ++i) @@ -742,11 +807,11 @@ TrajOptQPProblem::Implementation::evaluateConvexConstraintViolations(const Eigen const auto& cnt = dyn_components[i]; // NOLINTNEXTLINE - Eigen::VectorXd result_lin = constraint_matrix_.block(row_index, 0, cnt->GetRows(), getNumNLPVars()) * - var_vals.head(getNumNLPVars()); // NOLINT + Eigen::VectorXd result_lin = + constraint_matrix_.block(row_index, 0, cnt->GetRows(), n_nlp_vars) * var_vals.head(n_nlp_vars); const Eigen::VectorXd constraint_value = constraint_constant_.middleRows(row_index, cnt->GetRows()) + result_lin; - violations(constraints_.GetRows() + 
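For the convex constraint violations, each static NLP constraint keeps one entry per row, while each dynamic constraint component is collapsed to a single entry by summing the violations of its block. A dense sketch of evaluating one such block, assuming the row ordering established in linearizeConstraints():

#include <Eigen/Dense>

// Evaluate one constraint block of the convexified problem and collapse its
// violations to a single scalar (as done for each dynamic constraint
// component). 'row' and 'rows' locate the block inside the stacked system;
// lower/upper are the block's own bounds.
double blockViolationSum(const Eigen::VectorXd& constant, const Eigen::MatrixXd& jacobian,
                         const Eigen::VectorXd& x, const Eigen::VectorXd& lower,
                         const Eigen::VectorXd& upper, Eigen::Index row, Eigen::Index rows)
{
  const Eigen::VectorXd value = constant.segment(row, rows) + jacobian.middleRows(row, rows) * x;
  return ((lower - value).cwiseMax(0.0) + (value - upper).cwiseMax(0.0)).sum();
}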
static_cast(i)) = + violations(n_cnt + static_cast(i)) = trajopt_ifopt::calcBoundsViolations(constraint_value, cnt->GetBounds()).sum(); row_index += cnt->GetRows(); } @@ -786,7 +851,7 @@ void TrajOptQPProblem::Implementation::scaleBoxSize(double& scale) void TrajOptQPProblem::Implementation::setBoxSize(const Eigen::Ref& box_size) { - assert(box_size.size() == getNumNLPVars()); + assert(box_size.size() == variables_->GetRows()); box_size_ = box_size; updateNLPVariableBounds(); } @@ -803,7 +868,7 @@ void TrajOptQPProblem::Implementation::print() const const Eigen::IOFormat format(3); std::cout << "-------------- QPProblem::print() --------------" << '\n'; - std::cout << "Num NLP Vars: " << getNumNLPVars() << '\n'; + std::cout << "Num NLP Vars: " << variables_->GetRows() << '\n'; std::cout << "Num QP Vars: " << num_qp_vars_ << '\n'; std::cout << "Num NLP Constraints: " << num_qp_cnts_ << '\n'; std::cout << "Detected Constraint Type: "; @@ -841,16 +906,17 @@ void TrajOptQPProblem::Implementation::print() const std::cout << v_set->GetValues().transpose().format(format) << '\n'; } -Eigen::Index TrajOptQPProblem::Implementation::getNumNLPVars() const { return variables_->GetRows(); } - -Eigen::Index TrajOptQPProblem::Implementation::getNumNLPCosts() const -{ - /** @todo This need to be updated to include dyn costs */ - return (squared_costs_.GetRows() + abs_costs_.GetRows() + hinge_costs_.GetRows()); -} - void TrajOptQPProblem::Implementation::convexifyCosts() { + const Eigen::Index n_nlp_vars = variables_->GetRows(); + const Eigen::Index n_sq = squared_costs_.GetRows(); + const Eigen::Index n_hinge = hinge_costs_.GetRows(); + const Eigen::Index n_abs = abs_costs_.GetRows(); + const Eigen::Index n_dyn_sq = dyn_squared_costs_.GetRows(); + const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); + const Eigen::Index n_dyn_abs = dyn_abs_costs_.GetRows(); + const Eigen::Index num_nlp_costs = n_sq + n_hinge + n_abs + n_dyn_sq + n_dyn_hinge + n_dyn_abs; + //////////////////////////////////////////////////////// // Set the Hessian (empty for now) //////////////////////////////////////////////////////// @@ -861,17 +927,17 @@ void TrajOptQPProblem::Implementation::convexifyCosts() //////////////////////////////////////////////////////// gradient_ = Eigen::VectorXd::Zero(num_qp_vars_); - const Eigen::VectorXd x_initial = variables_->GetValues().head(getNumNLPVars()); + const Eigen::VectorXd x_initial = variables_->GetValues(); // Create triplet list of nonzero gradients std::vector> grad_triplet_list; - grad_triplet_list.reserve(static_cast(getNumNLPVars() * getNumNLPCosts()) * 3); + grad_triplet_list.reserve(static_cast(n_nlp_vars * num_nlp_costs) * 3); // Process Squared Costs /** @note See CostFromFunc::convex in modeling_utils.cpp. 
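The squared-cost convexification builds a quadratic model about x_initial. A generic Gauss-Newton style sketch, under the assumption that the cost has the form ||e(x) - target||^2; the actual QuadExprs construction in convexifyCosts() may differ in its details (weights, scaling, sparse storage):

#include <Eigen/Dense>

// Gauss-Newton style quadratic model of ||e(x) - target||^2 about x0:
// gradient g = 2*J^T*(e0 - target), Hessian approximation H ≈ 2*J^T*J.
void convexifySquaredCost(const Eigen::VectorXd& e0,      // cost values at x0
                          const Eigen::VectorXd& target,  // equality targets
                          const Eigen::MatrixXd& J,       // Jacobian at x0
                          Eigen::VectorXd& gradient,
                          Eigen::MatrixXd& hessian)
{
  const Eigen::VectorXd r = e0 - target;
  gradient = 2.0 * J.transpose() * r;
  hessian = 2.0 * J.transpose() * J;
}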
*/ if (squared_costs_.GetRows() > 0) { - squared_objective_nlp_ = QuadExprs(squared_costs_.GetRows(), getNumNLPVars()); + squared_objective_nlp_ = QuadExprs(squared_costs_.GetRows(), n_nlp_vars); const trajopt_ifopt::Jacobian cnt_jac = squared_costs_.GetJacobian(); const Eigen::VectorXd cnt_vals = squared_costs_.GetValues(); @@ -916,7 +982,7 @@ void TrajOptQPProblem::Implementation::convexifyCosts() squared_objective_nlp_.linear_coeffs.setFromTriplets(grad_triplet_list.begin(), grad_triplet_list.end()); // NOLINT // Insert QP Problem Objective Linear Coefficients - gradient_.head(getNumNLPVars()) = squared_objective_nlp_.objective_linear_coeffs; + gradient_.head(n_nlp_vars) = squared_objective_nlp_.objective_linear_coeffs; // Insert QP Problem Objective Quadratic Coefficients if (squared_objective_nlp_.objective_quadratic_coeffs.nonZeros() > 0) @@ -932,39 +998,39 @@ void TrajOptQPProblem::Implementation::convexifyCosts() // Hinge and Asolute costs are handled differently than squared cost because they add constraints to the qp problem - Eigen::Index current_var_index = getNumNLPVars(); + Eigen::Index current_var_index = n_nlp_vars; //////////////////////////////////////////////////////// // Set the gradient of the hinge cost variables //////////////////////////////////////////////////////// - if (hinge_costs_.GetRows() > 0) + if (n_hinge > 0) { /** @todo This should be multiplied by the weight */ - gradient_.segment(current_var_index, hinge_costs_.GetRows()).setConstant(1.0); - current_var_index += hinge_costs_.GetRows(); + gradient_.segment(current_var_index, n_hinge).setConstant(1.0); + current_var_index += n_hinge; } - if (dyn_hinge_costs_.GetRows() > 0) + if (n_dyn_hinge > 0) { - gradient_.segment(current_var_index, dyn_hinge_costs_.GetRows()).setConstant(1.0); - current_var_index += dyn_hinge_costs_.GetRows(); + gradient_.segment(current_var_index, n_dyn_hinge).setConstant(1.0); + current_var_index += n_dyn_hinge; } //////////////////////////////////////////////////////// // Set the gradient of the absolute cost variables //////////////////////////////////////////////////////// - if (abs_costs_.GetRows() > 0) + if (n_abs > 0) { /** @todo This should be multiplied by the weight */ - gradient_.segment(current_var_index, 2L * abs_costs_.GetRows()).setConstant(1.0); - current_var_index += (2L * abs_costs_.GetRows()); + gradient_.segment(current_var_index, 2L * n_abs).setConstant(1.0); + current_var_index += (2L * n_abs); } - if (dyn_abs_costs_.GetRows() > 0) + if (n_dyn_abs > 0) { /** @todo This should be multiplied by the weight */ - gradient_.segment(current_var_index, 2L * dyn_abs_costs_.GetRows()).setConstant(1.0); - current_var_index += (2L * dyn_abs_costs_.GetRows()); + gradient_.segment(current_var_index, 2L * n_dyn_abs).setConstant(1.0); + current_var_index += (2L * n_dyn_abs); } //////////////////////////////////////////////////////// @@ -1055,7 +1121,7 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() triplets.emplace_back(current_row_index + it.row(), it.col(), it.value()); // hinge slack vars - Eigen::Index current_col_index = getNumNLPVars(); + Eigen::Index current_col_index = variables_->GetRows(); for (Eigen::Index i = 0; i < hinge_costs_.GetRows(); ++i) triplets.emplace_back(i, current_col_index++, -1.0); @@ -1122,7 +1188,7 @@ void TrajOptQPProblem::Implementation::linearizeConstraints() void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() { - const Eigen::Index n_nlp = getNumNLPVars(); + const Eigen::Index n_nlp_vars = 
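Only the NLP variables carry quadratic terms; the slack variables are purely linear, so the QP Hessian is the convexified quadratic coefficient matrix placed in the top-left block, with the slack rows and columns left at zero. A standalone sketch of that placement, with illustrative names rather than the actual members:

#include <vector>
#include <Eigen/Sparse>

// Place the convexified quadratic cost coefficients into the top-left
// (NLP-variable) block of an otherwise empty QP Hessian.
Eigen::SparseMatrix<double> buildHessian(const Eigen::SparseMatrix<double>& quad_coeffs,
                                         Eigen::Index num_qp_vars)
{
  std::vector<Eigen::Triplet<double>> triplets;
  triplets.reserve(static_cast<std::size_t>(quad_coeffs.nonZeros()));
  for (int k = 0; k < quad_coeffs.outerSize(); ++k)
    for (Eigen::SparseMatrix<double>::InnerIterator it(quad_coeffs, k); it; ++it)
      triplets.emplace_back(it.row(), it.col(), it.value());

  Eigen::SparseMatrix<double> hessian(num_qp_vars, num_qp_vars);
  hessian.setFromTriplets(triplets.begin(), triplets.end());
  return hessian;
}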
variables_->GetRows(); const Eigen::Index n_cnt = constraints_.GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); @@ -1135,10 +1201,10 @@ void TrajOptQPProblem::Implementation::updateConstraintsConstantExpression() return; // Get values about which we will linearize - const Eigen::VectorXd x_initial = variables_->GetValues().head(n_nlp); + const Eigen::VectorXd x_initial = variables_->GetValues().head(n_nlp_vars); // One mat-vec for all constraint rows (excluding slack columns) - const Eigen::VectorXd lin = constraint_matrix_.block(0, 0, total_num_cnt, n_nlp) * x_initial; + const Eigen::VectorXd lin = constraint_matrix_.block(0, 0, total_num_cnt, n_nlp_vars) * x_initial; // In the case of a QP problem the costs and constraints are represented as // quadratic functions is f(x) = a + b * x + c * x^2. @@ -1289,7 +1355,7 @@ void TrajOptQPProblem::Implementation::updateNLPConstraintBounds() void TrajOptQPProblem::Implementation::updateNLPVariableBounds() { // Equivalent to BasicTrustRegionSQP::setTrustBoxConstraints - const Eigen::Index n_nlp_vars = getNumNLPVars(); + const Eigen::Index n_nlp_vars = variables_->GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); @@ -1322,7 +1388,7 @@ void TrajOptQPProblem::Implementation::updateNLPVariableBounds() void TrajOptQPProblem::Implementation::updateSlackVariableBounds() { - const Eigen::Index n_nlp_vars = getNumNLPVars(); + const Eigen::Index n_nlp_vars = variables_->GetRows(); const Eigen::Index n_hinge = hinge_costs_.GetRows(); const Eigen::Index n_abs = abs_costs_.GetRows(); const Eigen::Index n_dyn_hinge = dyn_hinge_costs_.GetRows(); @@ -1476,7 +1542,10 @@ Eigen::VectorXd TrajOptQPProblem::getBoxSize() const { return std::as_const(*impl_).print(); } -Eigen::Index TrajOptQPProblem::getNumNLPVars() const { return std::as_const(*impl_).getNumNLPVars(); } +Eigen::Index TrajOptQPProblem::getNumNLPVars() const +{ + return std::as_const(*impl_).variables_->GetRows(); +} Eigen::Index TrajOptQPProblem::getNumNLPConstraints() const { @@ -1484,7 +1553,15 @@ Eigen::Index TrajOptQPProblem::getNumNLPConstraints() const return (base.constraints_.GetRows() + static_cast(base.dyn_constraints_.GetComponents().size())); } -Eigen::Index TrajOptQPProblem::getNumNLPCosts() const { return std::as_const(*impl_).getNumNLPCosts(); } +Eigen::Index TrajOptQPProblem::getNumNLPCosts() const +{ + const auto& base = std::as_const(*impl_); + Eigen::Index cnt = + (base.squared_costs_.GetRows() + static_cast(base.dyn_squared_costs_.GetComponents().size())); + cnt += (base.hinge_costs_.GetRows() + static_cast(base.dyn_hinge_costs_.GetComponents().size())); + cnt += (base.abs_costs_.GetRows() + static_cast(base.dyn_abs_costs_.GetComponents().size())); + return cnt; +} Eigen::Index TrajOptQPProblem::getNumQPVars() const { return std::as_const(*impl_).num_qp_vars_; }