
Commit

feat: add initializer list init for wrt in derivatives
mimizh2418 committed Nov 30, 2024
1 parent aa30036 commit ed244d2
Showing 5 changed files with 41 additions and 13 deletions.
21 changes: 21 additions & 0 deletions include/suboptimal/autodiff/derivatives.h
@@ -22,6 +22,13 @@ class Gradient {
*/
Gradient(const Variable& var, const Eigen::Ref<const VectorXv>& wrt);

/**
* Constructs a gradient object
* @param var the variable to compute the gradient of
* @param wrt the list of variables to compute the gradient with respect to
*/
Gradient(const Variable& var, std::initializer_list<Variable> wrt);

/**
* Gets the value of the gradient based on the current value of wrt
*/
@@ -82,6 +89,13 @@ class Jacobian {
*/
Jacobian(const Eigen::Ref<const VectorXv>& vars, const Eigen::Ref<const VectorXv>& wrt);

/**
* Constructs a Jacobian object
* @param vars the vector of variables to compute the Jacobian of
* @param wrt the list of variables to compute the Jacobian with respect to
*/
Jacobian(const Eigen::Ref<const VectorXv>& vars, std::initializer_list<Variable> wrt);

/**
* Gets the value of the Jacobian based on the current value of wrt
*/
@@ -115,6 +129,13 @@ class Hessian {
*/
Hessian(const Variable& var, const Eigen::Ref<const VectorXv>& wrt);

/**
* Constructs a Hessian object
* @param var the variable to compute the Hessian of
* @param wrt the list of variables to compute the Hessian with respect to
*/
Hessian(const Variable& var, std::initializer_list<Variable> wrt);

/**
* Gets the value of the Hessian based on the current value of wrt
*/
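For reference, a minimal usage sketch of the new overloads (assumptions: the header is included as <suboptimal/autodiff/derivatives.h> per the include/ layout above, and these types live in the suboptimal namespace; both are inferred from the repository layout, not confirmed by this diff). The wrt variables can now be passed as a braced list instead of an explicit Eigen vector of Variable:

#include <suboptimal/autodiff/derivatives.h>  // assumed include path

using namespace suboptimal;  // assumed namespace for Variable, Gradient, etc.

int main() {
  Variable x{};
  Variable y{};
  const Variable f = x * x - 2 * x * y + y * y;

  // New in this commit: braced initializer lists for wrt.
  Gradient grad{f, {x, y}};
  Hessian hess{f, {x, y}};

  const Vector2v f_vec{4 * suboptimal::pow(x, 2) * y, x - suboptimal::pow(y, 2)};
  Jacobian jac{f_vec, {x, y}};
  return 0;
}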
7 changes: 7 additions & 0 deletions src/autodiff/derivatives.cpp
@@ -17,6 +17,8 @@ Gradient::Gradient(const Variable& var, const Eigen::Ref<const VectorXv>& wrt) :
}
}

Gradient::Gradient(const Variable& var, const std::initializer_list<Variable> wrt) : Gradient{var, VectorXv{wrt}} {}

const Eigen::SparseVector<double>& Gradient::getValue() {
if (var.getLinearity() > Linearity::Linear) {
std::ranges::for_each(wrt, [](const Variable& v) { v.expr->adjoint = 0.0; });
@@ -112,6 +114,9 @@ Jacobian::Jacobian(const Eigen::Ref<const VectorXv>& vars, const Eigen::Ref<cons
value.setFromTriplets(triplets.begin(), triplets.end());
}

Jacobian::Jacobian(const Eigen::Ref<const VectorXv>& vars, const std::initializer_list<Variable> wrt)
: Jacobian{vars, VectorXv{wrt}} {}

const Eigen::SparseMatrix<double>& Jacobian::getValue() {
if (nonlinear_rows.empty()) {
return value;
@@ -139,6 +144,8 @@ MatrixXv Jacobian::getExpr() {
Hessian::Hessian(const Variable& var, const Eigen::Ref<const VectorXv>& wrt)
: jacobian(Gradient{var, wrt}.getExpr(), wrt) {}

Hessian::Hessian(const Variable& var, const std::initializer_list<Variable> wrt) : Hessian{var, VectorXv{wrt}} {}

const Eigen::SparseMatrix<double>& Hessian::getValue() {
return jacobian.getValue();
}
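Because the new constructors just wrap the list in a VectorXv and delegate to the existing Eigen::Ref constructors, the two spellings are interchangeable at runtime. A small equivalence sketch (setValue/getValue usage mirrors the tests and implementation above; everything else is the same assumed setup as the previous example):

#include <suboptimal/autodiff/derivatives.h>  // assumed include path

using namespace suboptimal;  // assumed namespace

int main() {
  Variable x{};
  Variable y{};
  const Variable f = x * x - 2 * x * y + y * y;

  Gradient from_vector{f, Vector2v{x, y}};  // existing Eigen::Ref overload
  Gradient from_list{f, {x, y}};            // new initializer_list overload

  x.setValue(3.0);
  y.setValue(-1.0);

  // Both objects track the same wrt variables, so their values agree:
  // df/dx = 2x - 2y = 8 and df/dy = -2x + 2y = -8 at (3, -1).
  const auto& g_vec = from_vector.getValue();
  const auto& g_list = from_list.getValue();
  return 0;
}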
18 changes: 9 additions & 9 deletions test/autodiff/gradient_test.cpp
@@ -21,7 +21,7 @@ TEST_CASE("Autodiff - Basic gradient", "[autodiff]") {
Variable x{};
Variable y{};
const Variable f = (x * x - 2 * x * y + y * y) / (5 * x);
Gradient grad{f, Vector2v{x, y}};
Gradient grad{f, {x, y}};

const double x_val = GENERATE(take(10, random(-100.0, 100.0)));
const double y_val = GENERATE(take(10, random(-100.0, 100.0)));
@@ -37,7 +37,7 @@ TEST_CASE("Autodiff - Basic gradient", "[autodiff]") {
TEST_CASE("Autodiff - Gradient of abs", "[autodiff]") {
Variable x{};
const Variable f = suboptimal::abs(x);
Gradient grad{f, VectorXv{{x}}};
Gradient grad{f, {x}};

const double x_val = GENERATE(take(10, random(-100.0, 100.0)));
x.setValue(x_val);
@@ -72,7 +72,7 @@ TEST_CASE("Autodiff - Gradient of exp", "[autodiff]") {
Variable x{};
Variable y{};
const Variable f = suboptimal::exp(x * y);
Gradient grad{f, Vector2v{x, y}};
Gradient grad{f, {x, y}};

const double x_val = GENERATE(take(10, random(-10.0, 10.0)));
const double y_val = GENERATE(take(10, random(-10.0, 10.0)));
@@ -89,7 +89,7 @@ TEST_CASE("Autodiff - Gradient of log", "[autodiff]") {
Variable x{};
Variable y{};
const Variable f = suboptimal::log(x * y);
Gradient grad{f, Vector2v{x, y}};
Gradient grad{f, {x, y}};

const double x_val = GENERATE(take(10, random(0.1, 100.0)));
const double y_val = GENERATE(take(10, random(0.1, 100.0)));
@@ -106,7 +106,7 @@ TEST_CASE("Autodiff - Gradient of pow", "[autodiff]") {
Variable x{};
Variable y{};
const Variable f = suboptimal::pow(x, y);
Gradient grad{f, Vector2v{x, y}};
Gradient grad{f, {x, y}};

const double x_val = GENERATE(take(10, random(0.0, 100.0)));
const double y_val = GENERATE(take(10, random(-100.0, 100.0)));
@@ -123,7 +123,7 @@ TEST_CASE("Autodiff - Gradient of hypot", "[autodiff]") {
Variable x{};
Variable y{};
const Variable f = suboptimal::hypot(x, y);
Gradient grad{f, Vector2v{x, y}};
Gradient grad{f, {x, y}};

const double x_val = GENERATE(take(10, random(-100.0, 100.0)));
const double y_val = GENERATE(take(10, random(-100.0, 100.0)));
@@ -141,7 +141,7 @@ TEST_CASE("Autodiff - Gradients of trig functions", "[autodiff]") {
Variable y{};
Variable z{};
const Variable f = suboptimal::sin(x) * suboptimal::cos(y) * suboptimal::tan(z);
Gradient grad{f, Vector3v{x, y, z}};
Gradient grad{f, {x, y, z}};

const double x_val = GENERATE(take(5, random(-100.0, 100.0)));
const double y_val = GENERATE(take(5, random(-100.0, 100.0)));
@@ -162,7 +162,7 @@ TEST_CASE("Autodiff - Gradients of inverse trig functions", "[autodiff]") {
Variable y{};
Variable z{};
const Variable f = suboptimal::asin(x) * suboptimal::acos(y) * suboptimal::atan(z);
Gradient grad{f, Vector3v{x, y, z}};
Gradient grad{f, {x, y, z}};

const double x_val = GENERATE(take(5, random(-1.0, 1.0)));
const double y_val = GENERATE(take(5, random(-1.0, 1.0)));
@@ -182,7 +182,7 @@ TEST_CASE("Autodiff - Gradient of atan2", "[autodiff]") {
Variable x{};
Variable y{};
const Variable f = suboptimal::atan2(y, x);
Gradient grad{f, Vector2v{x, y}};
Gradient grad{f, {x, y}};

const double x_val = GENERATE(take(10, random(-100.0, 100.0)));
const double y_val = GENERATE(take(10, random(-100.0, 100.0)));
4 changes: 2 additions & 2 deletions test/autodiff/hessian_test.cpp
@@ -19,7 +19,7 @@ TEST_CASE("Autodiff - Basic Hessian") {
Variable x{};
Variable y{};
const Variable f = (x * x - 2 * x * y + y * y);
Hessian h{f, Vector2v{x, y}};
Hessian h{f, {x, y}};

const double x_val = GENERATE(take(10, random(-100, 100)));
const double y_val = GENERATE(take(10, random(-100, 100)));
@@ -38,7 +38,7 @@ TEST_CASE("Autodiff - Complicated Hessian") {
Variable y{};
Variable z{};
const Variable f = x * y * z + y * suboptimal::sin(x) + z * suboptimal::cos(y);
Hessian h{f, Vector3v{x, y, z}};
Hessian h{f, {x, y, z}};

const double x_val = GENERATE(take(5, random(-100, 100)));
const double y_val = GENERATE(take(5, random(-100, 100)));
4 changes: 2 additions & 2 deletions test/autodiff/jacobian_test.cpp
@@ -21,7 +21,7 @@ TEST_CASE("Autodiff - Basic Jacobian", "[autodiff]") {
Variable y{};
const Vector2v f{4 * suboptimal::pow(x, 2) * y, //
x - suboptimal::pow(y, 2)};
Jacobian j{f, Vector2v{x, y}};
Jacobian j{f, {x, y}};

const double x_val = GENERATE(take(10, random(-100, 100)));
const double y_val = GENERATE(take(10, random(-100, 100)));
@@ -42,7 +42,7 @@ TEST_CASE("Autodiff - Complicated Jacobian", "[autodiff]") {
const Vector3v f{x * y * z, //
y * suboptimal::sin(x), //
z * suboptimal::cos(y)};
Jacobian j{f, Vector3v{x, y, z}};
Jacobian j{f, {x, y, z}};

const double x_val = GENERATE(take(5, random(-100.0, 100.0)));
const double y_val = GENERATE(take(5, random(-100.0, 100.0)));
