Skip to content

Commit

Permalink
opt docs (#79)
Browse files Browse the repository at this point in the history
  • Loading branch information
strasdat authored Feb 17, 2025
1 parent 2322c32 commit 497395f
Show file tree
Hide file tree
Showing 47 changed files with 1,176 additions and 651 deletions.
28 changes: 14 additions & 14 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -22,22 +22,22 @@ include = ["**/*.rs", "**/*.wgsl", "**/*.md", "**/Cargo.toml"]
keywords = ["robotics", "optimization"]
license = "MIT OR Apache-2.0"
repository = "https://github.com/sophus-vision/sophus-rs/"
version = "0.13.0"
version = "0.14.0"

[workspace.dependencies]
sophus = { path = "crates/sophus", version = "0.13.0" }
sophus_autodiff = { path = "crates/sophus_autodiff", version = "0.13.0" }
sophus_tensor = { path = "crates/sophus_tensor", version = "0.13.0" }
sophus_image = { path = "crates/sophus_image", version = "0.13.0" }
sophus_lie = { path = "crates/sophus_lie", version = "0.13.0" }
sophus_geo = { path = "crates/sophus_geo", version = "0.13.0" }
sophus_spline = { path = "crates/sophus_spline", version = "0.13.0" }
sophus_sensor = { path = "crates/sophus_sensor", version = "0.13.0" }
sophus_opt = { path = "crates/sophus_opt", version = "0.13.0" }
sophus_timeseries = { path = "crates/sophus_timeseries", version = "0.13.0" }
sophus_renderer = { path = "crates/sophus_renderer", version = "0.13.0" }
sophus_sim = { path = "crates/sophus_sim", version = "0.13.0" }
sophus_viewer = { path = "crates/sophus_viewer", version = "0.13.0" }
sophus = { path = "crates/sophus", version = "0.14.0" }
sophus_autodiff = { path = "crates/sophus_autodiff", version = "0.14.0" }
sophus_tensor = { path = "crates/sophus_tensor", version = "0.14.0" }
sophus_image = { path = "crates/sophus_image", version = "0.14.0" }
sophus_lie = { path = "crates/sophus_lie", version = "0.14.0" }
sophus_geo = { path = "crates/sophus_geo", version = "0.14.0" }
sophus_spline = { path = "crates/sophus_spline", version = "0.14.0" }
sophus_sensor = { path = "crates/sophus_sensor", version = "0.14.0" }
sophus_opt = { path = "crates/sophus_opt", version = "0.14.0" }
sophus_timeseries = { path = "crates/sophus_timeseries", version = "0.14.0" }
sophus_renderer = { path = "crates/sophus_renderer", version = "0.14.0" }
sophus_sim = { path = "crates/sophus_sim", version = "0.14.0" }
sophus_viewer = { path = "crates/sophus_viewer", version = "0.14.0" }

approx = "0.5"
as-any = "0.3"
Expand Down
122 changes: 118 additions & 4 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ concepts.
sophus-rs provides an automatic differentiation using dual numbers such as
[autodiff::dual::DualScalar] and [autodiff::dual::DualVector].

```
```rust
use sophus::prelude::*;
use sophus::autodiff::dual::{DualScalar, DualVector};
use sophus::autodiff::linalg::VecF64;
Expand Down Expand Up @@ -72,7 +72,7 @@ sophus-rs provides a number of Lie groups, including:
Special Euclidean group SE(3).


```
```rust
use sophus::autodiff::linalg::VecF64;
use sophus::lie::{Rotation3F64, Isometry3F64};
use std::f64::consts::FRAC_PI_4;
Expand Down Expand Up @@ -124,6 +124,120 @@ let bar_from_foo_isometry
sophus-rs also provides a sparse non-linear least squares optimization through
[crate::opt].

```rust
use sophus::prelude::*;
use sophus::autodiff::linalg::{MatF64, VecF64};
use sophus::autodiff::dual::DualVector;
use sophus::lie::{Isometry2, Isometry2F64, Rotation2F64};
use sophus::opt::nlls::{CostFn, CostTerms, EvaluatedCostTerm, optimize_nlls, OptParams};
use sophus::opt::robust_kernel;
use sophus::opt::variables::{VarBuilder, VarFamily, VarKind};

// We want to fit the isometry `T ∈ SE(2)` to a prior distribution
// `N(E(T), W⁻¹)`, where `E(T)` is the prior mean and `W⁻¹` is the prior
// covariance matrix.

// (1) First we define the residual cost term.
#[derive(Clone, Debug)]
pub struct Isometry2PriorCostTerm {
// Prior mean, `E(T)` of type [Isometry2F64].
pub isometry_prior_mean: Isometry2F64,
// `W`, which is the inverse of the prior covariance matrix.
pub isometry_prior_precision: MatF64<3, 3>,
// We only have one variable, so this will be `[0]`.
pub entity_indices: [usize; 1],
}

impl Isometry2PriorCostTerm {
// (2) Then we define residual function for the cost term:
//
// `g(T) = log[T * E(T)⁻¹]`
pub fn residual<Scalar: IsSingleScalar<DM, DN>, const DM: usize, const DN: usize>(
isometry: Isometry2<Scalar, 1, DM, DN>,
isometry_prior_mean: Isometry2<Scalar, 1, DM, DN>,
) -> Scalar::Vector<3> {
(isometry * isometry_prior_mean.inverse()).log()
}
}

// (3) Implement the `HasResidualFn` trait for the cost term.
impl HasResidualFn<3, 1, (), Isometry2F64> for Isometry2PriorCostTerm {
fn idx_ref(&self) -> &[usize; 1] {
&self.entity_indices
}

fn eval(
&self,
_global_constants: &(),
idx: [usize; 1],
args: Isometry2F64,
var_kinds: [VarKind; 1],
robust_kernel: Option<robust_kernel::RobustKernel>,
) -> EvaluatedCostTerm<3, 1> {
let isometry: Isometry2F64 = args;

let residual = Self::residual(isometry, self.isometry_prior_mean);
let dx_res_fn = |x: DualVector<3, 3, 1>| -> DualVector<3, 3, 1> {
Self::residual(
Isometry2::exp(x) * isometry.to_dual_c(),
self.isometry_prior_mean.to_dual_c(),
)
};

(|| dx_res_fn(DualVector::var(VecF64::<3>::zeros())).jacobian(),).make(
idx,
var_kinds,
residual,
robust_kernel,
Some(self.isometry_prior_precision),
)
}
}

let prior_world_from_robot = Isometry2F64::from_translation(
VecF64::<2>::new(1.0, 2.0),
);

// (4) Define the cost terms, by specifying the residual function
// `g(T) = Isometry2PriorCostTerm` as well as providing the prior distribution.
const POSE: &str = "poses";
let obs_pose_a_from_pose_b_poses = CostTerms::new(
[POSE],
vec![Isometry2PriorCostTerm {
isometry_prior_mean: prior_world_from_robot,
isometry_prior_precision: MatF64::<3, 3>::identity(),
entity_indices: [0],
}],
);

// (5) Define the decision variables. In this case, we only have one variable,
// and we initialize it with the identity transformation.
let est_world_from_robot = Isometry2F64::identity();
let variables = VarBuilder::new()
.add_family(
POSE,
VarFamily::new(VarKind::Free, vec![est_world_from_robot]),
)
.build();

// (6) Perform the non-linear least squares optimization.
let solution = optimize_nlls(
variables,
vec![CostFn::new_boxed((), obs_pose_a_from_pose_b_poses.clone(),)],
OptParams::default(),
)
.unwrap();

// (7) Retrieve the refined transformation and compare it with the prior one.
let refined_world_from_robot
= solution.variables.get_members::<Isometry2F64>(POSE)[0];
approx::assert_abs_diff_eq!(
prior_world_from_robot.compact(),
refined_world_from_robot.compact(),
epsilon = 1e-6,
);
```

## And more...

such as unit vectors, splines, image classes, camera models, and some visualization
Expand All @@ -136,7 +250,7 @@ sophus-rs builds on stable.

```toml
[dependencies]
sophus = "0.13.0"
sophus = "0.14.0"
```

To allow for batch types, such as BatchScalarF64, the 'simd' feature is required. This feature
Expand All @@ -146,7 +260,7 @@ are no plans to rely on any other nightly features.

```toml
[dependencies]
sophus = { version = "0.13.0", features = ["simd"] }
sophus = { version = "0.14.0", features = ["simd"] }
```

## Crate Structure and Usage
Expand Down
1 change: 1 addition & 0 deletions crates/sophus/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@ eframe.workspace = true
egui-wgpu.workspace = true
egui_extras.workspace = true
env_logger.workspace = true
rand.workspace = true
thingbuf.workspace = true
tokio = { version = "1", features = ["full"] }
wgpu.workspace = true
Expand Down
150 changes: 149 additions & 1 deletion crates/sophus_opt/README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,154 @@
## Sparse non-linear least squares optimization

Sparse non-linear least squares optimization - with optional equality constraints.
This crate provides a general sparse non-linear least squares optimization
library, similar to the C++ libraries [ceres-solver](http://ceres-solver.org/),
[g2o](https://github.com/RainerKuemmerle/g2o) and [gtsam](https://gtsam.org/).
It supports automatic differentiation through the
[sophus-autodiff](https://crates.io/crates/sophus-autodiff) crate, as well as
optimization on manifolds and Lie groups through the
[sophus_autodiff::manifold::IsVariable] trait and the
[sophus-lie](https://crates.io/crates/sophus-lie) crate.


## Example

```rust
use sophus_opt::prelude::*;
use sophus_autodiff::linalg::{MatF64, VecF64};
use sophus_autodiff::dual::DualVector;
use sophus_lie::{Isometry2, Isometry2F64, Rotation2F64};
use sophus_opt::nlls::{CostFn, CostTerms, EvaluatedCostTerm, optimize_nlls, OptParams};
use sophus_opt::robust_kernel;
use sophus_opt::variables::{VarBuilder, VarFamily, VarKind};

// We want to fit the isometry `T ∈ SE(2)` to a prior distribution
// `N(E(T), W⁻¹)`, where `E(T)` is the prior mean and `W⁻¹` is the prior
// covariance matrix.

// (1) First we define the residual cost term.
#[derive(Clone, Debug)]
pub struct Isometry2PriorCostTerm {
// Prior mean, `E(T)` of type [Isometry2F64].
pub isometry_prior_mean: Isometry2F64,
// `W`, which is the inverse of the prior covariance matrix.
pub isometry_prior_precision: MatF64<3, 3>,
// We only have one variable, so this will be `[0]`.
pub entity_indices: [usize; 1],
}

impl Isometry2PriorCostTerm {
// (2) Then we define residual function for the cost term:
//
// `g(T) = log[T * E(T)⁻¹]`
pub fn residual<Scalar: IsSingleScalar<DM, DN>, const DM: usize, const DN: usize>(
isometry: Isometry2<Scalar, 1, DM, DN>,
isometry_prior_mean: Isometry2<Scalar, 1, DM, DN>,
) -> Scalar::Vector<3> {
(isometry * isometry_prior_mean.inverse()).log()
}
}

// (3) Implement the `HasResidualFn` trait for the cost term.
impl HasResidualFn<3, 1, (), Isometry2F64> for Isometry2PriorCostTerm {
fn idx_ref(&self) -> &[usize; 1] {
&self.entity_indices
}

fn eval(
&self,
_global_constants: &(),
idx: [usize; 1],
args: Isometry2F64,
var_kinds: [VarKind; 1],
robust_kernel: Option<robust_kernel::RobustKernel>,
) -> EvaluatedCostTerm<3, 1> {
let isometry: Isometry2F64 = args;

let residual = Self::residual(isometry, self.isometry_prior_mean);
let dx_res_fn = |x: DualVector<3, 3, 1>| -> DualVector<3, 3, 1> {
Self::residual(
Isometry2::exp(x) * isometry.to_dual_c(),
self.isometry_prior_mean.to_dual_c(),
)
};

(|| dx_res_fn(DualVector::var(VecF64::<3>::zeros())).jacobian(),).make(
idx,
var_kinds,
residual,
robust_kernel,
Some(self.isometry_prior_precision),
)
}
}

let prior_world_from_robot = Isometry2F64::from_translation(
VecF64::<2>::new(1.0, 2.0),
);

// (4) Define the cost terms, by specifying the residual function
// `g(T) = Isometry2PriorCostTerm` as well as providing the prior distribution.
const POSE: &str = "poses";
let obs_pose_a_from_pose_b_poses = CostTerms::new(
[POSE],
vec![Isometry2PriorCostTerm {
isometry_prior_mean: prior_world_from_robot,
isometry_prior_precision: MatF64::<3, 3>::identity(),
entity_indices: [0],
}],
);

// (5) Define the decision variables. In this case, we only have one variable,
// and we initialize it with the identity transformation.
let est_world_from_robot = Isometry2F64::identity();
let variables = VarBuilder::new()
.add_family(
POSE,
VarFamily::new(VarKind::Free, vec![est_world_from_robot]),
)
.build();

// (6) Perform the non-linear least squares optimization.
let solution = optimize_nlls(
variables,
vec![CostFn::new_boxed((), obs_pose_a_from_pose_b_poses.clone(),)],
OptParams::default(),
)
.unwrap();

// (7) Retrieve the refined transformation and compare it with the prior one.
let refined_world_from_robot
= solution.variables.get_members::<Isometry2F64>(POSE)[0];
approx::assert_abs_diff_eq!(
prior_world_from_robot.compact(),
refined_world_from_robot.compact(),
epsilon = 1e-6,
);
```

In the code above, we define an [crate::nlls::costs::Isometry2PriorCostTerm]
struct that imposes a Gaussian prior on an [sophus_lie::Isometry2F64]
pose, specifying a prior mean `E(T)` and a precision matrix `W`. The key
operation is the residual function `g(T) = log[T * E(T)⁻¹]`. This maps the
difference between the current estimate `T` and the prior mean `E(T)` into
the tangent space of the [Lie group](sophus_lie::LieGroup).

We then implement the [nlls::HasResidualFn] trait for our cost term
so that it can be evaluated inside the solver. In the `eval` method, automatic
differentiation with [sophus_autodiff::dual::DualVector] computes the Jacobian
of the residual. The returned [nlls::EvaluatedCostTerm] includes
both the residual vector and its associated covariance.

Next, we group our cost terms with [crate::nlls::CostTerms] and associate
them with one or more variable “families” via the [crate::variables::VarBuilder].
Here, we create a single family [crate::variables::VarFamily] named
`"poses"` for the free variable, initialized to the identity transform. Finally,
we call the sparse non-linear least squares solver [crate::nlls::optimize_nlls]
with these variables and the wrapped cost function [crate::nlls::CostFn],
passing in tuning parameters [crate::nlls::OptParams]; we are using the
default here. Upon convergence, the solver returns updated variables that we
retrieve from the solution using [crate::variables::VarFamilies::get_members].


## Integration with sophus-rs

Expand Down
Loading

0 comments on commit 497395f

Please sign in to comment.