Commit 84449aa: Filled out observer structure more fully
contagon committed Jul 9, 2024 (parent: 2c2d100)
Showing 11 changed files with 314 additions and 3,404 deletions.
3,493 changes: 165 additions & 3,328 deletions Cargo.lock

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion Cargo.toml
```diff
@@ -27,7 +27,9 @@ typetag = { version = "0.2.16", optional = true }
 serde_json = { version = "1.0.120", optional = true }
 
 # rerun support
-rerun = { version = "0.16.1", optional = true }
+rerun = { version = "0.16.1", optional = true, default-features = false, features = [
+    "sdk",
+] }
 
 [features]
 # Run everything with f32 instead of the defaul f64
```
20 changes: 10 additions & 10 deletions examples/g2o-rerun.rs
```diff
@@ -5,8 +5,8 @@ use std::{
 };
 
 use factrs::{
-    optimizers::{GaussNewton, Optimizer},
-    rerun::RerunSender,
+    optimizers::{GaussNewton, GraphOptimizer, Optimizer},
+    rerun::RerunObserver,
     utils::load_g20,
     variables::*,
 };
@@ -46,20 +46,20 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     let topic = "base/solution";
     match (dim, obj) {
         ("se2", "points") => {
-            let callback = RerunSender::<SE2, Points2D>::new(rec, topic);
-            optimizer.params.add_callback(callback)
+            let callback = RerunObserver::<SE2, Points2D>::new(rec, topic);
+            optimizer.observers.add(callback)
         }
         ("se2", "arrows") => {
-            let callback = RerunSender::<SE2, Arrows2D>::new(rec, topic);
-            optimizer.params.add_callback(callback)
+            let callback = RerunObserver::<SE2, Arrows2D>::new(rec, topic);
+            optimizer.observers.add(callback)
        }
         ("se3", "points") => {
-            let callback = RerunSender::<SE3, Points3D>::new(rec, topic);
-            optimizer.params.add_callback(callback)
+            let callback = RerunObserver::<SE3, Points3D>::new(rec, topic);
+            optimizer.observers.add(callback)
         }
         ("se3", "arrows") => {
-            let callback = RerunSender::<SE3, Arrows3D>::new(rec, topic);
-            optimizer.params.add_callback(callback)
+            let callback = RerunObserver::<SE3, Arrows3D>::new(rec, topic);
+            optimizer.observers.add(callback)
         }
         _ => panic!("Invalid arguments"),
     };
```
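Taken together, the new wiring reads roughly as below. This is a hedged sketch: only `RerunObserver::new`, its type parameters, `optimizer.observers.add`, and the `GraphOptimizer` import are taken from this diff; the graph/values setup, the recording stream, and the `optimize` call are assumed from the surrounding example.

```rust
use factrs::{
    containers::{Graph, Values},
    optimizers::{GaussNewton, GraphOptimizer, Optimizer},
    rerun::RerunObserver,
    variables::SE2,
};
use rerun::Points2D;

fn visualize(graph: Graph, values: Values) -> Result<(), Box<dyn std::error::Error>> {
    // Recording stream for the viewer (standard rerun SDK call).
    let rec = rerun::RecordingStreamBuilder::new("factrs").spawn()?;

    let mut optimizer: GaussNewton = GaussNewton::new(graph);

    // Observers now hang off the optimizer itself, not off its params.
    let callback = RerunObserver::<SE2, Points2D>::new(rec, "base/solution");
    optimizer.observers.add(callback);

    let _solution = optimizer.optimize(values).expect("optimization failed");
    Ok(())
}
```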
2 changes: 1 addition & 1 deletion examples/g2o.rs
```diff
@@ -1,7 +1,7 @@
 use std::{env, time::Instant};
 
 use factrs::{
-    optimizers::{GaussNewton, Optimizer},
+    optimizers::{GaussNewton, GraphOptimizer, Optimizer},
     utils::load_g20,
 };
 fn main() {
```
7 changes: 7 additions & 0 deletions src/containers/values.rs
```diff
@@ -62,6 +62,13 @@ impl Values {
         self.values.get_mut(key)
     }
 
+    // TODO: This should be some kind of error
+    pub fn get_mut_cast<T: VariableSafe>(&mut self, key: &Symbol) -> Option<&mut T> {
+        self.values
+            .get_mut(key)
+            .and_then(|value| value.downcast_mut::<T>())
+    }
+
     pub fn remove(&mut self, key: &Symbol) -> Option<Box<dyn VariableSafe>> {
         self.values.remove(key)
     }
```
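The new `get_mut_cast` recovers a concrete variable type from the type-erased `Box<dyn VariableSafe>` storage. A minimal usage sketch; `Values::new`, `insert`, the `X` symbol helper, and the import paths are assumed from elsewhere in factrs, not from this diff:

```rust
use factrs::{
    containers::Values,
    symbols::X, // path assumed
    variables::{SE2, SO3},
};

fn main() {
    let mut values = Values::new();
    values.insert(X(0), SE2::identity());

    // Matching type: the downcast succeeds and we can mutate in place.
    if let Some(pose) = values.get_mut_cast::<SE2>(&X(0)) {
        *pose = SE2::identity();
    }

    // Mismatched type: currently just None; the TODO in the diff suggests
    // this will eventually surface as a proper error.
    assert!(values.get_mut_cast::<SO3>(&X(0)).is_none());
}
```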
25 changes: 17 additions & 8 deletions src/linear/values.rs
```diff
@@ -16,18 +16,27 @@ impl LinearValues {
         Self { values, order }
     }
 
-    pub fn from_order_and_values(order: ValuesOrder, values: VectorX) -> Self {
+    pub fn zero_from_values(values: &Values) -> Self {
+        let order = ValuesOrder::from_values(values);
+        let values = VectorX::zeros(order.dim());
+        Self { values, order }
+    }
+
+    pub fn from_order_and_vector(order: ValuesOrder, values: VectorX) -> Self {
         assert!(
             values.len() == order.dim(),
-            "Values and order must have the same dimension when creating LinearValues"
+            "Vector and order must have the same dimension when creating LinearValues"
         );
         Self { values, order }
     }
 
-    pub fn zero_from_values(values: &Values) -> Self {
+    pub fn from_values_and_vector(values: &Values, vector: VectorX) -> Self {
         let order = ValuesOrder::from_values(values);
-        let values = VectorX::zeros(order.dim());
-        Self { values, order }
+        assert!(
+            vector.len() == order.dim(),
+            "Vector and values must have the same dimension when creating LinearValues"
+        );
+        Self::from_order_and_vector(order, vector)
     }
 
     pub fn len(&self) -> usize {
@@ -96,11 +105,11 @@
     }
 
     #[test]
-    fn from_order_and_values() {
+    fn from_order_and_vector() {
         let (order, vector) = make_order_vector();
 
         // Create LinearValues
-        let linear_values = LinearValues::from_order_and_values(order, vector);
+        let linear_values = LinearValues::from_order_and_vector(order, vector);
         assert!(linear_values.len() == 3);
         assert!(linear_values.dim() == 11);
         assert!(linear_values.get(&X(0)).unwrap().len() == 2);
@@ -114,6 +123,6 @@
     fn mismatched_size() {
         let (order, vector) = make_order_vector();
         let vector = vector.push(0.0);
-        LinearValues::from_order_and_values(order, vector);
+        LinearValues::from_order_and_vector(order, vector);
     }
 }
```
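The renames sharpen the naming: a `LinearValues` pairs a flat vector with a `ValuesOrder`, and "vector" now consistently means the flat `VectorX`. A sketch of the three constructors (signatures from the hunks above; import paths and setup assumed):

```rust
use factrs::{
    containers::{Values, ValuesOrder},
    linalg::VectorX,
    linear::LinearValues, // path assumed
};

fn constructors(values: &Values, order: ValuesOrder, vector: VectorX) {
    // All-zero vector whose layout (and dimension) is derived from `values`.
    let zeros = LinearValues::zero_from_values(values);
    assert_eq!(zeros.dim(), vector.len());

    // Explicit order plus flat vector; asserts the dimensions agree.
    let a = LinearValues::from_order_and_vector(order, vector.clone());

    // Derive the order from `values`, then delegate to the constructor above.
    let b = LinearValues::from_values_and_vector(values, vector);
    assert_eq!(a.dim(), b.dim());
}
```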
28 changes: 21 additions & 7 deletions src/optimizers/gauss_newton.rs
```diff
@@ -1,6 +1,6 @@
 use faer_ext::IntoNalgebra;
 
-use super::{OptResult, Optimizer, OptimizerParams};
+use super::{GraphOptimizer, OptObserverVec, OptParams, OptResult, Optimizer};
 use crate::{
     containers::{Graph, GraphOrder, Values, ValuesOrder},
     linalg::DiffResult,
@@ -11,26 +11,40 @@ use crate::{
 pub struct GaussNewton<S: LinearSolver = CholeskySolver> {
     graph: Graph,
     solver: S,
-    pub params: OptimizerParams,
+    pub params: OptParams,
+    pub observers: OptObserverVec<Values>,
     // For caching computation between steps
     graph_order: Option<GraphOrder>,
 }
 
-impl<S: LinearSolver> Optimizer for GaussNewton<S> {
+impl<S: LinearSolver> GraphOptimizer for GaussNewton<S> {
     fn new(graph: Graph) -> Self {
         Self {
             graph,
             solver: S::default(),
-            params: OptimizerParams::default(),
+            observers: OptObserverVec::default(),
+            params: OptParams::default(),
             graph_order: None,
         }
     }
 
     fn graph(&self) -> &Graph {
         &self.graph
     }
+}
+
+impl<S: LinearSolver> Optimizer for GaussNewton<S> {
+    type Input = Values;
+
+    fn observers(&self) -> &OptObserverVec<Values> {
+        &self.observers
+    }
+
+    fn error(&self, values: &Values) -> crate::dtype {
+        self.graph.error(values)
+    }
 
-    fn params(&self) -> &OptimizerParams {
+    fn params(&self) -> &OptParams {
         &self.params
     }
@@ -43,7 +57,7 @@ impl<S: LinearSolver> Optimizer for GaussNewton<S> {
         );
     }
 
-    fn step(&mut self, mut values: Values) -> OptResult {
+    fn step(&mut self, mut values: Values) -> OptResult<Values> {
         // Solve the linear system
         let linear_graph = self.graph.linearize(&values);
         let DiffResult { value: r, diff: j } =
@@ -59,7 +73,7 @@ impl<S: LinearSolver> Optimizer for GaussNewton<S> {
             .clone_owned();
 
         // Update the values
-        let dx = LinearValues::from_order_and_values(
+        let dx = LinearValues::from_order_and_vector(
             self.graph_order.as_ref().unwrap().order.clone(),
             delta,
         );
```
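The `OptObserver` trait itself is not part of this diff, but the call sites pin down its rough shape: observers live in an `OptObserverVec<Values>` and are handed intermediate `Values` as the optimizer iterates. A hypothetical custom observer; the `on_step` name and its signature are guesses, not confirmed by these hunks:

```rust
// Hypothetical observer that logs each intermediate solution.
// Method name and signature assumed, not shown in this commit.
struct IterationLogger;

impl OptObserver for IterationLogger {
    type Input = Values;

    fn on_step(&self, values: &Values, idx: f64) {
        println!("step {idx}: {} variables in solution", values.len());
    }
}

// Attached the same way the g2o-rerun example attaches RerunObserver:
//     optimizer.observers.add(IterationLogger);
```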
28 changes: 21 additions & 7 deletions src/optimizers/levenberg_marquardt.rs
```diff
@@ -3,7 +3,7 @@ use std::ops::Mul;
 use faer::{scale, sparse::SparseColMat};
 use faer_ext::IntoNalgebra;
 
-use super::{OptError, OptResult, Optimizer, OptimizerParams};
+use super::{GraphOptimizer, OptError, OptObserverVec, OptParams, OptResult, Optimizer};
 use crate::{
     containers::{Graph, GraphOrder, Values, ValuesOrder},
     dtype,
@@ -32,20 +32,22 @@ impl Default for LevenParams {
 pub struct LevenMarquardt<S: LinearSolver = CholeskySolver> {
     graph: Graph,
     solver: S,
-    pub params_base: OptimizerParams,
+    pub params_base: OptParams,
     pub params_leven: LevenParams,
+    observers: OptObserverVec<Values>,
     lambda: dtype,
     // For caching computation between steps
     graph_order: Option<GraphOrder>,
 }
 
-impl<S: LinearSolver> Optimizer for LevenMarquardt<S> {
+impl<S: LinearSolver> GraphOptimizer for LevenMarquardt<S> {
     fn new(graph: Graph) -> Self {
         Self {
             graph,
             solver: S::default(),
-            params_base: OptimizerParams::default(),
+            params_base: OptParams::default(),
             params_leven: LevenParams::default(),
+            observers: OptObserverVec::default(),
             lambda: 1e-5,
             graph_order: None,
         }
@@ -54,11 +56,23 @@ impl<S: LinearSolver> Optimizer for LevenMarquardt<S> {
     fn graph(&self) -> &Graph {
         &self.graph
     }
+}
+
+impl<S: LinearSolver> Optimizer for LevenMarquardt<S> {
+    type Input = Values;
+
+    fn observers(&self) -> &OptObserverVec<Self::Input> {
+        &self.observers
+    }
 
-    fn params(&self) -> &OptimizerParams {
+    fn params(&self) -> &OptParams {
         &self.params_base
     }
 
+    fn error(&self, values: &Values) -> crate::dtype {
+        self.graph.error(values)
+    }
+
     fn init(&mut self, _values: &Values) {
         // TODO: Some way to manual specify how to computer ValuesOrder
         // Precompute the sparsity pattern
@@ -70,7 +84,7 @@
 
     // TODO: Some form of logging of the lambda value
     // TODO: More sophisticated stopping criteria based on magnitude of the gradient
-    fn step(&mut self, mut values: Values) -> OptResult {
+    fn step(&mut self, mut values: Values) -> OptResult<Values> {
         // Make an ordering
         let order = ValuesOrder::from_values(&values);
 
@@ -122,7 +136,7 @@
             .into_nalgebra()
             .column(0)
             .clone_owned();
-        dx = LinearValues::from_order_and_values(
+        dx = LinearValues::from_order_and_vector(
             self.graph_order.as_ref().unwrap().order.clone(),
             delta,
         );
```
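For reference, the core of `LevenMarquardt::step` is a damped normal-equation solve controlled by `lambda`; factrs does this sparsely through faer, and the exact damping it applies is not visible in these hunks. A dense toy version with nalgebra, assuming the common diagonal (Marquardt) damping:

```rust
use nalgebra::{DMatrix, DVector};

/// One damped solve: (JᵀJ + λ·diag(JᵀJ)) dx = −Jᵀ r.
/// Returns None when the damped system is not positive-definite,
/// in which case LM would raise λ and retry.
fn lm_solve(j: &DMatrix<f64>, r: &DVector<f64>, lambda: f64) -> Option<DVector<f64>> {
    let jtj = j.transpose() * j;
    let damped = &jtj + lambda * DMatrix::from_diagonal(&jtj.diagonal());
    let rhs = -(j.transpose() * r);
    damped.cholesky().map(|chol| chol.solve(&rhs))
}
```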
14 changes: 11 additions & 3 deletions src/optimizers/mod.rs
```diff
@@ -1,5 +1,13 @@
 mod traits;
-pub use traits::{OptError, OptResult, Optimizer, OptimizerCallback, OptimizerParams};
+pub use traits::{
+    GraphOptimizer,
+    OptError,
+    OptObserver,
+    OptObserverVec,
+    OptParams,
+    OptResult,
+    Optimizer,
+};
 
 mod macros;
 
@@ -30,7 +38,7 @@ pub mod test {
         T: 'static + VariableUmbrella<Dim = Const<DIM>>,
         UnitNoise<DIM>: NoiseModelSafe,
         PriorResidual<T>: ResidualSafe,
-        O: Optimizer,
+        O: Optimizer<Input = Values> + GraphOptimizer,
     {
         let t = VectorX::from_fn(T::DIM, |_, i| ((i + 1) as dtype) / 10.0);
         let p = T::exp(t.as_view());
@@ -63,7 +71,7 @@ pub mod test {
             ResidualSafe + Residual<DimIn = Const<DIM>, DimOut = Const<DIM>, NumVars = Const<1>>,
         BetweenResidual<T>: ResidualSafe
             + Residual<DimIn = Const<DIM_DOUBLE>, DimOut = Const<DIM>, NumVars = Const<2>>,
-        O: Optimizer,
+        O: Optimizer<Input = Values> + GraphOptimizer,
     {
         let t = VectorX::from_fn(T::DIM, |_, i| ((i as dtype) - (T::DIM as dtype)) / 10.0);
         let p1 = T::exp(t.as_view());
```
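The updated bounds in the tests show the intent of the split: construction (`new`) and `graph()` moved to `GraphOptimizer`, while `params`, `observers`, `init`, and `step` stay on `Optimizer`, now keyed by an associated `Input` type. A sketch of generic caller code against the new bounds, assuming `optimize` is a provided method on `Optimizer`:

```rust
use factrs::{
    containers::{Graph, Values},
    optimizers::{GraphOptimizer, Optimizer},
};

// Works for GaussNewton, LevenMarquardt, or any optimizer satisfying both traits.
fn solve<O: Optimizer<Input = Values> + GraphOptimizer>(graph: Graph, values: Values) -> Values {
    let mut opt = O::new(graph);
    opt.optimize(values).expect("optimizer failed")
}
```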
(Diffs for the 2 remaining changed files did not load.)