From df2224b8c00b7d5d9c433c0f4a7ec135cd059c02 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Thu, 2 May 2024 23:13:15 -0500 Subject: [PATCH 01/15] update Signed-off-by: Joe McCain III --- core/src/params/impls/impl_rand.rs | 1 - core/src/traits/arr/convert.rs | 22 -- core/src/traits/arr/misc.rs | 66 ++++++ core/src/traits/mod.rs | 32 ++- models/linear/Cargo.toml | 5 + models/linear/src/lib.rs | 9 +- models/linear/src/neurons/mod.rs | 23 -- models/linear/src/neurons/node.rs | 271 ------------------------ models/linear/src/neurons/perceptron.rs | 207 ------------------ models/linear/src/params/params.rs | 105 ++++++--- models/linear/src/utils.rs | 23 ++ models/linear/tests/model.rs | 2 +- models/linear/tests/params.rs | 23 ++ models/linear/tests/perceptron.rs | 28 --- 14 files changed, 232 insertions(+), 585 deletions(-) delete mode 100644 core/src/traits/arr/convert.rs create mode 100644 core/src/traits/arr/misc.rs delete mode 100644 models/linear/src/neurons/mod.rs delete mode 100644 models/linear/src/neurons/node.rs delete mode 100644 models/linear/src/neurons/perceptron.rs create mode 100644 models/linear/src/utils.rs create mode 100644 models/linear/tests/params.rs delete mode 100644 models/linear/tests/perceptron.rs diff --git a/core/src/params/impls/impl_rand.rs b/core/src/params/impls/impl_rand.rs index 3a94ab33..69e92466 100644 --- a/core/src/params/impls/impl_rand.rs +++ b/core/src/params/impls/impl_rand.rs @@ -11,7 +11,6 @@ use ndrand::rand_distr::uniform::SampleUniform; use ndrand::rand_distr::{Distribution, StandardNormal}; use num::Float; - impl Parameter where D: Dimension, diff --git a/core/src/traits/arr/convert.rs b/core/src/traits/arr/convert.rs deleted file mode 100644 index 6ffa5b1c..00000000 --- a/core/src/traits/arr/convert.rs +++ /dev/null @@ -1,22 +0,0 @@ -/* - Appellation: convert - Contrib: FL03 -*/ -use nd::Axis; - -pub trait IntoAxis { - fn into_axis(self) -> Axis; -} - -/* - ******** implementations ******** -*/ - -impl IntoAxis for S -where - S: AsRef, -{ - fn into_axis(self) -> Axis { - Axis(*self.as_ref()) - } -} diff --git a/core/src/traits/arr/misc.rs b/core/src/traits/arr/misc.rs new file mode 100644 index 00000000..943cd6cf --- /dev/null +++ b/core/src/traits/arr/misc.rs @@ -0,0 +1,66 @@ +/* + Appellation: convert + Contrib: FL03 +*/ +use nd::Axis; + +pub trait IntoAxis { + fn into_axis(self) -> Axis; +} + +pub trait NdArray { + type Dim; + + fn as_slice(&self) -> &[T]; + + fn dim(&self) -> Self::Dim; + + fn shape(&self) -> &[usize]; + + fn len(&self) -> usize { + self.as_slice().len() + } + + fn is_empty(&self) -> bool { + self.as_slice().is_empty() + } + + fn is_scalar(&self) -> bool { + self.shape().is_empty() + } +} + +/* + ******** implementations ******** +*/ + +impl IntoAxis for S +where + S: AsRef, +{ + fn into_axis(self) -> Axis { + Axis(*self.as_ref()) + } +} + +use nd::{ArrayBase, Dimension}; + +impl NdArray for ArrayBase +where + S: nd::Data, + D: Dimension, +{ + type Dim = D; + + fn as_slice(&self) -> &[A] { + ArrayBase::as_slice(self).unwrap() + } + + fn dim(&self) -> D { + self.raw_dim() + } + + fn shape(&self) -> &[usize] { + self.shape() + } +} diff --git a/core/src/traits/mod.rs b/core/src/traits/mod.rs index 06864d58..bca15f93 100644 --- a/core/src/traits/mod.rs +++ b/core/src/traits/mod.rs @@ -11,21 +11,47 @@ pub mod store; pub mod train; pub mod arr { - pub use self::{convert::*, like::*, ops::*}; + pub use self::{like::*, misc::*, ops::*}; - pub(crate) mod convert; pub(crate) mod like; + pub(crate) mod misc; pub(crate) mod ops; 
 }
 
+pub trait Decrement {
+    type Output;
+
+    fn dec(&self) -> Self::Output;
+}
+
+pub trait Increment {
+    type Output;
+
+    fn inc(&self) -> Self::Output;
+}
+
 pub trait Transform<T> {
     type Output;
 
     fn transform(&self, args: &T) -> Self::Output;
 }
 
+/*
+ ******** implementations ********
+*/
+impl<D> Decrement for D
+where
+    D: nd::RemoveAxis,
+{
+    type Output = D::Smaller;
+
+    fn dec(&self) -> Self::Output {
+        self.remove_axis(nd::Axis(self.ndim() - 1))
+    }
+}
+
 pub(crate) mod prelude {
-    pub use super::Transform;
+    pub use super::{Decrement, Transform};
 
     pub use super::arr::*;
     pub use super::math::*;
diff --git a/models/linear/Cargo.toml b/models/linear/Cargo.toml
index f69ae36c..d56f85bd 100644
--- a/models/linear/Cargo.toml
+++ b/models/linear/Cargo.toml
@@ -19,6 +19,7 @@ default = [
 
 full = [
     "default",
+    "approx",
     "rand",
     "serde",
     "tracing",
@@ -78,6 +79,10 @@ test = true
 name = "model"
 required-features = ["rand"]
 
+[[test]]
+name = "params"
+required-features = ["rand"]
+
 [build-dependencies]
 
 [dependencies]
diff --git a/models/linear/src/lib.rs b/models/linear/src/lib.rs
index ed614c23..3efa291a 100644
--- a/models/linear/src/lib.rs
+++ b/models/linear/src/lib.rs
@@ -19,20 +19,19 @@ extern crate ndarray_stats as stats;
 
 pub use self::model::{Config, Features, Linear};
 pub use self::params::LinearParams;
-pub use self::{neurons::*, traits::*};
+#[allow(unused_imports)]
+pub use self::{traits::*, utils::*};
+
+pub(crate) mod utils;
 
 pub mod conv;
 pub mod dense;
 pub mod model;
-#[doc(hidden)]
-pub mod neurons;
 pub mod params;
 pub mod traits;
 
 pub mod prelude {
     pub use crate::model::prelude::*;
-    #[doc(hidden)]
-    pub use crate::neurons::Perceptron;
     pub use crate::params::prelude::*;
     pub use crate::traits::*;
 }
diff --git a/models/linear/src/neurons/mod.rs b/models/linear/src/neurons/mod.rs
deleted file mode 100644
index 9ea7a25a..00000000
--- a/models/linear/src/neurons/mod.rs
+++ /dev/null
@@ -1,23 +0,0 @@
-/*
-    Appellation: neurons
-    Contrib: FL03
-*/
-//! 
# neurons -pub use self::{node::*, perceptron::*}; - -pub(crate) mod node; -pub(crate) mod perceptron; - -pub trait ArtificialNeuron { - type Rho: for<'a> Fn(&'a Self::Output) -> Self::Output; - type Output; - - fn activate(&self, x: &Self::Output) -> Self::Output { - (self.rho())(x) - } - - fn rho(&self) -> &Self::Rho; -} - -#[cfg(test)] -mod tests {} diff --git a/models/linear/src/neurons/node.rs b/models/linear/src/neurons/node.rs deleted file mode 100644 index 6ba8fbaa..00000000 --- a/models/linear/src/neurons/node.rs +++ /dev/null @@ -1,271 +0,0 @@ -/* - Appellation: node - Contrib: FL03 -*/ -use concision::prelude::{Forward, GenerateRandom, Predict, PredictError}; - -use ndarray::linalg::Dot; -use ndarray::prelude::{Array, Array0, Array1, Array2, Dimension, NdFloat}; -use ndarray::{RemoveAxis, ScalarOperand}; -use ndarray_rand::rand_distr::uniform::SampleUniform; -use ndarray_rand::rand_distr::{Distribution, StandardNormal}; -use num::{Float, Num}; -use std::ops; - -#[derive(Clone, Debug, PartialEq)] -pub struct Node { - bias: Option>, - features: usize, - weights: Array1, -} - -impl Node { - pub fn new(biased: bool, features: usize) -> Self - where - T: Default, - { - let bias = if biased { - Some(Array0::default(())) - } else { - None - }; - Self { - bias, - features, - weights: Array1::default(features), - } - } - - pub fn biased(self) -> Self - where - T: Default, - { - Self { - bias: Some(Array0::default(())), - ..self - } - } - - pub fn unbiased(features: usize) -> Self - where - T: Default, - { - Self { - bias: None, - features, - weights: Array1::default(features), - } - } - - pub fn activate(&self, data: &Array2, activator: A) -> Array1 - where - A: Fn(&Array1) -> Array1, - Node: Forward, Output = Array1> - { - activator(&self.forward(data)) - } - - pub fn apply_gradient(&mut self, gamma: T, grad: G) - where - G: for <'a> Fn(&'a T) -> T, - T: Copy + nd::LinalgScalar + num::Signed - { - - let dw = self.weights().map(|w| grad(w)); - - self.weights_mut().scaled_add(-gamma, &dw); - if let Some(bias) = self.bias_mut() { - let db = bias.map(|b| grad(b)); - bias.scaled_add(-gamma, &db); - } - } - - pub fn bias(&self) -> Option<&Array0> { - self.bias.as_ref() - } - - pub fn bias_mut(&mut self) -> Option<&mut Array0> { - self.bias.as_mut() - } - - pub fn features(&self) -> usize { - self.features - } - - pub fn is_biased(&self) -> bool { - self.bias.is_some() - } - - pub fn linear(&self, data: &Array2) -> Array1 - where - T: Num + ScalarOperand, - Array2: Dot, Output = Array1>, - { - let w = self.weights().t().to_owned(); - if let Some(bias) = self.bias() { - data.dot(&w) + bias - } else { - data.dot(&w) - } - } - - pub fn set_bias(&mut self, bias: Option>) { - self.bias = bias; - } - - pub fn set_features(&mut self, features: usize) { - self.features = features; - } - - pub fn set_weights(&mut self, weights: Array1) { - self.weights = weights; - } - - pub fn weights(&self) -> &Array1 { - &self.weights - } - - pub fn weights_mut(&mut self) -> &mut Array1 { - &mut self.weights - } - - pub fn with_bias(self, bias: Option>) -> Self { - Self { bias, ..self } - } - - pub fn with_weights(self, weights: Array1) -> Self { - Self { weights, ..self } - } - - -} -impl Node -where - T: Float + SampleUniform, - StandardNormal: Distribution, -{ - pub fn init(mut self, biased: bool) -> Self { - if biased { - self = self.init_bias(); - } - self.init_weight() - } - - pub fn init_bias(mut self) -> Self { - let dk = (T::one() / T::from(self.features).unwrap()).sqrt(); - self.bias = 
Some(Array0::uniform_between(dk, ())); - self - } - - pub fn init_weight(mut self) -> Self { - let features = self.features; - let dk = (T::one() / T::from(features).unwrap()).sqrt(); - self.weights = Array1::uniform_between(dk, features); - self - } - - - - -} - -impl Predict for Node -where - T: Clone, - A: Dot, Output = B>, - B: ops::Add, Output = B>, -{ - type Output = B; - - fn predict(&self, data: &A) -> Result { - let wt = self.weights().t().to_owned(); - let mut out = data.dot(&wt); - if let Some(bias) = self.bias() { - out = out + bias.clone(); - } - Ok(out) - } -} - -impl FromIterator for Node -where - T: Float, -{ - fn from_iter(iter: I) -> Self - where - I: IntoIterator, - { - let weights = Array1::::from_iter(iter); - Self { - bias: None, - features: weights.len(), - weights, - } - } -} - -impl From<(Array1, Array0)> for Node -where - T: Float, -{ - fn from((weights, bias): (Array1, Array0)) -> Self { - Self { - bias: Some(bias), - features: weights.len(), - weights, - } - } -} - -impl From<(Array1, T)> for Node -where - T: NdFloat, -{ - fn from((weights, bias): (Array1, T)) -> Self { - Self { - bias: Some(Array0::ones(()) * bias), - features: weights.len(), - weights, - } - } -} - -impl From<(Array1, Option)> for Node -where - T: Float + ScalarOperand, -{ - fn from((weights, bias): (Array1, Option)) -> Self { - let bias = if let Some(b) = bias { - Some(Array0::ones(()) * b) - } else { - None - }; - Self { - bias, - features: weights.len(), - weights, - } - } -} - -impl From<(Array1, Option>)> for Node -where - T: Float, -{ - fn from((weights, bias): (Array1, Option>)) -> Self { - Self { - bias, - features: weights.len(), - weights, - } - } -} - -impl From> for (Array1, Option>) -where - T: Float, -{ - fn from(node: Node) -> Self { - (node.weights, node.bias) - } -} diff --git a/models/linear/src/neurons/perceptron.rs b/models/linear/src/neurons/perceptron.rs deleted file mode 100644 index e6ed4a28..00000000 --- a/models/linear/src/neurons/perceptron.rs +++ /dev/null @@ -1,207 +0,0 @@ -/* - Appellation: perceptron - Contrib: FL03 -*/ -use super::Node; -use concision::prelude::{Forward, Predict, PredictError}; -use core::ops; -use ndarray::prelude::{Array0, Array1, Array2, NdFloat}; -use ndrand::rand_distr::uniform::SampleUniform; -use ndrand::rand_distr::{Distribution, StandardNormal}; -use num::Float; - -pub fn linear_activation(x: &Array1) -> Array1 { - x.clone() -} - -pub type Rho = Box T>; - -/// A perceptron -pub struct Perceptron) -> Array1>> { - activation: F, - node: Node, -} - -impl Perceptron { - pub fn new(activation: F, features: usize) -> Self where T: Default { - Self { - activation, - node: Node::new(false, features), - } - } - - pub fn node(&self) -> &Node { - &self.node - } - - pub fn node_mut(&mut self) -> &mut Node { - &mut self.node - } - - pub fn features(&self) -> usize { - self.node().features() - } - - pub fn params(&self) -> &Node { - &self.node - } - - pub fn params_mut(&mut self) -> &mut Node { - &mut self.node - } - - pub fn rho(&self) -> &F { - &self.activation - } - - pub fn set_weights(&mut self, weights: Array1) { - self.node.set_weights(weights); - } - - pub fn weights(&self) -> &Array1 { - self.node.weights() - } - - pub fn weights_mut(&mut self) -> &mut Array1 { - self.node.weights_mut() - } - - pub fn with_bias(self, bias: Option>) -> Self { - Self { - node: self.node.with_bias(bias), - ..self - } - } - - pub fn with_node(self, node: Node) -> Self { - Self { node, ..self } - } - - pub fn with_rho(self, rho: G) -> Perceptron { - Perceptron { 
- activation: rho, - node: self.node, - } - } - - pub fn with_weights(self, weights: Array1) -> Self { - Self { - node: self.node.with_weights(weights), - ..self - } - } - - pub fn apply_gradient(&mut self, gamma: T, gradient: G) - where - G: Fn(&Array1) -> Array1, - T: Copy + nd::LinalgScalar + ops::Neg, - { - let grad = gradient(self.node().weights()); - self.update_with_gradient(gamma, &grad); - } - - pub fn update_with_gradient(&mut self, gamma: T, grad: &Array1) - where - T: Copy + nd::LinalgScalar + ops::Neg, - { - self.node.weights_mut().scaled_add(-gamma, grad); - } -} - -impl Perceptron -where - T: Float + SampleUniform, - StandardNormal: Distribution, -{ - pub fn init(mut self, biased: bool) -> Self { - if biased { - self = self.init_bias(); - } - self.init_weight() - } - - pub fn init_bias(mut self) -> Self { - self.node = self.node.init_bias(); - self - } - - pub fn init_weight(mut self) -> Self { - self.node = self.node.init_weight(); - self - } -} - -impl Predict> for Perceptron -where - A: for <'a> Fn(&'a T) -> T, - Node: Forward, Output = Array1> -{ - type Output = Array1; - - fn predict(&self, args: &Array2) -> Result { - let linstep = self.params().forward(args); - let res = linstep.map(|x| (self.rho())(x)); - Ok(res) - } -} - -macro_rules! impl_into_perceptron { - ($(($w:ty, $b:ty)),* $(,)?) => { - $( - impl_into_perceptron!(@impl $w, $b); - )* - }; - (@impl $w:ty, $b:ty) => { - impl From<($w, $b)> for Perceptron - where - T: Clone + nd::NdFloat + 'static, - { - fn from((weights, bias): ($w, $b)) -> Self { - Self { - activation: Box::new(linear_activation), - node: Node::from((weights, bias)), - } - } - } - - }; - (@into $w:ty, $b:ty) => { - - impl From> for ($w, $b) - where - T: Clone + nd::NdFloat + 'static, - { - fn from(neuron: Perceptron) -> Self { - neuron.node().clone().into() - } - } - - }; - -} - -impl_into_perceptron!((Array1, Array0), (Array1, T)); - -impl From<(Array1, Array0, A)> for Perceptron -where - T: Float, -{ - fn from((weights, bias, activation): (Array1, Array0, A)) -> Self { - Self { - activation, - node: Node::from((weights, bias)), - } - } -} - -impl From<(Array1, T, A)> for Perceptron -where - T: NdFloat, -{ - fn from((weights, bias, activation): (Array1, T, A)) -> Self { - Self { - activation, - node: Node::from((weights, bias)), - } - } -} diff --git a/models/linear/src/params/params.rs b/models/linear/src/params/params.rs index 664f364a..589db448 100644 --- a/models/linear/src/params/params.rs +++ b/models/linear/src/params/params.rs @@ -2,8 +2,8 @@ Appellation: params Contrib: FL03 */ +use crate::build_bias; use crate::model::Features; -use crate::Node; use core::ops; use nd::linalg::Dot; use nd::*; @@ -14,17 +14,7 @@ use alloc::vec; #[cfg(feature = "std")] use std::vec; -pub(crate) fn build_bias(biased: bool, dim: D, builder: F) -> Option> -where - D: RemoveAxis, - F: Fn(D::Smaller) -> Array, -{ - if biased { - Some(builder(dim.remove_axis(Axis(dim.ndim() - 1)))) - } else { - None - } -} +pub(crate) type Node = (Array, Option>); #[derive(Clone, Debug, Eq, PartialEq)] pub struct LinearParams @@ -53,6 +43,19 @@ where } } + pub fn default(dim: impl IntoDimension) -> Self + where + A: Clone + Default, + { + let dim = dim.into_dimension(); + let bias = build_bias(true, dim.clone(), |dim| Array::default(dim)); + Self { + bias, + features: dim.clone(), + weights: Array::default(dim), + } + } + pub fn ones(shape: Sh) -> Self where Sh: ShapeBuilder, @@ -150,24 +153,25 @@ where impl LinearParams { pub fn set_node(&mut self, idx: usize, node: Node) 
where - T: Float, + T: Clone + Default, { - if let Some(bias) = node.bias() { + let (weight, bias) = node; + if let Some(bias) = bias { if !self.is_biased() { - let mut tmp = Array1::zeros(self.outputs()); - tmp.index_axis_mut(Axis(0), idx).assign(bias); + let mut tmp = Array1::default(self.outputs()); + tmp.index_axis_mut(Axis(0), idx).assign(&bias); self.bias = Some(tmp); } self.bias .as_mut() .unwrap() .index_axis_mut(Axis(0), idx) - .assign(bias); + .assign(&bias); } self.weights_mut() .index_axis_mut(Axis(0), idx) - .assign(&node.weights()); + .assign(&weight); } } @@ -184,7 +188,7 @@ where .weights() .axis_iter(Axis(0)) .zip(bias.axis_iter(Axis(0))) - .map(|(w, b)| (w.to_owned(), b.to_owned()).into()) + .map(|(w, b)| (w.to_owned(), Some(b.to_owned()))) .collect::>() .into_iter(); } @@ -196,16 +200,16 @@ where } } -impl FromIterator> for LinearParams +impl FromIterator<(Array1, Option>)> for LinearParams where - T: Float, + T: Clone + Default, { - fn from_iter>>(nodes: I) -> Self { + fn from_iter, Option>)>>(nodes: I) -> Self { let nodes = nodes.into_iter().collect::>(); let mut iter = nodes.iter(); let node = iter.next().unwrap(); - let shape = Features::new(node.features(), nodes.len()); - let mut params = Self::zeros(shape); + let shape = Features::new(node.0.shape()[0], nodes.len()); + let mut params = LinearParams::default(shape); params.set_node(0, node.clone()); for (i, node) in iter.into_iter().enumerate() { params.set_node(i + 1, node.clone()); @@ -213,3 +217,56 @@ where params } } + +impl From<(Array, A)> for LinearParams +where + A: Clone, +{ + fn from((weights, bias): (Array, A)) -> Self { + let bias = Array::from_elem((), bias); + Self { + bias: Some(bias), + features: weights.raw_dim(), + weights, + } + } +} + +impl From<(Array, Option)> for LinearParams +where + A: Clone, +{ + fn from((weights, bias): (Array, Option)) -> Self { + let bias = bias.map(|b| Array::from_elem((), b)); + Self { + bias, + features: weights.raw_dim(), + weights, + } + } +} +impl From<(Array, Array)> for LinearParams +where + D: RemoveAxis, +{ + fn from((weights, bias): (Array, Array)) -> Self { + Self { + bias: Some(bias), + features: weights.raw_dim(), + weights, + } + } +} + +impl From<(Array, Option>)> for LinearParams +where + D: RemoveAxis, +{ + fn from((weights, bias): (Array, Option>)) -> Self { + Self { + bias, + features: weights.raw_dim(), + weights, + } + } +} diff --git a/models/linear/src/utils.rs b/models/linear/src/utils.rs new file mode 100644 index 00000000..25ddf7bd --- /dev/null +++ b/models/linear/src/utils.rs @@ -0,0 +1,23 @@ +/* + Appellation: utils + Contrib: FL03 +*/ +use concision::Decrement; +use nd::*; + +pub(crate) fn build_bias( + biased: bool, + dim: D, + builder: F, +) -> Option> +where + S: RawData, + D: RemoveAxis, + F: Fn(D::Smaller) -> ArrayBase, +{ + if biased { + Some(builder(dim.dec())) + } else { + None + } +} diff --git a/models/linear/tests/model.rs b/models/linear/tests/model.rs index 35959fb2..d3c36acb 100644 --- a/models/linear/tests/model.rs +++ b/models/linear/tests/model.rs @@ -7,7 +7,7 @@ extern crate concision_core as concision; extern crate concision_linear as linear; -use concision::prelude::{linarr, Predict}; +use concision::{linarr, Predict}; use linear::{Config, Features, Linear}; use lazy_static::lazy_static; diff --git a/models/linear/tests/params.rs b/models/linear/tests/params.rs new file mode 100644 index 00000000..3053900c --- /dev/null +++ b/models/linear/tests/params.rs @@ -0,0 +1,23 @@ +/* + Appellation: params + Contrib: FL03 +*/ 
+#![cfg(test)] + +extern crate concision_core as concision; +extern crate concision_linear as linear; + +use concision::func::activate::{softmax, Softmax}; +use concision::Predict; +use linear::{Features, LinearParams}; +use ndarray::*; + +#[test] +fn test_linear_params() { + let (samples, inputs, outputs) = (20, 5, 3); + let features = Features::new(inputs, outputs); + let data = Array2::::zeros((samples, inputs)); + let params = LinearParams::default(features.clone()).init_uniform(true); + let y = params.forward(&data).unwrap(); + assert_eq!(y.dim(), (samples, outputs)); +} diff --git a/models/linear/tests/perceptron.rs b/models/linear/tests/perceptron.rs deleted file mode 100644 index 772d2acd..00000000 --- a/models/linear/tests/perceptron.rs +++ /dev/null @@ -1,28 +0,0 @@ -/* - Appellation: model - Contrib: FL03 -*/ -#![allow(unused)] -#![cfg(test)] - -extern crate concision_core as concision; -extern crate concision_linear as linear; - -use concision::func::activate::{softmax, Softmax}; -use concision::traits::Forward; -use linear::Perceptron; -use ndarray::*; - -#[test] -fn perceptron() { - let bias = 0.0; - - let data = array![[10.0, 10.0, 6.0, 1.0, 8.0]]; - let weights = array![2.0, 1.0, 10.0, 1.0, 7.0]; - // let neuron = Perceptron::::new(Box::new(Softmax::softmax), 5).with_weights(weights.clone()); - - // let linear = data.dot(&weights) + bias; - // let exp = softmax(&linear); - - // assert_eq!(exp, neuron.forward(&data)); -} From ed3f54539781ffd6d8e8b8ef716e2e12b49b1d92 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Thu, 2 May 2024 23:16:36 -0500 Subject: [PATCH 02/15] update Signed-off-by: Joe McCain III --- .github/workflows/rust.yml | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index 13f3a3a2..ccc0b581 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -14,6 +14,7 @@ on: branches: [ main ] tags: [ v*.*.* ] release: + types: [ created ] repository_dispatch: types: [ rust ] schedule: @@ -76,13 +77,14 @@ jobs: name: Test (blas) strategy: matrix: - toolchain: [ stable, nightly ] + crate: [ core, data, gnn, linear ] + toolchain: [ stable ] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: setup (langspace) + - name: rustup run: | rustup default ${{ matrix.toolchain }} rustup update - - name: Test - run: cargo test --features blas -v --workspace + - name: test + run: cargo test --features blas -v --package ${{ github.event.repository.name }}-${{ matrix.crate }} From e47a715051e94835e57f00da21ff7ff08bcbdd83 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Fri, 3 May 2024 00:17:31 -0500 Subject: [PATCH 03/15] update Signed-off-by: Joe McCain III --- concision/tests/default.rs | 18 +++++++++++------- core/Cargo.toml | 13 ++++++------- core/src/primitives.rs | 4 ++-- core/tests/default.rs | 19 ++++++++++++++----- core/tests/params.rs | 32 ++++++++++++-------------------- core/tests/traits.rs | 3 +-- core/tests/utils.rs | 5 ++++- data/tests/default.rs | 19 ++++++++++++++----- models/gnn/tests/default.rs | 3 +-- models/gnn/tests/model.rs | 1 - models/linear/tests/default.rs | 3 +-- models/linear/tests/model.rs | 2 -- models/linear/tests/params.rs | 2 -- 13 files changed, 66 insertions(+), 58 deletions(-) diff --git a/concision/tests/default.rs b/concision/tests/default.rs index e68a9cf5..233a07af 100644 --- a/concision/tests/default.rs +++ b/concision/tests/default.rs @@ -1,13 +1,17 @@ /* - Appellation: default - Contrib: FL03 + Appellation: default + 
Contrib: FL03 */ -#![cfg(test)] + +fn add(a: A, b: B) -> C +where + A: core::ops::Add, +{ + a + b +} #[test] fn compiles() { - let f = |x: usize, y: usize| x + y; - - assert_eq!(f(10, 10), 20); - assert_ne!(f(1, 1), 3); + assert_eq!(add(10, 10), 20); + assert_ne!(add(1, 1), 3); } diff --git a/core/Cargo.toml b/core/Cargo.toml index c2461a34..4e730903 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -32,12 +32,9 @@ blas = [ "ndarray/blas", ] -# rand = [ -# "dep:rand", -# "dep:ndarray-rand" -# "num/rand", -# ] rand = [ + "dep:rand", + "dep:ndarray-rand", "num/rand", ] @@ -73,15 +70,17 @@ test = true [dependencies] approx = { optional = true, version = "0.5" } ndarray.workspace = true -ndarray-rand = "0.14" # { optional = true, version = "0.14" } +ndarray-rand = { optional = true, version = "0.14" } num.workspace = true -rand = "0.8" # { optional = true, version = "0.8" } +rand = { optional = true, version = "0.8" } serde = { features = ["derive"], optional = true, version = "1" } smart-default.workspace = true strum.workspace = true tracing = { optional = true, version = "0.1" } uuid = { features = ["v4", "v7"], version = "1" } + + [dev-dependencies] approx = "0.5" lazy_static = "1" diff --git a/core/src/primitives.rs b/core/src/primitives.rs index 105f5765..3b88a333 100644 --- a/core/src/primitives.rs +++ b/core/src/primitives.rs @@ -5,8 +5,8 @@ pub use self::constants::*; pub use ndarray::ShapeError; -// #[cfg(feature = "rand")] -pub use ndarray_rand::rand_distr::uniform::SampleUniform; +#[cfg(feature = "rand")] +pub use ndrand::rand_distr::uniform::SampleUniform; /// pub type ShapeResult = core::result::Result; diff --git a/core/tests/default.rs b/core/tests/default.rs index 0cac1eb5..233a07af 100644 --- a/core/tests/default.rs +++ b/core/tests/default.rs @@ -1,8 +1,17 @@ -#[cfg(test)] +/* + Appellation: default + Contrib: FL03 +*/ + +fn add(a: A, b: B) -> C +where + A: core::ops::Add, +{ + a + b +} + #[test] fn compiles() { - let f = |x: usize, y: usize| x + y; - - assert_eq!(f(10, 10), 20); - assert_ne!(f(1, 1), 3); + assert_eq!(add(10, 10), 20); + assert_ne!(add(1, 1), 3); } diff --git a/core/tests/params.rs b/core/tests/params.rs index d2cd68dd..1b462800 100644 --- a/core/tests/params.rs +++ b/core/tests/params.rs @@ -1,23 +1,13 @@ /* - Appellation: traits + Appellation: params Contrib: FL03 */ -#![cfg(test)] - -#[cfg(not(feature = "std"))] -extern crate alloc; - extern crate concision_core as concision; use concision::linarr; -use concision::params::{ParamKind, Parameter}; +use concision::params::{Parameter, ParamKind}; +use ndarray::*; use ndarray::linalg::Dot; -use ndarray::prelude::{Ix1, Ix2}; - -#[cfg(not(feature = "std"))] -use alloc::collections::BTreeMap as Map; -#[cfg(feature = "std")] -use std::collections::HashMap as Map; #[test] fn test_parameter() { @@ -37,12 +27,14 @@ fn test_param_kind_map() { let other = ParamKind::other(name); let data = [ - (ParamKind::Bias, 0), - (ParamKind::Weight, 1), - (other.clone(), 2), - (ParamKind::other("mask"), 3), + (ParamKind::Bias, "bias"), + (ParamKind::Weight, "weight"), + (other.clone(), "test"), + (ParamKind::other("mask"), "mask"), ]; - let store = Map::::from_iter(data); - assert_eq!(store.get(&ParamKind::Bias), Some(&0)); - assert_eq!(store.get(&other), Some(&2)); + + for (kind, expected) in &data { + assert_eq!(kind.to_string(), expected.to_string()); + } + } diff --git a/core/tests/traits.rs b/core/tests/traits.rs index 3a1ca0af..951cea9a 100644 --- a/core/tests/traits.rs +++ b/core/tests/traits.rs @@ -1,8 +1,7 @@ /* - 
Appellation: traits + Appellation: traits Contrib: FL03 */ -#![cfg(test)] extern crate concision_core as cnc; use cnc::traits::{Affine, AsComplex, Matpow}; diff --git a/core/tests/utils.rs b/core/tests/utils.rs index c7811f3d..987da44d 100644 --- a/core/tests/utils.rs +++ b/core/tests/utils.rs @@ -1,4 +1,7 @@ -#[cfg(test)] +/* + Appellation: utils + Contrib: FL03 +*/ extern crate concision_core; use concision_core as cnc; diff --git a/data/tests/default.rs b/data/tests/default.rs index 0cac1eb5..233a07af 100644 --- a/data/tests/default.rs +++ b/data/tests/default.rs @@ -1,8 +1,17 @@ -#[cfg(test)] +/* + Appellation: default + Contrib: FL03 +*/ + +fn add(a: A, b: B) -> C +where + A: core::ops::Add, +{ + a + b +} + #[test] fn compiles() { - let f = |x: usize, y: usize| x + y; - - assert_eq!(f(10, 10), 20); - assert_ne!(f(1, 1), 3); + assert_eq!(add(10, 10), 20); + assert_ne!(add(1, 1), 3); } diff --git a/models/gnn/tests/default.rs b/models/gnn/tests/default.rs index e26024d9..233a07af 100644 --- a/models/gnn/tests/default.rs +++ b/models/gnn/tests/default.rs @@ -1,8 +1,7 @@ /* - Appellation: model + Appellation: default Contrib: FL03 */ -#![cfg(test)] fn add(a: A, b: B) -> C where diff --git a/models/gnn/tests/model.rs b/models/gnn/tests/model.rs index 2012e291..245ec155 100644 --- a/models/gnn/tests/model.rs +++ b/models/gnn/tests/model.rs @@ -3,7 +3,6 @@ Contrib: FL03 */ #![allow(unused)] -#![cfg(test)] extern crate concision_core as concision; extern crate concision_gnn as gnn; diff --git a/models/linear/tests/default.rs b/models/linear/tests/default.rs index e26024d9..233a07af 100644 --- a/models/linear/tests/default.rs +++ b/models/linear/tests/default.rs @@ -1,8 +1,7 @@ /* - Appellation: model + Appellation: default Contrib: FL03 */ -#![cfg(test)] fn add(a: A, b: B) -> C where diff --git a/models/linear/tests/model.rs b/models/linear/tests/model.rs index d3c36acb..ff02c2ad 100644 --- a/models/linear/tests/model.rs +++ b/models/linear/tests/model.rs @@ -2,8 +2,6 @@ Appellation: model Contrib: FL03 */ -#![cfg(test)] - extern crate concision_core as concision; extern crate concision_linear as linear; diff --git a/models/linear/tests/params.rs b/models/linear/tests/params.rs index 3053900c..5274ee14 100644 --- a/models/linear/tests/params.rs +++ b/models/linear/tests/params.rs @@ -2,8 +2,6 @@ Appellation: params Contrib: FL03 */ -#![cfg(test)] - extern crate concision_core as concision; extern crate concision_linear as linear; From e16ddc147c3e56adcfcc692f5b1bb10a6c6376c5 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Fri, 3 May 2024 00:20:17 -0500 Subject: [PATCH 04/15] update Signed-off-by: Joe McCain III --- Cargo.toml | 1 - concision/Cargo.toml | 2 +- models/linear/tests/model.rs | 2 +- models/linear/tests/params.rs | 3 +-- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index 46091c6e..f7da7b99 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,7 +15,6 @@ version = "0.1.12" # TODO - Update the cargo package version # ndtensor = { features = ["full"], branch = "v0.1.1", git = "https://github.com/FL03/ndtensor", version = "0.1" } # scsys = { features = ["full"], branch = "v0.2.2", git = "https://github.com/scattered-systems/scsys", version = "0.2" } -anyhow = "1" approx = "0.5" itertools = "0.12" lazy_static = "1" diff --git a/concision/Cargo.toml b/concision/Cargo.toml index b46b0e4a..0530be5c 100644 --- a/concision/Cargo.toml +++ b/concision/Cargo.toml @@ -142,7 +142,7 @@ concision-linear = { optional = true, path = "../models/linear", version = 
"0.1. concision-gnn = { optional = true, path = "../models/gnn", version = "0.1.12" } [dev-dependencies] -anyhow.workspace = true +anyhow = "1" lazy_static.workspace = true ndarray.workspace = true tracing = "0.1" diff --git a/models/linear/tests/model.rs b/models/linear/tests/model.rs index ff02c2ad..a0f84c5c 100644 --- a/models/linear/tests/model.rs +++ b/models/linear/tests/model.rs @@ -28,5 +28,5 @@ fn test_linear() { let model: Linear = Linear::std(CONFIG.clone()).uniform(); - let y = model.predict(&data).unwrap(); + let _y = model.predict(&data).unwrap(); } diff --git a/models/linear/tests/params.rs b/models/linear/tests/params.rs index 5274ee14..97185276 100644 --- a/models/linear/tests/params.rs +++ b/models/linear/tests/params.rs @@ -5,7 +5,6 @@ extern crate concision_core as concision; extern crate concision_linear as linear; -use concision::func::activate::{softmax, Softmax}; use concision::Predict; use linear::{Features, LinearParams}; use ndarray::*; @@ -16,6 +15,6 @@ fn test_linear_params() { let features = Features::new(inputs, outputs); let data = Array2::::zeros((samples, inputs)); let params = LinearParams::default(features.clone()).init_uniform(true); - let y = params.forward(&data).unwrap(); + let y = params.predict(&data).unwrap(); assert_eq!(y.dim(), (samples, outputs)); } From ca7302166c9ae231f46a641e4ed99705fc7aaac7 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Fri, 3 May 2024 14:15:18 -0500 Subject: [PATCH 05/15] update Signed-off-by: Joe McCain III --- core/src/models/{model.rs => activate.rs} | 4 +-- core/src/models/mod.rs | 18 +++---------- core/src/rand/mod.rs | 3 +-- core/src/traits/mod.rs | 21 ++++++++++++--- .../src/{models/traits => traits/nn}/model.rs | 0 .../traits/modules.rs => traits/nn/module.rs} | 2 +- core/src/traits/{ => nn}/predict.rs | 0 core/src/traits/{ => nn}/train.rs | 0 core/src/traits/store.rs | 4 +-- core/tests/params.rs | 8 ++---- core/tests/traits.rs | 4 +-- core/tests/utils.rs | 11 +++----- models/linear/src/model/linear.rs | 26 ++++++------------- 13 files changed, 39 insertions(+), 62 deletions(-) rename core/src/models/{model.rs => activate.rs} (91%) rename core/src/{models/traits => traits/nn}/model.rs (100%) rename core/src/{models/traits/modules.rs => traits/nn/module.rs} (94%) rename core/src/traits/{ => nn}/predict.rs (100%) rename core/src/traits/{ => nn}/train.rs (100%) diff --git a/core/src/models/model.rs b/core/src/models/activate.rs similarity index 91% rename from core/src/models/model.rs rename to core/src/models/activate.rs index f8b94522..3f26775e 100644 --- a/core/src/models/model.rs +++ b/core/src/models/activate.rs @@ -2,9 +2,7 @@ Appellation: model Contrib: FL03 */ -use super::Module; -use crate::error::PredictError; -use crate::Predict; +use crate::prelude::{Module, Predict, PredictError}; pub struct Activator { activation: F, diff --git a/core/src/models/mod.rs b/core/src/models/mod.rs index 92653cff..5941aece 100644 --- a/core/src/models/mod.rs +++ b/core/src/models/mod.rs @@ -2,26 +2,14 @@ Appellation: models Contrib: FL03 */ -pub use self::{error::ModelError, model::*, traits::prelude::*}; - -pub(crate) mod model; +pub use self::{activate::Activator, error::ModelError}; +pub mod activate; pub mod error; -pub(crate) mod traits { - mod model; - mod modules; - - pub(crate) mod prelude { - pub use super::model::*; - pub use super::modules::*; - } -} - pub(crate) mod prelude { + pub use super::activate::Activator; pub use super::error::ModelError; - pub use super::model::*; - pub use super::traits::prelude::*; 
} #[cfg(test)] diff --git a/core/src/rand/mod.rs b/core/src/rand/mod.rs index c31e7b02..f101cdc4 100644 --- a/core/src/rand/mod.rs +++ b/core/src/rand/mod.rs @@ -4,8 +4,7 @@ */ #![cfg(feature = "rand")] -pub use self::generate::*; -pub use self::utils::*; +pub use self::prelude::*; pub(crate) mod generate; pub(crate) mod utils; diff --git a/core/src/traits/mod.rs b/core/src/traits/mod.rs index bca15f93..021aeda6 100644 --- a/core/src/traits/mod.rs +++ b/core/src/traits/mod.rs @@ -5,10 +5,8 @@ pub use self::prelude::*; pub mod math; -pub mod predict; pub mod setup; pub mod store; -pub mod train; pub mod arr { pub use self::{like::*, misc::*, ops::*}; @@ -18,6 +16,22 @@ pub mod arr { pub(crate) mod ops; } +pub mod nn { + pub use self::prelude::*; + + pub mod model; + pub mod module; + pub mod predict; + pub mod train; + + pub(crate) mod prelude { + pub use super::model::*; + pub use super::module::*; + pub use super::predict::*; + pub use super::train::*; + } +} + pub trait Decrement { type Output; @@ -55,10 +69,9 @@ pub(crate) mod prelude { pub use super::arr::*; pub use super::math::*; - pub use super::predict::*; + pub use super::nn::prelude::*; pub use super::setup::*; pub use super::store::*; - pub use super::train::*; } #[cfg(test)] diff --git a/core/src/models/traits/model.rs b/core/src/traits/nn/model.rs similarity index 100% rename from core/src/models/traits/model.rs rename to core/src/traits/nn/model.rs diff --git a/core/src/models/traits/modules.rs b/core/src/traits/nn/module.rs similarity index 94% rename from core/src/models/traits/modules.rs rename to core/src/traits/nn/module.rs index e18b700f..6e307abb 100644 --- a/core/src/models/traits/modules.rs +++ b/core/src/traits/nn/module.rs @@ -1,5 +1,5 @@ /* - Appellation: modules + Appellation: modules Contrib: FL03 */ diff --git a/core/src/traits/predict.rs b/core/src/traits/nn/predict.rs similarity index 100% rename from core/src/traits/predict.rs rename to core/src/traits/nn/predict.rs diff --git a/core/src/traits/train.rs b/core/src/traits/nn/train.rs similarity index 100% rename from core/src/traits/train.rs rename to core/src/traits/nn/train.rs diff --git a/core/src/traits/store.rs b/core/src/traits/store.rs index 46b373c4..107b075a 100644 --- a/core/src/traits/store.rs +++ b/core/src/traits/store.rs @@ -2,9 +2,9 @@ Appellation: stores Contrib: FL03 */ -#[cfg(not(feature = "std"))] +#[cfg(no_std)] use alloc::collections::{btree_map, BTreeMap}; -#[cfg(feature = "std")] +#[cfg(not(no_std))] use std::collections::{btree_map, hash_map, BTreeMap, HashMap}; pub trait Entry<'a> { diff --git a/core/tests/params.rs b/core/tests/params.rs index 1b462800..774e75d1 100644 --- a/core/tests/params.rs +++ b/core/tests/params.rs @@ -2,12 +2,9 @@ Appellation: params Contrib: FL03 */ -extern crate concision_core as concision; - -use concision::linarr; -use concision::params::{Parameter, ParamKind}; -use ndarray::*; +use concision_core::prelude::{linarr, ParamKind, Parameter}; use ndarray::linalg::Dot; +use ndarray::*; #[test] fn test_parameter() { @@ -36,5 +33,4 @@ fn test_param_kind_map() { for (kind, expected) in &data { assert_eq!(kind.to_string(), expected.to_string()); } - } diff --git a/core/tests/traits.rs b/core/tests/traits.rs index 951cea9a..ff4920cb 100644 --- a/core/tests/traits.rs +++ b/core/tests/traits.rs @@ -2,9 +2,7 @@ Appellation: traits Contrib: FL03 */ -extern crate concision_core as cnc; - -use cnc::traits::{Affine, AsComplex, Matpow}; +use concision_core::traits::{Affine, AsComplex, Matpow}; use ndarray::prelude::*; use 
num::Complex; diff --git a/core/tests/utils.rs b/core/tests/utils.rs index 987da44d..cae3e736 100644 --- a/core/tests/utils.rs +++ b/core/tests/utils.rs @@ -2,13 +2,8 @@ Appellation: utils Contrib: FL03 */ -extern crate concision_core; - -use concision_core as cnc; - -use cnc::prelude::{linarr, tril}; -use cnc::traits::{Conjugate, Inverse}; -use ndarray::prelude::{array, Array2}; +use concision_core::prelude::{linarr, tril, Conjugate, Inverse}; +use ndarray::*; use num::Complex; #[test] @@ -42,7 +37,7 @@ fn test_inverse() { #[test] fn test_linarr() { - let args: Array2 = cnc::linarr((2, 3)).unwrap(); + let args: Array2 = linarr((2, 3)).unwrap(); assert_eq!(&args, &array![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]); } diff --git a/models/linear/src/model/linear.rs b/models/linear/src/model/linear.rs index 0aae0270..69240e1f 100644 --- a/models/linear/src/model/linear.rs +++ b/models/linear/src/model/linear.rs @@ -2,12 +2,12 @@ Appellation: model Contrib: FL03 */ -use super::Config; +use crate::model::Config; use crate::params::LinearParams; -use concision::models::Module; -use concision::prelude::{Predict, PredictError}; +use concision::prelude::{Module, Predict, PredictError}; use ndarray::{Dimension, Ix2, RemoveAxis}; +/// Linear model pub struct Linear where D: Dimension, @@ -81,7 +81,6 @@ where } } -#[cfg(not(feature = "tracing"))] impl Predict for Linear where D: RemoveAxis, @@ -89,21 +88,12 @@ where { type Output = B; + #[cfg_attr( + feature = "tracing", + tracing::instrument(skip(self, input), level = "debug", name = "Linear::predict") + )] fn predict(&self, input: &A) -> Result { - self.params.predict(input) - } -} - -#[cfg(feature = "tracing")] -impl Predict for Linear -where - D: RemoveAxis, - LinearParams: Predict, -{ - type Output = B; - - #[tracing::instrument(skip(self, input), level = "debug", name = "Linear::predict")] - fn predict(&self, input: &A) -> Result { + #[cfg(feature = "tracing")] tracing::debug!("Predicting with linear model"); self.params.predict(input) } From 04a9f1503b57c98f2fc7f96d4174190ff4aaf0b9 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Sat, 4 May 2024 11:45:52 -0500 Subject: [PATCH 06/15] update Signed-off-by: Joe McCain III --- core/src/func/activate.rs | 3 + core/src/func/activate/binary.rs | 18 ++++ core/src/func/activate/nl.rs | 87 +++++++++++------- core/src/macros.rs | 23 +++-- core/src/models/activate.rs | 8 +- core/src/ops/fft/cmp.rs | 4 +- core/src/ops/fft/mod.rs | 145 +----------------------------- core/src/ops/fft/plan.rs | 6 +- core/src/traits/nn/model.rs | 10 +++ core/tests/fft.rs | 148 +++++++++++++++++++++++++++++++ 10 files changed, 263 insertions(+), 189 deletions(-) create mode 100644 core/src/func/activate/binary.rs create mode 100644 core/tests/fft.rs diff --git a/core/src/func/activate.rs b/core/src/func/activate.rs index 18dd25ea..256e0477 100644 --- a/core/src/func/activate.rs +++ b/core/src/func/activate.rs @@ -4,6 +4,7 @@ */ pub use self::prelude::*; +pub mod binary; pub mod nl; pub fn linear(x: &T) -> T @@ -13,6 +14,8 @@ where x.clone() } + + pub(crate) mod prelude { pub use super::nl::*; } diff --git a/core/src/func/activate/binary.rs b/core/src/func/activate/binary.rs new file mode 100644 index 00000000..79b5f8a4 --- /dev/null +++ b/core/src/func/activate/binary.rs @@ -0,0 +1,18 @@ +/* + Appellation: binary + Contrib: FL03 +*/ +use num::{One, Zero}; + +pub fn heavyside(x: &T) -> T +where + T: One + PartialOrd + Zero, +{ + if x > &T::zero() { + T::one() + } else { + T::zero() + } +} + +build_unary_trait!(Heavyside.heavyside,); \ 
No newline at end of file diff --git a/core/src/func/activate/nl.rs b/core/src/func/activate/nl.rs index 5d2e0e48..5b6af651 100644 --- a/core/src/func/activate/nl.rs +++ b/core/src/func/activate/nl.rs @@ -2,7 +2,7 @@ Appellation: sigmoid Contrib: FL03 */ -use ndarray::{Array, Axis, Dimension, NdFloat, RemoveAxis}; +use ndarray::*; use num::complex::ComplexFloat; use num::{Float, One, Zero}; @@ -57,54 +57,75 @@ where args.mapv(|x| x.tanh()) } -macro_rules! unary { - ($($name:ident.$call:ident),* $(,)?) => { +build_unary_trait!(ReLU.relu, Sigmoid.sigmoid, Softmax.softmax, Tanh.tanh,); + +/* + ********** Implementations ********** +*/ + +macro_rules! impl_nl { + ($name:ident: $($T:ty),* $(,)?) => { $( - unary!(@impl $name.$call); + impl_nl!(@impl $name: $T); )* }; - (@impl $name:ident.$call:ident) => { - pub trait $name { - type Output; + (@impl relu: $T:ty) => { + impl ReLU for $T { + type Output = $T; - fn $call(&self) -> Self::Output; + fn relu(&self) -> Self::Output { + if *self > <$T>::zero() { + *self + } else { + <$T>::zero() + } + } } }; -} - -unary!(ReLU.relu, Sigmoid.sigmoid, Softmax.softmax, Tanh.tanh,); - -/* - ********** Implementations ********** -*/ + (@impl sigmoid: $T:ty) => { + impl Sigmoid for $T { + type Output = $T; -impl Sigmoid for Array -where - D: Dimension, - T: Clone + Sigmoid, -{ - type Output = Array<::Output, D>; + fn sigmoid(&self) -> Self::Output { + (<$T>::one() + (-self).exp()).recip() + } + } + }; + (@impl tanh: $T:ty) => { + impl Tanh for $T { + type Output = $T; - fn sigmoid(&self) -> Self::Output { - self.mapv(|x| x.sigmoid()) - } + fn tanh(&self) -> Self::Output { + <$T>::tanh(*self) + } + } + }; } -macro_rules! impl_sigmoid { - ($($T:ty),* $(,)?) => { +impl_nl!(relu: f32, f64); +impl_nl!(sigmoid: f32, f64, num::Complex, num::Complex); +impl_nl!(tanh: f32, f64, num::Complex, num::Complex); + +macro_rules! impl_rho_arr { + ($($name:ident.$call:ident),* $(,)?) => { $( - impl_sigmoid!(@base $T); + impl_rho_arr!(@impl $name.$call); )* }; - (@base $T:ty) => { - impl Sigmoid for $T { - type Output = $T; + (@impl $name:ident.$call:ident) => { + impl $name for ArrayBase + where + A: Clone + $name, + D: Dimension, + S: Data + { + type Output = Array<::Output, D>; - fn sigmoid(&self) -> Self::Output { - (<$T>::one() + (-self).exp()).recip() + fn $call(&self) -> Self::Output { + self.mapv(|x| x.$call()) } } }; } -impl_sigmoid!(f32, f64, num::Complex, num::Complex); +impl_rho_arr!(ReLU.relu, Sigmoid.sigmoid, Tanh.tanh); \ No newline at end of file diff --git a/core/src/macros.rs b/core/src/macros.rs index d181aaed..2f85bc95 100644 --- a/core/src/macros.rs +++ b/core/src/macros.rs @@ -75,12 +75,10 @@ macro_rules! variant_constructor { } macro_rules! impl_unary { - ($name:ident.$call:ident<$($T:ty),* $(,)?> -> $f:expr) => { - $( - impl_unary!(@base $name.$call<$T> -> $f); - )* + ($name:ident.$call:ident<$T:ty>($f:expr) $($rest:tt)*) => { + impl_unary!(@impl $name.$call<$T>($f) $($rest)*); }; - (@base $name:ident.$call:ident<$T:ty> -> $f:expr) => { + (@impl $name:ident.$call:ident<$T:ty>($f:expr)) => { impl $name for $T { type Output = $T; @@ -90,3 +88,18 @@ macro_rules! impl_unary { } }; } + +macro_rules! build_unary_trait { + ($($name:ident.$call:ident),* $(,)?) 
=> { + $( + build_unary_trait!(@impl $name.$call); + )* + }; + (@impl $name:ident.$call:ident) => { + pub trait $name { + type Output; + + fn $call(&self) -> Self::Output; + } + }; +} diff --git a/core/src/models/activate.rs b/core/src/models/activate.rs index 3f26775e..ae4f85f5 100644 --- a/core/src/models/activate.rs +++ b/core/src/models/activate.rs @@ -5,8 +5,8 @@ use crate::prelude::{Module, Predict, PredictError}; pub struct Activator { - activation: F, module: M, + rho: F, } impl Activator @@ -14,12 +14,12 @@ where F: for<'a> Fn(&'a M::Output) -> M::Output, M: Predict<::Params> + Module, { - pub fn new(activation: F, module: M) -> Self { - Self { activation, module } + pub fn new(module: M, rho: F,) -> Self { + Self { module, rho } } pub fn activate(&self, args: &M::Output) -> M::Output { - (self.activation)(args) + (self.rho)(args) } } diff --git a/core/src/ops/fft/cmp.rs b/core/src/ops/fft/cmp.rs index ffea7e6e..b22eb815 100644 --- a/core/src/ops/fft/cmp.rs +++ b/core/src/ops/fft/cmp.rs @@ -26,12 +26,12 @@ use strum::{ VariantArray, VariantNames, )] -#[repr(usize)] #[cfg_attr( feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(rename_all = "lowercase", untagged) )] +#[repr(usize)] #[strum(serialize_all = "lowercase")] pub enum FftDirection { #[default] @@ -83,12 +83,12 @@ impl From for usize { VariantArray, VariantNames, )] -#[repr(usize)] #[cfg_attr( feature = "serde", derive(serde::Deserialize, serde::Serialize), serde(rename_all = "lowercase", untagged) )] +#[repr(usize)] #[strum(serialize_all = "lowercase")] pub enum FftMode { #[default] diff --git a/core/src/ops/fft/mod.rs b/core/src/ops/fft/mod.rs index 5c4e2bb4..1595931a 100644 --- a/core/src/ops/fft/mod.rs +++ b/core/src/ops/fft/mod.rs @@ -28,148 +28,5 @@ pub(crate) mod prelude { #[cfg(test)] mod tests { - use super::*; - use crate::prelude::almost_equal; - use lazy_static::lazy_static; - use num::complex::{Complex, ComplexFloat}; - - pub(crate) fn fft_permutation(length: usize) -> Vec { - let mut result = Vec::new(); - result.reserve_exact(length); - for i in 0..length { - result.push(i); - } - let mut reverse = 0_usize; - let mut position = 1_usize; - while position < length { - let mut bit = length >> 1; - while bit & reverse != 0 { - reverse ^= bit; - bit >>= 1; - } - reverse ^= bit; - // This is equivalent to adding 1 to a reversed number - if position < reverse { - // Only swap each element once - result.swap(position, reverse); - } - position += 1; - } - result - } - - const EPSILON: f64 = 1e-6; - - lazy_static! 
{ - static ref EXPECTED_RFFT: Vec> = vec![ - Complex { re: 28.0, im: 0.0 }, - Complex { re: -4.0, im: 0.0 }, - Complex { - re: -4.0, - im: 1.6568542494923806 - }, - Complex { - re: -4.0, - im: 4.000000000000001 - }, - Complex { - re: -3.999999999999999, - im: 9.656854249492381 - } - ]; - } - - #[test] - fn test_plan() { - let samples = 16; - - let plan = FftPlan::new(samples); - assert_eq!(plan.plan(), fft_permutation(16).as_slice()); - } - - #[test] - #[ignore = "Needs to be fixed"] - fn test_rfft() { - let polynomial = (0..8).map(|i| i as f64).collect::>(); - let plan = FftPlan::new(polynomial.len()); - println!("Function Values: {:?}", &polynomial); - println!("Plan: {:?}", &plan); - let fft = rfft(&polynomial, &plan); - let mut tmp = fft - .iter() - .cloned() - .filter(|i| i.im() > 0.0) - .map(|i| i.conj()) - .collect::>(); - tmp.sort_by(|a, b| a.im().partial_cmp(&b.im()).unwrap()); - println!("FFT: {:?}", &tmp); - let mut res = fft.clone(); - res.sort_by(|a, b| a.re().partial_cmp(&b.re()).unwrap()); - res.sort_by(|a, b| a.im().partial_cmp(&b.im()).unwrap()); - println!("R: {:?}", &res); - res.extend(tmp); - assert!(fft.len() == EXPECTED_RFFT.len()); - for (x, y) in fft.iter().zip(EXPECTED_RFFT.iter()) { - assert!(almost_equal(x.re(), y.re(), EPSILON)); - assert!(almost_equal(x.im(), y.im(), EPSILON)); - } - // let plan = FftPlan::new(fft.len()); - let ifft = irfft(&res, &plan); - println!("Inverse: {:?}", &ifft); - for (x, y) in ifft.iter().zip(polynomial.iter()) { - assert!(almost_equal(*x, *y, EPSILON)); - } - } - - #[test] - fn small_polynomial_returns_self() { - let polynomial = vec![1.0f64, 1.0, 0.0, 2.5]; - let permutation = FftPlan::new(polynomial.len()); - let fft = fft(&polynomial, &permutation); - let ifft = ifft(&fft, &permutation) - .into_iter() - .map(|i| i.re()) - .collect::>(); - for (x, y) in ifft.iter().zip(polynomial.iter()) { - assert!(almost_equal(*x, *y, EPSILON)); - } - } - - #[test] - fn square_small_polynomial() { - let mut polynomial = vec![1.0f64, 1.0, 0.0, 2.0]; - polynomial.append(&mut vec![0.0; 4]); - let permutation = FftPlan::new(polynomial.len()); - let mut fft = fft(&polynomial, &permutation); - fft.iter_mut().for_each(|num| *num *= *num); - let ifft = ifft(&fft, &permutation) - .into_iter() - .map(|i| i.re()) - .collect::>(); - let expected = [1.0, 2.0, 1.0, 4.0, 4.0, 0.0, 4.0, 0.0, 0.0]; - for (x, y) in ifft.iter().zip(expected.iter()) { - assert!(almost_equal(*x, *y, EPSILON)); - } - } - - #[test] - #[ignore] - fn square_big_polynomial() { - // This test case takes ~1050ms on my machine in unoptimized mode, - // but it takes ~70ms in release mode. 
-        let n = 1 << 17; // ~100_000
-        let mut polynomial = vec![1.0f64; n];
-        polynomial.append(&mut vec![0.0f64; n]);
-        let permutation = FftPlan::new(polynomial.len());
-        let mut fft = fft(&polynomial, &permutation);
-        fft.iter_mut().for_each(|num| *num *= *num);
-        let ifft = irfft(&fft, &permutation)
-            .into_iter()
-            .map(|i| i.re())
-            .collect::<Vec<_>>();
-        let expected = (0..((n << 1) - 1)).map(|i| std::cmp::min(i + 1, (n << 1) - 1 - i) as f64);
-        for (&x, y) in ifft.iter().zip(expected) {
-            assert!(almost_equal(x, y, EPSILON));
-        }
-    }
+
 }
diff --git a/core/src/ops/fft/plan.rs b/core/src/ops/fft/plan.rs
index 933d2d69..6b1c42dd 100644
--- a/core/src/ops/fft/plan.rs
+++ b/core/src/ops/fft/plan.rs
@@ -2,7 +2,11 @@
     Appellation: plan
     Contrib: FL03
 */
-use crate::rust::{slice, vec};
+#[cfg(no_std)]
+use alloc::vec::{self, Vec};
+use core::slice;
+#[cfg(not(no_std))]
+use std::vec;
 
 #[derive(Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
 #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
diff --git a/core/src/traits/nn/model.rs b/core/src/traits/nn/model.rs
index aa648101..61269054 100644
--- a/core/src/traits/nn/model.rs
+++ b/core/src/traits/nn/model.rs
@@ -13,3 +13,13 @@ pub trait Model {
 pub trait ModelBackend {
     type Engine;
 }
+
+
+pub trait NeuralNetworkStack {
+    const NHIDDEN: Option<usize> = None;
+    type Input;
+    type Hidden;
+    type Output;
+
+
+}
\ No newline at end of file
diff --git a/core/tests/fft.rs b/core/tests/fft.rs
new file mode 100644
index 00000000..ec29edbb
--- /dev/null
+++ b/core/tests/fft.rs
@@ -0,0 +1,148 @@
+/*
+    Appellation: default
+    Contrib: FL03
+*/
+
+use concision_core::ops::fft::*;
+use concision_core::prelude::almost_equal;
+use lazy_static::lazy_static;
+use num::complex::{Complex, ComplexFloat};
+
+const EPSILON: f64 = 1e-6;
+
+fn fft_permutation(length: usize) -> Vec<usize> {
+    let mut result = Vec::new();
+    result.reserve_exact(length);
+    for i in 0..length {
+        result.push(i);
+    }
+    let mut reverse = 0_usize;
+    let mut position = 1_usize;
+    while position < length {
+        let mut bit = length >> 1;
+        while bit & reverse != 0 {
+            reverse ^= bit;
+            bit >>= 1;
+        }
+        reverse ^= bit;
+        // This is equivalent to adding 1 to a reversed number
+        if position < reverse {
+            // Only swap each element once
+            result.swap(position, reverse);
+        }
+        position += 1;
+    }
+    result
+}
+
+lazy_static! 
{
+    static ref EXPECTED_RFFT: Vec<Complex<f64>> = vec![
+        Complex { re: 28.0, im: 0.0 },
+        Complex { re: -4.0, im: 0.0 },
+        Complex {
+            re: -4.0,
+            im: 1.6568542494923806
+        },
+        Complex {
+            re: -4.0,
+            im: 4.000000000000001
+        },
+        Complex {
+            re: -3.999999999999999,
+            im: 9.656854249492381
+        }
+    ];
+}
+
+#[test]
+fn test_plan() {
+    let samples = 16;
+
+    let plan = FftPlan::new(samples);
+    assert_eq!(plan.plan(), fft_permutation(16).as_slice());
+}
+
+#[test]
+#[ignore = "Needs to be fixed"]
+fn test_rfft() {
+    let polynomial = (0..8).map(|i| i as f64).collect::<Vec<_>>();
+    let plan = FftPlan::new(polynomial.len());
+    println!("Function Values: {:?}", &polynomial);
+    println!("Plan: {:?}", &plan);
+    let fft = rfft(&polynomial, &plan);
+    let mut tmp = fft
+        .iter()
+        .cloned()
+        .filter(|i| i.im() > 0.0)
+        .map(|i| i.conj())
+        .collect::<Vec<_>>();
+    tmp.sort_by(|a, b| a.im().partial_cmp(&b.im()).unwrap());
+    println!("FFT: {:?}", &tmp);
+    let mut res = fft.clone();
+    res.sort_by(|a, b| a.re().partial_cmp(&b.re()).unwrap());
+    res.sort_by(|a, b| a.im().partial_cmp(&b.im()).unwrap());
+    println!("R: {:?}", &res);
+    res.extend(tmp);
+    assert!(fft.len() == EXPECTED_RFFT.len());
+    for (x, y) in fft.iter().zip(EXPECTED_RFFT.iter()) {
+        assert!(almost_equal(x.re(), y.re(), EPSILON));
+        assert!(almost_equal(x.im(), y.im(), EPSILON));
+    }
+    // let plan = FftPlan::new(fft.len());
+    let ifft = dbg!(irfft(&res, &plan));
+    for (x, y) in ifft.iter().zip(polynomial.iter()) {
+        assert!(almost_equal(*x, *y, EPSILON));
+    }
+}
+
+#[test]
+fn small_polynomial_returns_self() {
+    let polynomial = vec![1.0f64, 1.0, 0.0, 2.5];
+    let permutation = FftPlan::new(polynomial.len());
+    let fft = fft(&polynomial, &permutation);
+    let ifft = ifft(&fft, &permutation)
+        .into_iter()
+        .map(|i| i.re())
+        .collect::<Vec<_>>();
+    for (x, y) in ifft.iter().zip(polynomial.iter()) {
+        assert!(almost_equal(*x, *y, EPSILON));
+    }
+}
+
+#[test]
+fn square_small_polynomial() {
+    let mut polynomial = vec![1.0f64, 1.0, 0.0, 2.0];
+    polynomial.append(&mut vec![0.0; 4]);
+    let permutation = FftPlan::new(polynomial.len());
+    let mut fft = fft(&polynomial, &permutation);
+    fft.iter_mut().for_each(|num| *num *= *num);
+    let ifft = ifft(&fft, &permutation)
+        .into_iter()
+        .map(|i| i.re())
+        .collect::<Vec<_>>();
+    let expected = [1.0, 2.0, 1.0, 4.0, 4.0, 0.0, 4.0, 0.0, 0.0];
+    for (x, y) in ifft.iter().zip(expected.iter()) {
+        assert!(almost_equal(*x, *y, EPSILON));
+    }
+}
+
+#[test]
+#[ignore]
+fn square_big_polynomial() {
+    // This test case takes ~1050ms on my machine in unoptimized mode,
+    // but it takes ~70ms in release mode. 
+ let n = 1 << 17; // ~100_000 + let mut polynomial = vec![1.0f64; n]; + polynomial.append(&mut vec![0.0f64; n]); + let permutation = FftPlan::new(polynomial.len()); + let mut fft = fft(&polynomial, &permutation); + fft.iter_mut().for_each(|num| *num *= *num); + let ifft = irfft(&fft, &permutation) + .into_iter() + .map(|i| i.re()) + .collect::>(); + let expected = (0..((n << 1) - 1)).map(|i| std::cmp::min(i + 1, (n << 1) - 1 - i) as f64); + for (&x, y) in ifft.iter().zip(expected) { + assert!(almost_equal(x, y, EPSILON)); + } +} \ No newline at end of file From 0d1d5554f248cbfaf43228ee93eefc4566e71af2 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Sat, 4 May 2024 11:50:57 -0500 Subject: [PATCH 07/15] update Signed-off-by: Joe McCain III --- core/src/params/parameter.rs | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/core/src/params/parameter.rs b/core/src/params/parameter.rs index 6792ed58..e469a131 100644 --- a/core/src/params/parameter.rs +++ b/core/src/params/parameter.rs @@ -13,7 +13,6 @@ use uuid::Uuid; #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] pub struct Parameter where - T: Float, D: Dimension, { pub(crate) id: String, @@ -25,21 +24,20 @@ where impl Parameter where - T: Float, D: Dimension, { pub fn new( features: impl IntoDimension, kind: ParamKind, name: impl ToString, - ) -> Self { + ) -> Self where T: Clone + Default { let features = features.into_dimension(); Self { id: Uuid::new_v4().to_string(), features: features.clone(), kind, name: name.to_string(), - value: Array::zeros(features), + value: Array::default(features), } } From 544719b2521674eb216c7a29ed5a65b195b7a7e0 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Sat, 4 May 2024 12:47:17 -0500 Subject: [PATCH 08/15] update Signed-off-by: Joe McCain III --- core/src/error/err.rs | 13 ++-- core/src/error/kinds/external.rs | 3 - core/src/error/kinds/predict.rs | 11 +-- core/src/error/mod.rs | 25 +++++++ core/src/func/activate.rs | 15 ++++- core/src/func/activate/binary.rs | 36 +++++++++- core/src/func/activate/nl.rs | 112 ++++++++++++++++--------------- core/src/func/mod.rs | 1 + core/src/macros.rs | 15 ++--- core/src/models/activate.rs | 2 +- core/src/models/error.rs | 3 - core/src/ops/fft/mod.rs | 4 +- core/src/params/parameter.rs | 9 ++- core/src/traits/adjust.rs | 36 ++++++++++ core/src/traits/math.rs | 2 +- core/src/traits/mod.rs | 28 +------- core/src/traits/nn/model.rs | 5 +- core/tests/fft.rs | 2 +- 18 files changed, 197 insertions(+), 125 deletions(-) create mode 100644 core/src/traits/adjust.rs diff --git a/core/src/error/err.rs b/core/src/error/err.rs index 04088d76..035656db 100644 --- a/core/src/error/err.rs +++ b/core/src/error/err.rs @@ -23,7 +23,7 @@ use strum::{AsRefStr, Display, EnumCount, EnumIs, VariantNames}; #[cfg_attr( feature = "serde", derive(serde::Deserialize, serde::Serialize), - serde(rename_all = "lowercase", untagged) + serde(rename_all = "lowercase", tag = "kind") )] #[strum(serialize_all = "lowercase")] pub enum Error { @@ -34,9 +34,6 @@ pub enum Error { Shape(String), } -#[cfg(feature = "std")] -impl std::error::Error for Error {} - // impl core::fmt::Display for Error { // fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { // let msg = match self { @@ -66,6 +63,8 @@ macro_rules! 
     };
 }
 
-from_err!(External);
-from_err!(Model);
-from_err!(Predict);
+from_err!(
+    External,
+    Model,
+    Predict,
+);
diff --git a/core/src/error/kinds/external.rs b/core/src/error/kinds/external.rs
index 61a4e72f..2c35f1a4 100644
--- a/core/src/error/kinds/external.rs
+++ b/core/src/error/kinds/external.rs
@@ -50,9 +50,6 @@ impl ExternalError {
     }
 }
 
-#[cfg(feature = "std")]
-impl std::error::Error for ExternalError {}
-
 impl core::fmt::Display for ExternalError {
     fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result {
         let msg = match self {
diff --git a/core/src/error/kinds/predict.rs b/core/src/error/kinds/predict.rs
index 40ccfe20..114657c1 100644
--- a/core/src/error/kinds/predict.rs
+++ b/core/src/error/kinds/predict.rs
@@ -35,7 +35,10 @@ pub enum PredictError {
     TypeError,
 }
 
-impl PredictError {}
-
-#[cfg(feature = "std")]
-impl std::error::Error for PredictError {}
+impl PredictError {
+    variant_constructor!(
+        ArithmeticError.arithmetic_error,
+        ShapeMismatch.shape_mismatch,
+        TypeError.type_error
+    );
+}
diff --git a/core/src/error/mod.rs b/core/src/error/mod.rs
index 940d085f..d34fc1d0 100644
--- a/core/src/error/mod.rs
+++ b/core/src/error/mod.rs
@@ -6,6 +6,31 @@ pub use self::prelude::*;
 
 mod err;
 
+pub trait ErrKind {}
+
+macro_rules! impl_error_type {
+    ($($ty:ty),* $(,)*) => {
+        $(impl_error_type!(@impl $ty);)*
+    };
+    (@impl $ty:ty) => {
+        impl ErrKind for $ty {}
+
+        impl_error_type!(@std $ty);
+    };
+    (@std $ty:ty) => {
+
+        #[cfg(feature = "std")]
+        impl std::error::Error for $ty {}
+    };
+}
+
+impl_error_type!(
+    err::Error,
+    kinds::ExternalError,
+    kinds::PredictError,
+    crate::models::ModelError
+);
+
 pub mod kinds {
     pub use self::prelude::*;
 
diff --git a/core/src/func/activate.rs b/core/src/func/activate.rs
index 256e0477..4b8944d0 100644
--- a/core/src/func/activate.rs
+++ b/core/src/func/activate.rs
@@ -2,7 +2,7 @@
     Appellation: activate
     Contrib: FL03
 */
-pub use self::prelude::*;
+pub use self::{binary::*, nl::*};
 
 pub mod binary;
 pub mod nl;
@@ -14,8 +14,21 @@ where
     x.clone()
 }
 
+build_unary_trait!(LinearActivation.linear);
+
+impl<T> LinearActivation for T
+where
+    T: Clone,
+{
+    type Output = T;
+
+    fn linear(&self) -> Self::Output {
+        linear(self)
+    }
+}
 
 pub(crate) mod prelude {
+    pub use super::binary::*;
     pub use super::nl::*;
+    pub use super::{linear, LinearActivation};
 }
diff --git a/core/src/func/activate/binary.rs b/core/src/func/activate/binary.rs
index 79b5f8a4..b45a0588 100644
--- a/core/src/func/activate/binary.rs
+++ b/core/src/func/activate/binary.rs
@@ -1,9 +1,11 @@
 /*
-    Appellation: binary
+    Appellation: binary
     Contrib: FL03
 */
+use nd::{Array, ArrayBase, Data, Dimension};
 use num::{One, Zero};
 
+/// The Heaviside step function: one for strictly positive inputs, zero otherwise.
 pub fn heavyside<T>(x: &T) -> T
 where
     T: One + PartialOrd + Zero,
 {
     if x > &T::zero() {
         T::one()
     } else {
         T::zero()
     }
 }
 
-build_unary_trait!(Heavyside.heavyside,);
\ No newline at end of file
+build_unary_trait!(Heavyside.heavyside,);
+
+macro_rules!
impl_heavyside { + ($($ty:ty),* $(,)*) => { + $(impl_heavyside!(@impl $ty);)* + }; + (@impl $ty:ty) => { + impl Heavyside for $ty { + type Output = $ty; + + fn heavyside(&self) -> Self::Output { + heavyside(self) + } + } + }; +} + +impl_heavyside!(f32, f64, i8, i16, i32, i64, i128, isize, u8, u16, u32, u64, u128, usize,); + +impl Heavyside for ArrayBase +where + A: Heavyside, + D: Dimension, + S: Data, +{ + type Output = Array<::Output, D>; + + fn heavyside(&self) -> Self::Output { + self.map(Heavyside::heavyside) + } +} diff --git a/core/src/func/activate/nl.rs b/core/src/func/activate/nl.rs index 5b6af651..1a1d1f14 100644 --- a/core/src/func/activate/nl.rs +++ b/core/src/func/activate/nl.rs @@ -4,25 +4,23 @@ */ use ndarray::*; use num::complex::ComplexFloat; -use num::{Float, One, Zero}; +use num::{Float, Zero}; pub fn relu(args: &T) -> T where T: Clone + PartialOrd + Zero, { if args > &T::zero() { - args.clone() - } else { - T::zero() + return args.clone(); } + T::zero() } -pub fn sigmoid(args: &Array) -> Array<::Output, D> +pub fn sigmoid(args: &T) -> T where - D: Dimension, - T: Clone + Sigmoid, + T: ComplexFloat, { - args.mapv(|x| x.sigmoid()) + (T::one() + (*args).neg().exp()).recip() } pub fn softmax(args: &Array) -> Array @@ -49,12 +47,11 @@ where } } -pub fn tanh(args: &Array) -> Array<::Output, D> +pub fn tanh(args: &T) -> T where - D: Dimension, - T: Clone + Tanh, + T: ComplexFloat, { - args.mapv(|x| x.tanh()) + args.tanh() } build_unary_trait!(ReLU.relu, Sigmoid.sigmoid, Softmax.softmax, Tanh.tanh,); @@ -62,70 +59,75 @@ build_unary_trait!(ReLU.relu, Sigmoid.sigmoid, Softmax.softmax, Tanh.tanh,); /* ********** Implementations ********** */ - -macro_rules! impl_nl { - ($name:ident: $($T:ty),* $(,)?) => { +macro_rules! nonlinear { + ($($rho:ident<$($T:ty),* $(,)?>::$call:ident),* $(,)? ) => { $( - impl_nl!(@impl $name: $T); + nonlinear!(@loop $rho<$($T),*>::$call); )* }; - (@impl relu: $T:ty) => { - impl ReLU for $T { - type Output = $T; + (@loop $rho:ident<$($T:ty),* $(,)?>::$call:ident ) => { + $( + nonlinear!(@impl $rho<$T>::$call); + )* - fn relu(&self) -> Self::Output { - if *self > <$T>::zero() { - *self - } else { - <$T>::zero() - } - } - } + nonlinear!(@arr $rho::$call); }; - (@impl sigmoid: $T:ty) => { - impl Sigmoid for $T { + (@impl $rho:ident<$T:ty>::$call:ident) => { + impl $rho for $T { type Output = $T; - fn sigmoid(&self) -> Self::Output { - (<$T>::one() + (-self).exp()).recip() + fn $call(&self) -> Self::Output { + $call(self) } } - }; - (@impl tanh: $T:ty) => { - impl Tanh for $T { + + impl<'a> $rho for &'a $T { type Output = $T; - fn tanh(&self) -> Self::Output { - <$T>::tanh(*self) + fn $call(&self) -> Self::Output { + $call(*self) } } - }; -} -impl_nl!(relu: f32, f64); -impl_nl!(sigmoid: f32, f64, num::Complex, num::Complex); -impl_nl!(tanh: f32, f64, num::Complex, num::Complex); - -macro_rules! impl_rho_arr { - ($($name:ident.$call:ident),* $(,)?) 
=> { - $( - impl_rho_arr!(@impl $name.$call); - )* }; - (@impl $name:ident.$call:ident) => { - impl $name for ArrayBase - where - A: Clone + $name, - D: Dimension, - S: Data + (@arr $name:ident::$call:ident) => { + impl $name for ArrayBase + where + A: Clone + $name, + D: Dimension, + S: Data { type Output = Array<::Output, D>; fn $call(&self) -> Self::Output { - self.mapv(|x| x.$call()) + self.map($name::$call) } } }; + } -impl_rho_arr!(ReLU.relu, Sigmoid.sigmoid, Tanh.tanh); \ No newline at end of file +nonlinear!( + ReLU < f32, + f64, + i8, + i16, + i32, + i64, + i128, + isize, + u8, + u16, + u32, + u64, + u128, + usize > ::relu, + Sigmoid < f32, + f64, + num::Complex, + num::Complex < f64 >> ::sigmoid, + Tanh < f32, + f64, + num::Complex, + num::Complex < f64 >> ::tanh, +); diff --git a/core/src/func/mod.rs b/core/src/func/mod.rs index 71366ec4..87f3fd5c 100644 --- a/core/src/func/mod.rs +++ b/core/src/func/mod.rs @@ -2,6 +2,7 @@ Appellation: func Contrib: FL03 */ +//! Functional pub use self::prelude::*; pub mod activate; diff --git a/core/src/macros.rs b/core/src/macros.rs index 2f85bc95..cbded439 100644 --- a/core/src/macros.rs +++ b/core/src/macros.rs @@ -51,25 +51,20 @@ macro_rules! nested_constructor { } macro_rules! variant_constructor { - ($(($($rest:tt),*)),*) => { + ($($rest:tt),* $(,)?) => { $( variant_constructor!(@loop $($rest),*); )* }; - ($(($variant:ident $($rest:tt),*, $method:ident)),*) => { + ($($variant:ident.$method:ident$(($call:expr))?),* $(,)?) => { $( - variant_constructor!(@loop $variant $($rest),*, $method); + variant_constructor!(@loop $variant.$method$(($call))?); )* }; - (@loop $variant:ident, $method:ident) => { - pub fn $method() -> Self { - Self::$variant - } - }; - (@loop $variant:ident($call:expr), $method:ident) => { + (@loop $variant:ident.$method:ident$(($call:expr))?) => { pub fn $method() -> Self { - Self::$variant($call()) + Self::$variant$(($call))? 
} }; } diff --git a/core/src/models/activate.rs b/core/src/models/activate.rs index ae4f85f5..f5ab3413 100644 --- a/core/src/models/activate.rs +++ b/core/src/models/activate.rs @@ -14,7 +14,7 @@ where F: for<'a> Fn(&'a M::Output) -> M::Output, M: Predict<::Params> + Module, { - pub fn new(module: M, rho: F,) -> Self { + pub fn new(module: M, rho: F) -> Self { Self { module, rho } } diff --git a/core/src/models/error.rs b/core/src/models/error.rs index aae4d586..e1ece894 100644 --- a/core/src/models/error.rs +++ b/core/src/models/error.rs @@ -36,6 +36,3 @@ impl ModelError { // }) } - -#[cfg(feature = "std")] -impl std::error::Error for ModelError {} diff --git a/core/src/ops/fft/mod.rs b/core/src/ops/fft/mod.rs index 1595931a..a0d766ab 100644 --- a/core/src/ops/fft/mod.rs +++ b/core/src/ops/fft/mod.rs @@ -27,6 +27,4 @@ pub(crate) mod prelude { } #[cfg(test)] -mod tests { - -} +mod tests {} diff --git a/core/src/params/parameter.rs b/core/src/params/parameter.rs index e469a131..7f48c8b2 100644 --- a/core/src/params/parameter.rs +++ b/core/src/params/parameter.rs @@ -26,11 +26,10 @@ impl Parameter where D: Dimension, { - pub fn new( - features: impl IntoDimension, - kind: ParamKind, - name: impl ToString, - ) -> Self where T: Clone + Default { + pub fn new(features: impl IntoDimension, kind: ParamKind, name: impl ToString) -> Self + where + T: Clone + Default, + { let features = features.into_dimension(); Self { id: Uuid::new_v4().to_string(), diff --git a/core/src/traits/adjust.rs b/core/src/traits/adjust.rs new file mode 100644 index 00000000..5c62faf3 --- /dev/null +++ b/core/src/traits/adjust.rs @@ -0,0 +1,36 @@ +/* + Appellation: adjust + Contrib: FL03 +*/ +use nd::{Axis, RemoveAxis}; + +/// Decrement generally describes an object capable of _decrementing_ itself; +/// +/// Here, it is used on a [Dimension](ndarray::Dimension) enabling it to +/// remove and return an axis from itself. 
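+///
+/// A minimal usage sketch (doc-test ignored; assumes `Decrement` is in scope):
+///
+/// ```ignore
+/// use ndarray::{Ix1, Ix2};
+///
+/// let dim = Ix2(3, 4);          // a two-dimensional shape
+/// let rows: Ix1 = dim.dec();    // drops the trailing axis, leaving Ix1(3)
+/// assert_eq!(rows, Ix1(3));
+/// ```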
+pub trait Decrement { + type Output; + + fn dec(&self) -> Self::Output; +} + +pub trait Increment { + type Output; + + fn inc(&self) -> Self::Output; +} + + +/* + ******** implementations ******** +*/ +impl Decrement for D +where + D: RemoveAxis, +{ + type Output = D::Smaller; + + fn dec(&self) -> Self::Output { + self.remove_axis(Axis(self.ndim() - 1)) + } +} \ No newline at end of file diff --git a/core/src/traits/math.rs b/core/src/traits/math.rs index 31396619..ba0f2ad3 100644 --- a/core/src/traits/math.rs +++ b/core/src/traits/math.rs @@ -1,5 +1,5 @@ /* - Appellation: ops + Appellation: math Contrib: FL03 */ use nd::{Array, Dimension}; diff --git a/core/src/traits/mod.rs b/core/src/traits/mod.rs index 021aeda6..73c4c757 100644 --- a/core/src/traits/mod.rs +++ b/core/src/traits/mod.rs @@ -4,6 +4,7 @@ */ pub use self::prelude::*; +pub mod adjust; pub mod math; pub mod setup; pub mod store; @@ -32,17 +33,6 @@ pub mod nn { } } -pub trait Decrement { - type Output; - - fn dec(&self) -> Self::Output; -} - -pub trait Increment { - type Output; - - fn inc(&self) -> Self::Output; -} pub trait Transform { type Output; @@ -50,23 +40,11 @@ pub trait Transform { fn transform(&self, args: &T) -> Self::Output; } -/* - ******** implementations ******** -*/ -impl Decrement for D -where - D: nd::RemoveAxis, -{ - type Output = D::Smaller; - - fn dec(&self) -> Self::Output { - self.remove_axis(nd::Axis(self.ndim() - 1)) - } -} pub(crate) mod prelude { - pub use super::{Decrement, Transform}; + pub use super::Transform; + pub use super::adjust::*; pub use super::arr::*; pub use super::math::*; pub use super::nn::prelude::*; diff --git a/core/src/traits/nn/model.rs b/core/src/traits/nn/model.rs index 61269054..d1d711c3 100644 --- a/core/src/traits/nn/model.rs +++ b/core/src/traits/nn/model.rs @@ -14,12 +14,9 @@ pub trait ModelBackend { type Engine; } - pub trait NeuralNetworkStack { const NHIDDEN: Option = None; type Input; type Hidden; type Output; - - -} \ No newline at end of file +} diff --git a/core/tests/fft.rs b/core/tests/fft.rs index ec29edbb..fa6c371a 100644 --- a/core/tests/fft.rs +++ b/core/tests/fft.rs @@ -145,4 +145,4 @@ fn square_big_polynomial() { for (&x, y) in ifft.iter().zip(expected) { assert!(almost_equal(x, y, EPSILON)); } -} \ No newline at end of file +} From 2710d655521202a24b033db560ea97e9764cbcdd Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Sat, 4 May 2024 12:48:05 -0500 Subject: [PATCH 09/15] update Signed-off-by: Joe McCain III --- .github/workflows/rust.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml index ccc0b581..2ed6c641 100644 --- a/.github/workflows/rust.yml +++ b/.github/workflows/rust.yml @@ -74,6 +74,7 @@ jobs: - name: Test run: cargo test --features full -v --workspace blas: + continue-on-error: true name: Test (blas) strategy: matrix: From 4c81d0971421f8bfb5ed76736f9a0631377aebf7 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Sat, 4 May 2024 13:18:30 -0500 Subject: [PATCH 10/15] update Signed-off-by: Joe McCain III --- Cargo.toml | 6 ++--- core/Cargo.toml | 1 + core/src/error/err.rs | 2 +- core/src/error/kinds/external.rs | 2 +- core/src/macros.rs | 8 +++--- core/src/rand/generate.rs | 43 ++++++++++++++++---------------- core/src/rand/utils.rs | 1 - core/src/traits/adjust.rs | 5 ++-- core/src/traits/mod.rs | 2 -- data/Cargo.toml | 3 ++- models/gnn/Cargo.toml | 1 + models/linear/Cargo.toml | 14 ++++------- models/linear/src/lib.rs | 2 +- nn/neural/Cargo.toml | 20 +++++++-------- 
nn/nlp/Cargo.toml | 1 + nn/optim/Cargo.toml | 11 ++++---- nn/s4/Cargo.toml | 10 ++++---- 17 files changed, 63 insertions(+), 69 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index f7da7b99..111c6193 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,14 +18,14 @@ version = "0.1.12" # TODO - Update the cargo package version approx = "0.5" itertools = "0.12" lazy_static = "1" -ndarray = "0.15" +ndarray = { default-features = false, version = "0.15" } ndarray-rand = "0.14" ndarray-stats = "0.5" -num = "0.4" +num = { default-features = false, version = "0.4" } # serde = { features = ["derive"], version = "1" } # serde_json = "1" smart-default = "0.7" -strum = { features = ["derive"], version = "0.26" } +strum = { default-features = false, features = ["derive"], version = "0.26" } [workspace] default-members = [ diff --git a/core/Cargo.toml b/core/Cargo.toml index 4e730903..a1bffdd4 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -52,6 +52,7 @@ serde-ext = [ std = [ "ndarray/std", "num/std", + "strum/std", "uuid/std" ] diff --git a/core/src/error/err.rs b/core/src/error/err.rs index 035656db..4175f4be 100644 --- a/core/src/error/err.rs +++ b/core/src/error/err.rs @@ -46,7 +46,7 @@ pub enum Error { // } #[cfg(feature = "std")] -impl_from_error!(Error::IO); +error_from!(Error::IO); macro_rules! from_err { ($($variant:ident<$err:ty>),* $(,)*) => { diff --git a/core/src/error/kinds/external.rs b/core/src/error/kinds/external.rs index 2c35f1a4..89bdc0e0 100644 --- a/core/src/error/kinds/external.rs +++ b/core/src/error/kinds/external.rs @@ -67,4 +67,4 @@ impl From> for ExternalError { } } -impl_from_error!(ExternalError::Error<&str, String>); +error_from!(ExternalError::Error<&str, String>); diff --git a/core/src/macros.rs b/core/src/macros.rs index cbded439..fdcf3b65 100644 --- a/core/src/macros.rs +++ b/core/src/macros.rs @@ -4,16 +4,16 @@ */ #![allow(unused_macros)] -macro_rules! impl_from_error { +macro_rules! 
error_from { ($base:ident::$variant:ident<$($err:ty),* $(,)?>) => { - impl_from_error!(@loop $base::$variant<$($err),*>); + error_from!(@loop $base::$variant<$($err),*>); }; ($base:ident::$variant:ident<$err:ty>$($rest:tt)*) => { - impl_from_error!(@loop $base::$variant<$($err),*>$($rest)*); + error_from!(@loop $base::$variant<$($err),*>$($rest)*); }; (@loop $base:ident::$variant:ident<$($err:ty),* $(,)?>) => { $( - impl_from_error!(@impl $base::$variant<$err>); + error_from!(@impl $base::$variant<$err>); )* }; (@impl $base:ident::$variant:ident<$err:ty>) => { diff --git a/core/src/rand/generate.rs b/core/src/rand/generate.rs index 61e48c5f..9078df1b 100644 --- a/core/src/rand/generate.rs +++ b/core/src/rand/generate.rs @@ -2,14 +2,13 @@ Appellation: generate Contrib: FL03 */ -// #![cfg(feature = "rand")] use core::ops::Neg; -use ndarray::{Array, Dimension, IntoDimension, Ix2}; -use ndarray_rand::rand::rngs::StdRng; -use ndarray_rand::rand::{Rng, SeedableRng}; -use ndarray_rand::rand_distr::uniform::{SampleUniform, Uniform}; -use ndarray_rand::rand_distr::{Bernoulli, BernoulliError, Distribution, StandardNormal}; -use ndarray_rand::RandomExt; +use ndarray::*; +use ndrand::rand::rngs::StdRng; +use ndrand::rand::{Rng, SeedableRng}; +use ndrand::rand_distr::uniform::{SampleUniform, Uniform}; +use ndrand::rand_distr::{Bernoulli, BernoulliError, Distribution, StandardNormal}; +use ndrand::RandomExt; use num::traits::real::Real; use num::traits::Float; @@ -17,18 +16,16 @@ pub trait GenerateRandom: Sized where D: Dimension, { - fn rand(dim: impl IntoDimension, distr: IdS) -> Self + fn rand(dim: Sh, distr: IdS) -> Self where - IdS: Distribution; + IdS: Distribution, + Sh: ShapeBuilder; - fn rand_using( - dim: impl IntoDimension, - distr: IdS, - rng: &mut R, - ) -> Self + fn rand_using(dim: Sh, distr: IdS, rng: &mut R) -> Self where IdS: Distribution, - R: Rng; + R: Rng, + Sh: ShapeBuilder; fn bernoulli(dim: impl IntoDimension, p: Option) -> Result where @@ -80,18 +77,20 @@ where D: Dimension, StandardNormal: Distribution, { - fn rand(dim: impl IntoDimension, distr: IdS) -> Self + fn rand(dim: Sh, distr: Dtr) -> Self where - IdS: Distribution, + Dtr: Distribution, + Sh: ShapeBuilder, { - Self::random(dim.into_dimension(), distr) + Self::random(dim, distr) } - fn rand_using(dim: impl IntoDimension, distr: IdS, rng: &mut R) -> Self + fn rand_using(dim: Sh, distr: Dtr, rng: &mut R) -> Self where - IdS: Distribution, - R: Rng, + Dtr: Distribution, + R: Rng + ?Sized, + Sh: ShapeBuilder, { - Self::random_using(dim.into_dimension(), distr, rng) + Self::random_using(dim, distr, rng) } } diff --git a/core/src/rand/utils.rs b/core/src/rand/utils.rs index 614f1c49..cfcad3c4 100644 --- a/core/src/rand/utils.rs +++ b/core/src/rand/utils.rs @@ -2,7 +2,6 @@ Appellation: utils Contrib: FL03 */ -#![cfg(feature = "rand")] use ndarray::*; use ndrand::rand::rngs::StdRng; use ndrand::rand::SeedableRng; diff --git a/core/src/traits/adjust.rs b/core/src/traits/adjust.rs index 5c62faf3..e3790ff4 100644 --- a/core/src/traits/adjust.rs +++ b/core/src/traits/adjust.rs @@ -5,7 +5,7 @@ use nd::{Axis, RemoveAxis}; /// Decrement generally describes an object capable of _decrementing_ itself; -/// +/// /// Here, it is used on a [Dimension](ndarray::Dimension) enabling it to /// remove and return an axis from itself. 
pub trait Decrement { @@ -20,7 +20,6 @@ pub trait Increment { fn inc(&self) -> Self::Output; } - /* ******** implementations ******** */ @@ -33,4 +32,4 @@ where fn dec(&self) -> Self::Output { self.remove_axis(Axis(self.ndim() - 1)) } -} \ No newline at end of file +} diff --git a/core/src/traits/mod.rs b/core/src/traits/mod.rs index 73c4c757..e4335f2b 100644 --- a/core/src/traits/mod.rs +++ b/core/src/traits/mod.rs @@ -33,14 +33,12 @@ pub mod nn { } } - pub trait Transform { type Output; fn transform(&self, args: &T) -> Self::Output; } - pub(crate) mod prelude { pub use super::Transform; diff --git a/data/Cargo.toml b/data/Cargo.toml index 28c64576..0f85cb50 100644 --- a/data/Cargo.toml +++ b/data/Cargo.toml @@ -53,7 +53,8 @@ serde-ext = [ std = [ "concision-core/std", "ndarray/std", - "num/std" + "num/std", + "strum/std", ] tracing = [ diff --git a/models/gnn/Cargo.toml b/models/gnn/Cargo.toml index 89a153f3..438d6db1 100644 --- a/models/gnn/Cargo.toml +++ b/models/gnn/Cargo.toml @@ -56,6 +56,7 @@ std = [ "concision-core/std", "ndarray/std", "num/std", + "strum/std", ] tracing = [ diff --git a/models/linear/Cargo.toml b/models/linear/Cargo.toml index d56f85bd..677ae405 100644 --- a/models/linear/Cargo.toml +++ b/models/linear/Cargo.toml @@ -30,23 +30,18 @@ approx = [ "concision-core/approx", "ndarray/approx-0_5", ] + blas = [ "concision-core/blas", "ndarray/blas", ] rand = [ + "dep:ndarray-rand", "concision-core/rand", "num/rand" ] -# rand = [ -# "concision-core/rand", -# "dep:ndarray-rand", -# "num/rand" -# ] - - serde = [ "dep:serde", "serde-ext", @@ -63,6 +58,7 @@ std = [ "concision-core/std", "ndarray/std", "num/std", + "strum/std", ] tracing = [ @@ -89,8 +85,8 @@ required-features = ["rand"] concision-core = { features = ["rand"], path = "../../core", version = "0.1.12" } approx = { optional = true, version = "0.5" } -ndarray = "0.15" -ndarray-rand.workspace = true +ndarray.workspace = true +ndarray-rand = { optional = true, version = "0.14" } ndarray-stats.workspace = true num.workspace = true serde = { features = ["derive"], optional = true, version = "1" } diff --git a/models/linear/src/lib.rs b/models/linear/src/lib.rs index 3efa291a..7e91bf57 100644 --- a/models/linear/src/lib.rs +++ b/models/linear/src/lib.rs @@ -12,8 +12,8 @@ extern crate alloc; extern crate concision_core as concision; -// extern crate concision_neural as neural; extern crate ndarray as nd; +#[cfg(feature = "rand")] extern crate ndarray_rand as ndrand; extern crate ndarray_stats as stats; diff --git a/nn/neural/Cargo.toml b/nn/neural/Cargo.toml index c72362d3..387ca3b7 100644 --- a/nn/neural/Cargo.toml +++ b/nn/neural/Cargo.toml @@ -25,7 +25,8 @@ blas = [ std = [ "concision-core/std", "ndarray/std", - "num/std" + "num/std", + "strum/std", ] intel-mkl-system = [ @@ -69,21 +70,20 @@ test = true [dependencies] concision-core = { features = ["full"], path = "../../core", version = "0.1.12" } -anyhow.workspace = true -itertools.workspace = true +anyhow = "1" +itertools = "0.12" ndarray = { features = ["serde-1"], version = "0.15" } ndarray-linalg = "0.16" -ndarray-rand.workspace = true -ndarray-stats.workspace = true +ndarray-rand = "0.14" +ndarray-stats = "0.5" num = { features = ["rand", "serde"], version = "0.4" } petgraph = { features = ["serde-1"], version = "0.6" } -serde.workspace = true -serde_json.workspace = true -smart-default.workspace = true -strum.workspace = true +serde = { features = ["derive"], version = "1" } +serde_json = "1" +smart-default = "0.7" +strum = { features = ["derive"], version = 
"0.26" } [dev-dependencies] -# computare.workspace = true [package.metadata.docs.rs] all-features = true diff --git a/nn/nlp/Cargo.toml b/nn/nlp/Cargo.toml index 533632b6..5aa6fc48 100644 --- a/nn/nlp/Cargo.toml +++ b/nn/nlp/Cargo.toml @@ -38,6 +38,7 @@ serde-ext = [ std = [ "ndarray/std", "num/std", + "strum/std", ] [lib] diff --git a/nn/optim/Cargo.toml b/nn/optim/Cargo.toml index 757d1b14..0da4d329 100644 --- a/nn/optim/Cargo.toml +++ b/nn/optim/Cargo.toml @@ -32,13 +32,12 @@ test = false concision-core = { features = ["full"], path = "../../core", version = "0.1.12" } concision-neural = { path = "../neural" } -anyhow.workspace = true -itertools = { features = [], version = "0.12" } -lazy_static.workspace = true +anyhow = "1" +itertools = "0.12" ndarray = { features = ["serde-1"], version = "0.15" } -ndarray-rand.workspace = true -ndarray-stats.workspace = true -num.workspace = true +ndarray-rand = "0.14" +ndarray-stats = "0.5" +num = { features = ["rand", "serde"], version = "0.4" } rand = "0.8" serde = { features = ["derive"], version = "1" } serde_json = "1" diff --git a/nn/s4/Cargo.toml b/nn/s4/Cargo.toml index d131c5ab..a22733de 100644 --- a/nn/s4/Cargo.toml +++ b/nn/s4/Cargo.toml @@ -68,14 +68,14 @@ concision-core = { features = ["full"], path = "../../core", version = "0.1.12" concision-data = { features = ["full"], path = "../../data", version = "0.1.12" } concision-neural = { path = "../neural" } -anyhow.workspace = true -itertools.workspace = true -lazy_static.workspace = true +anyhow = "1" +itertools = "0.12" +lazy_static = "1" ndarray = { features = ["approx", "blas", "serde-1"], version = "0.15" } ndarray-conv = "0.3" ndarray-linalg = "0.16" -ndarray-rand.workspace = true -ndarray-stats.workspace = true +ndarray-rand = "0.14" +ndarray-stats = "0.5" ndrustfft = "0.4" num = { features = ["rand", "serde"], version = "0.4" } rand = "0.8" From abf79975fb6bef3ea2c0608dc44ae1fad974c53b Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Mon, 6 May 2024 09:24:37 -0500 Subject: [PATCH 11/15] update Signed-off-by: Joe McCain III --- SECURITY.md | 1 + models/linear/src/lib.rs | 2 +- models/linear/src/model/features.rs | 33 ++- models/linear/src/model/linear.rs | 4 +- models/linear/src/params/impls/impl_params.rs | 40 ++- models/linear/src/params/impls/impl_rand.rs | 12 +- models/linear/src/params/impls/impl_serde.rs | 22 +- models/linear/src/params/mod.rs | 20 +- models/linear/src/params/params.rs | 248 +++++++++--------- models/linear/tests/params.rs | 4 +- 10 files changed, 226 insertions(+), 160 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index 7ab9ca27..c722d0b6 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -9,6 +9,7 @@ Checkout the current and supported packages below | algae | 0.1.2 | <=0.1.0 | ## Reporting a Vulnerability + Email me at j3mccain@gmail.com to report any vulnerabilities. 
[Website](https://pzzld.eth.link/) \ No newline at end of file diff --git a/models/linear/src/lib.rs b/models/linear/src/lib.rs index 7e91bf57..314caafc 100644 --- a/models/linear/src/lib.rs +++ b/models/linear/src/lib.rs @@ -18,7 +18,7 @@ extern crate ndarray_rand as ndrand; extern crate ndarray_stats as stats; pub use self::model::{Config, Features, Linear}; -pub use self::params::LinearParams; +pub use self::params::LinearParamsBase; #[allow(unused_imports)] pub use self::{traits::*, utils::*}; diff --git a/models/linear/src/model/features.rs b/models/linear/src/model/features.rs index 93c24cf7..40854ce4 100644 --- a/models/linear/src/model/features.rs +++ b/models/linear/src/model/features.rs @@ -2,8 +2,7 @@ Appellation: features Contrib: FL03 */ -use ndarray::prelude::{Dimension, Ix2}; -use ndarray::IntoDimension; +use ndarray::{Dimension, IntoDimension, Ix2, ShapeBuilder}; #[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] @@ -17,8 +16,12 @@ impl Features { Self { inputs, outputs } } - pub fn from_dimension(shape: impl IntoDimension) -> Self { - let dim = shape.into_dimension(); + pub fn from_dimension(shape: Sh) -> Self + where + Sh: ShapeBuilder, + { + let shape = shape.into_shape(); + let dim = shape.raw_dim().clone(); let (outputs, inputs) = dim.into_pattern(); Self::new(inputs, outputs) } @@ -54,6 +57,28 @@ impl IntoDimension for Features { } } +impl TryFrom> for Features { + type Error = nd::ShapeError; + + fn try_from(shape: nd::ArrayView1<'_, usize>) -> Result { + use nd::{ErrorKind, ShapeError}; + if shape.len() == 1 { + let tmp = Self { + inputs: shape[0], + outputs: 1, + }; + return Ok(tmp); + } else if shape.len() >= 2 { + let tmp = Self { + inputs: shape[1], + outputs: shape[0], + }; + return Ok(tmp); + } + Err(ShapeError::from_kind(ErrorKind::IncompatibleShape)) + } +} + impl From for Ix2 { fn from(features: Features) -> Self { features.into_dimension() diff --git a/models/linear/src/model/linear.rs b/models/linear/src/model/linear.rs index 69240e1f..fdb6a16a 100644 --- a/models/linear/src/model/linear.rs +++ b/models/linear/src/model/linear.rs @@ -5,12 +5,12 @@ use crate::model::Config; use crate::params::LinearParams; use concision::prelude::{Module, Predict, PredictError}; -use ndarray::{Dimension, Ix2, RemoveAxis}; +use ndarray::{Ix2, RemoveAxis}; /// Linear model pub struct Linear where - D: Dimension, + D: RemoveAxis, { pub(crate) config: Config, pub(crate) params: LinearParams, diff --git a/models/linear/src/params/impls/impl_params.rs b/models/linear/src/params/impls/impl_params.rs index 009bae93..4dbfb82f 100644 --- a/models/linear/src/params/impls/impl_params.rs +++ b/models/linear/src/params/impls/impl_params.rs @@ -2,20 +2,19 @@ Appellation: params Contrib: FL03 */ -use crate::params::LinearParams; +use crate::params::LinearParamsBase; use crate::{Biased, Weighted}; use concision::prelude::{Predict, PredictError}; use core::ops::Add; use nd::linalg::Dot; use nd::*; -use num::Float; -impl Biased for LinearParams +impl Biased for LinearParamsBase where D: RemoveAxis, - T: Float, + S: RawData, { - type Bias = Array; + type Bias = ArrayBase; fn bias(&self) -> &Self::Bias { self.bias.as_ref().unwrap() @@ -30,11 +29,12 @@ where } } -impl Weighted for LinearParams +impl Weighted for LinearParamsBase where - D: Dimension, + D: RemoveAxis, + S: RawData, { - type Weight = Array; + type Weight = ArrayBase; fn weights(&self) -> &Self::Weight { &self.weights @@ -49,11 
+49,12 @@ where } } -impl Predict for LinearParams +impl Predict for LinearParamsBase where A: Dot, Output = B>, - B: for<'a> Add<&'a Array, Output = B>, + B: for<'a> Add<&'a ArrayBase, Output = B>, D: RemoveAxis, + S: Data, T: NdFloat, { type Output = B; @@ -68,11 +69,12 @@ where } } -impl<'a, A, B, T, D> Predict for &'a LinearParams +impl<'a, A, B, T, S, D> Predict for &'a LinearParamsBase where A: Dot, Output = B>, - B: Add<&'a Array, Output = B>, + B: Add<&'a ArrayBase, Output = B>, D: RemoveAxis, + S: Data, T: NdFloat, { type Output = B; @@ -86,3 +88,17 @@ where Ok(res) } } + +impl Clone for LinearParamsBase +where + A: Clone, + D: RemoveAxis, + S: RawDataClone, +{ + fn clone(&self) -> Self { + Self { + weights: self.weights.clone(), + bias: self.bias.clone(), + } + } +} diff --git a/models/linear/src/params/impls/impl_rand.rs b/models/linear/src/params/impls/impl_rand.rs index 326bcd97..f7350aa6 100644 --- a/models/linear/src/params/impls/impl_rand.rs +++ b/models/linear/src/params/impls/impl_rand.rs @@ -4,17 +4,17 @@ */ #![cfg(feature = "rand")] -use crate::params::LinearParams; +use crate::params::LinearParamsBase; use concision::prelude::GenerateRandom; use nd::*; use ndrand::rand_distr::{uniform, Distribution, StandardNormal}; use num::Float; -impl LinearParams +impl LinearParamsBase, D> where + A: Float + uniform::SampleUniform, D: RemoveAxis, - T: Float + uniform::SampleUniform, - StandardNormal: Distribution, + StandardNormal: Distribution, { pub fn init_uniform(mut self, biased: bool) -> Self { if biased { @@ -24,7 +24,7 @@ where } pub fn init_bias(mut self) -> Self { - let dk = (T::one() / T::from(self.inputs()).unwrap()).sqrt(); + let dk = (A::one() / A::from(self.inputs()).unwrap()).sqrt(); let dim = self .features() .remove_axis(Axis(self.features().ndim() - 1)); @@ -33,7 +33,7 @@ where } pub fn init_weight(mut self) -> Self { - let dk = (T::one() / T::from(self.inputs()).unwrap()).sqrt(); + let dk = (A::one() / A::from(self.inputs()).unwrap()).sqrt(); self.weights = Array::uniform_between(dk, self.features().clone()); self } diff --git a/models/linear/src/params/impls/impl_serde.rs b/models/linear/src/params/impls/impl_serde.rs index 313508ab..22d42ece 100644 --- a/models/linear/src/params/impls/impl_serde.rs +++ b/models/linear/src/params/impls/impl_serde.rs @@ -4,40 +4,38 @@ */ #![cfg(feature = "serde")] -use crate::params::{Entry, LinearParams}; +use crate::params::{Entry, LinearParamsBase}; use nd::*; use serde::{Deserialize, Deserializer, Serialize, Serializer}; -impl<'a, T, D> Deserialize<'a> for LinearParams +impl<'a, A, S, D> Deserialize<'a> for LinearParamsBase where - T: Deserialize<'a>, + A: Deserialize<'a>, D: Deserialize<'a> + RemoveAxis, + S: DataOwned, ::Smaller: Deserialize<'a> + Dimension, { fn deserialize(deserializer: Der) -> Result where Der: Deserializer<'a>, { - let (bias, features, weights) = Deserialize::deserialize(deserializer)?; - Ok(Self { - bias, - features, - weights, - }) + let (bias, weights) = Deserialize::deserialize(deserializer)?; + Ok(Self { bias, weights }) } } -impl Serialize for LinearParams +impl Serialize for LinearParamsBase where - T: Serialize, + A: Serialize, D: RemoveAxis + Serialize, + S: Data, ::Smaller: Dimension + Serialize, { fn serialize(&self, serializer: Ser) -> Result where Ser: Serializer, { - (self.bias(), self.features(), self.weights()).serialize(serializer) + (self.bias(), self.weights()).serialize(serializer) } } diff --git a/models/linear/src/params/mod.rs b/models/linear/src/params/mod.rs index 
e7d1c9bf..2f747ecb 100644 --- a/models/linear/src/params/mod.rs +++ b/models/linear/src/params/mod.rs @@ -13,9 +13,20 @@ mod impls { mod impl_serde; } +macro_rules! params_ty { + ($($name:ident<$repr:ident>),* $(,)?) => { + $(params_ty!(@impl $name<$repr>);)* + }; + (@impl $name:ident<$repr:ident>) => { + pub type $name = LinearParamsBase, D>; + }; +} + +params_ty!(LinearParams, LinearParamsShared,); + pub(crate) mod prelude { pub use super::entry::{Entry as LinearEntry, Param as LinearParam}; - pub use super::params::LinearParams; + pub use super::LinearParams; } #[cfg(test)] @@ -30,4 +41,11 @@ mod tests { assert_eq!(i.0, kind); } } + + #[test] + fn test_ones() { + let a = LinearParams::::ones(false, (1, 300)).biased(nd::Array1::ones); + + assert!(a.is_biased()); + } } diff --git a/models/linear/src/params/params.rs b/models/linear/src/params/params.rs index 589db448..b695c01e 100644 --- a/models/linear/src/params/params.rs +++ b/models/linear/src/params/params.rs @@ -7,7 +7,7 @@ use crate::model::Features; use core::ops; use nd::linalg::Dot; use nd::*; -use num::{Float, One, Zero}; +use num::{One, Zero}; #[cfg(no_std)] use alloc::vec; @@ -16,101 +16,91 @@ use std::vec; pub(crate) type Node = (Array, Option>); -#[derive(Clone, Debug, Eq, PartialEq)] -pub struct LinearParams +macro_rules! constructor { + ($call:ident where $($rest:tt)*) => { + constructor!(@impl $call where $($rest)*); + }; + (@impl $call:ident where $($rest:tt)*) => { + pub fn $call(biased: bool, shape: Sh) -> LinearParamsBase + where + Sh: ndarray::ShapeBuilder, + $($rest)* + { + let shape = shape.into_shape(); + let dim = shape.raw_dim().clone(); + Self { + bias: build_bias(biased, dim.clone(), |dim| ArrayBase::$call(dim)), + weights: ArrayBase::$call(dim), + } + } + }; +} + +pub struct LinearParamsBase where - D: Dimension, + D: RemoveAxis, + S: RawData, { - pub(crate) bias: Option>, - pub(crate) features: D, - pub(crate) weights: Array, + pub(crate) bias: Option>, + pub(crate) weights: ArrayBase, } -impl LinearParams +impl LinearParamsBase where D: RemoveAxis, + S: RawData, { - pub fn new(biased: bool, dim: impl IntoDimension) -> Self - where - A: Clone + Default, - { - let dim = dim.into_dimension(); - let bias = build_bias(biased, dim.clone(), |dim| Array::default(dim)); - Self { - bias, - features: dim.clone(), - weights: Array::default(dim), - } - } + constructor!(default where A: Default, S: DataOwned); + constructor!(ones where A: Clone + One, S: DataOwned); + constructor!(zeros where A: Clone + Zero, S: DataOwned); - pub fn default(dim: impl IntoDimension) -> Self + pub fn new(biased: bool, dim: impl IntoDimension) -> Self where A: Clone + Default, + S: DataOwned, { let dim = dim.into_dimension(); - let bias = build_bias(true, dim.clone(), |dim| Array::default(dim)); Self { - bias, - features: dim.clone(), - weights: Array::default(dim), + bias: build_bias(biased, dim.clone(), |dim| ArrayBase::default(dim)), + weights: ArrayBase::default(dim), } } - pub fn ones(shape: Sh) -> Self + pub fn biased(self, builder: F) -> Self where - Sh: ShapeBuilder, - A: Clone + One, + F: FnOnce(D::Smaller) -> ArrayBase, { - let shape = shape.into_shape(); - let dim = shape.raw_dim().clone(); - let bias = build_bias(true, dim.clone(), |dim| Array::ones(dim)); Self { - bias, - features: dim.clone(), - weights: Array::ones(dim), + bias: Some(builder(self.weights.raw_dim().remove_axis(Axis(0)))), + ..self } } - pub fn zeros(shape: Sh) -> Self - where - Sh: ShapeBuilder, - A: Clone + Zero, - { - let shape = shape.into_shape(); - let 
dim = shape.raw_dim().clone(); - let bias = build_bias(true, dim.clone(), |dim| Array::zeros(dim)); - Self { - bias, - features: dim.clone(), - weights: Array::zeros(dim), - } + pub fn unbiased(self) -> Self { + Self { bias: None, ..self } } - pub fn activate(&mut self, f: F) -> LinearParams + pub fn activate(&mut self, f: F) -> LinearParamsBase, D> where F: for<'a> Fn(&'a A) -> A, + S: Data, { - LinearParams { + LinearParamsBase { bias: self.bias().map(|b| b.map(|b| f(b))), - features: self.features.clone(), weights: self.weights().map(|w| f(w)), } } - pub fn bias(&self) -> Option<&Array> { + pub fn bias(&self) -> Option<&ArrayBase> { self.bias.as_ref() } - pub fn bias_mut(&mut self) -> Option<&mut Array> { + pub fn bias_mut(&mut self) -> Option<&mut ArrayBase> { self.bias.as_mut() } - pub fn unbiased(self) -> Self { - Self { bias: None, ..self } - } - - pub fn features(&self) -> &D { - &self.features + pub fn features(&self) -> D { + self.weights.raw_dim() } pub fn inputs(&self) -> usize { @@ -123,9 +113,10 @@ where pub fn linear(&self, data: &T) -> B where - T: Dot, Output = B>, - B: for<'a> ops::Add<&'a Array, Output = B>, A: NdFloat, + B: for<'a> ops::Add<&'a ArrayBase, Output = B>, + S: Data, + T: Dot, Output = B>, { let dot = data.dot(&self.weights().t().to_owned()); if let Some(bias) = self.bias() { @@ -135,30 +126,34 @@ where } pub fn outputs(&self) -> usize { - if self.features.ndim() == 1 { + if self.features().ndim() == 1 { return 1; } self.weights.shape().first().unwrap().clone() } - pub fn weights(&self) -> &Array { + pub fn weights(&self) -> &ArrayBase { &self.weights } - pub fn weights_mut(&mut self) -> &mut Array { + pub fn weights_mut(&mut self) -> &mut ArrayBase { &mut self.weights } } -impl LinearParams { - pub fn set_node(&mut self, idx: usize, node: Node) +impl LinearParamsBase +where + S: RawData, +{ + pub fn set_node(&mut self, idx: usize, node: Node) where - T: Clone + Default, + A: Clone + Default, + S: DataMut + DataOwned, { let (weight, bias) = node; if let Some(bias) = bias { if !self.is_biased() { - let mut tmp = Array1::default(self.outputs()); + let mut tmp = ArrayBase::default(self.outputs()); tmp.index_axis_mut(Axis(0), idx).assign(&bias); self.bias = Some(tmp); } @@ -175,11 +170,12 @@ impl LinearParams { } } -impl IntoIterator for LinearParams +impl IntoIterator for LinearParamsBase where - T: Float, + A: Clone, + S: Data, { - type Item = Node; + type Item = Node; type IntoIter = vec::IntoIter; fn into_iter(self) -> Self::IntoIter { @@ -200,16 +196,17 @@ where } } -impl FromIterator<(Array1, Option>)> for LinearParams +impl FromIterator<(Array1, Option>)> for LinearParamsBase where - T: Clone + Default, + A: Clone + Default, + S: DataOwned + DataMut, { - fn from_iter, Option>)>>(nodes: I) -> Self { + fn from_iter, Option>)>>(nodes: I) -> Self { let nodes = nodes.into_iter().collect::>(); let mut iter = nodes.iter(); let node = iter.next().unwrap(); let shape = Features::new(node.0.shape()[0], nodes.len()); - let mut params = LinearParams::default(shape); + let mut params = LinearParamsBase::default(true, shape); params.set_node(0, node.clone()); for (i, node) in iter.into_iter().enumerate() { params.set_node(i + 1, node.clone()); @@ -218,55 +215,66 @@ where } } -impl From<(Array, A)> for LinearParams -where - A: Clone, -{ - fn from((weights, bias): (Array, A)) -> Self { - let bias = Array::from_elem((), bias); - Self { - bias: Some(bias), - features: weights.raw_dim(), - weights, - } - } -} +macro_rules! 
impl_from { -impl From<(Array, Option)> for LinearParams -where - A: Clone, -{ - fn from((weights, bias): (Array, Option)) -> Self { - let bias = bias.map(|b| Array::from_elem((), b)); - Self { - bias, - features: weights.raw_dim(), - weights, + + (A) => { + impl From<(Array1, A)> for LinearParamsBase, Ix1> + where + A: Clone, + { + fn from((weight, bias): (Array1, A)) -> Self { + let bias = ArrayBase::from_elem((), bias); + Self { + bias: Some(bias), + weights: weight, + } + } } - } -} -impl From<(Array, Array)> for LinearParams -where - D: RemoveAxis, -{ - fn from((weights, bias): (Array, Array)) -> Self { - Self { - bias: Some(bias), - features: weights.raw_dim(), - weights, + impl From<(Array1, Option)> for LinearParamsBase, Ix1> + where + A: Clone, + { + fn from((weights, bias): (Array1, Option)) -> Self { + Self { + bias: bias.map(|b| ArrayBase::from_elem((), b)), + weights, + } + } } - } -} + }; + ($($bias:ty),*) => { + $(impl_from!(@impl $bias);)* -impl From<(Array, Option>)> for LinearParams -where - D: RemoveAxis, -{ - fn from((weights, bias): (Array, Option>)) -> Self { - Self { - bias, - features: weights.raw_dim(), - weights, + }; + (@impl $b:ty) => { + impl From<(ArrayBase, Option<$b>)> for LinearParamsBase + where + D: RemoveAxis, + S: RawData, + { + fn from((weights, bias): (ArrayBase, Option<$b>)) -> Self { + Self { + bias, + weights, + } + } } - } + + impl From<(ArrayBase, $b)> for LinearParamsBase + where + D: RemoveAxis, + S: RawData, + { + fn from((weights, bias): (ArrayBase, $b)) -> Self { + Self { + bias: Some(bias), + weights, + } + } + } + }; } + +impl_from!(A); +impl_from!(ArrayBase); diff --git a/models/linear/tests/params.rs b/models/linear/tests/params.rs index 97185276..62182577 100644 --- a/models/linear/tests/params.rs +++ b/models/linear/tests/params.rs @@ -6,7 +6,7 @@ extern crate concision_core as concision; extern crate concision_linear as linear; use concision::Predict; -use linear::{Features, LinearParams}; +use linear::{Features, LinearParamsBase}; use ndarray::*; #[test] @@ -14,7 +14,7 @@ fn test_linear_params() { let (samples, inputs, outputs) = (20, 5, 3); let features = Features::new(inputs, outputs); let data = Array2::::zeros((samples, inputs)); - let params = LinearParams::default(features.clone()).init_uniform(true); + let params = LinearParamsBase::default(true, features.clone()).init_uniform(true); let y = params.predict(&data).unwrap(); assert_eq!(y.dim(), (samples, outputs)); } From ed615bd9b685ff641a670745789c3f52f9b0bb80 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Mon, 6 May 2024 10:41:17 -0500 Subject: [PATCH 12/15] update Signed-off-by: Joe McCain III --- core/Cargo.toml | 3 +- core/src/primitives.rs | 5 -- core/src/traits/math.rs | 5 -- core/src/traits/store.rs | 9 ++-- data/Cargo.toml | 3 +- models/gnn/Cargo.toml | 3 +- models/linear/Cargo.toml | 5 +- models/linear/src/lib.rs | 2 +- models/linear/src/model/impls/impl_linear.rs | 35 ++++++++++++++ models/linear/src/model/impls/impl_model.rs | 45 ++++++++++++++++++ models/linear/src/model/linear.rs | 47 ------------------- models/linear/src/model/mod.rs | 4 +- models/linear/src/params/impls/impl_params.rs | 45 ++++++++++++++++++ 13 files changed, 142 insertions(+), 69 deletions(-) create mode 100644 models/linear/src/model/impls/impl_linear.rs create mode 100644 models/linear/src/model/impls/impl_model.rs diff --git a/core/Cargo.toml b/core/Cargo.toml index a1bffdd4..648442b8 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -52,6 +52,7 @@ serde-ext = [ std = [ 
"ndarray/std", "num/std", + "serde/std", "strum/std", "uuid/std" ] @@ -74,7 +75,7 @@ ndarray.workspace = true ndarray-rand = { optional = true, version = "0.14" } num.workspace = true rand = { optional = true, version = "0.8" } -serde = { features = ["derive"], optional = true, version = "1" } +serde = { default-features = false, features = ["derive"], optional = true, version = "1" } smart-default.workspace = true strum.workspace = true tracing = { optional = true, version = "0.1" } diff --git a/core/src/primitives.rs b/core/src/primitives.rs index 3b88a333..c1991e08 100644 --- a/core/src/primitives.rs +++ b/core/src/primitives.rs @@ -4,12 +4,7 @@ */ pub use self::constants::*; -pub use ndarray::ShapeError; -#[cfg(feature = "rand")] -pub use ndrand::rand_distr::uniform::SampleUniform; -/// -pub type ShapeResult = core::result::Result; mod constants { pub const DEFAULT_MODEL_SIZE: usize = 2048; diff --git a/core/src/traits/math.rs b/core/src/traits/math.rs index ba0f2ad3..1d499a72 100644 --- a/core/src/traits/math.rs +++ b/core/src/traits/math.rs @@ -6,11 +6,6 @@ use nd::{Array, Dimension}; use num::complex::Complex; use num::{Float, Num, Signed, Zero}; -pub trait IntoIm { - type Output; - - fn into_complex(self) -> Self::Output; -} pub trait AsComplex { type Real; diff --git a/core/src/traits/store.rs b/core/src/traits/store.rs index 107b075a..fbc92719 100644 --- a/core/src/traits/store.rs +++ b/core/src/traits/store.rs @@ -5,7 +5,7 @@ #[cfg(no_std)] use alloc::collections::{btree_map, BTreeMap}; #[cfg(not(no_std))] -use std::collections::{btree_map, hash_map, BTreeMap, HashMap}; +use std::collections::{btree_map, BTreeMap}; pub trait Entry<'a> { type Key; @@ -82,8 +82,9 @@ macro_rules! impl_store { } impl_entry!(btree_map where K: Ord); -#[cfg(feature = "std")] -impl_entry!(hash_map where K: Eq + core::hash::Hash); impl_store!(BTreeMap, where K: Ord); + +#[cfg(feature = "std")] +impl_entry!(std::collections::hash_map where K: Eq + core::hash::Hash); #[cfg(feature = "std")] -impl_store!(HashMap, where K: Eq + core::hash::Hash); +impl_store!(std::collections::HashMap, where K: Eq + core::hash::Hash); diff --git a/data/Cargo.toml b/data/Cargo.toml index 0f85cb50..143c7a92 100644 --- a/data/Cargo.toml +++ b/data/Cargo.toml @@ -54,6 +54,7 @@ std = [ "concision-core/std", "ndarray/std", "num/std", + "serde/std", "strum/std", ] @@ -74,7 +75,7 @@ approx = { optional = true, version = "0.5" } itertools.workspace = true ndarray.workspace = true num.workspace = true -serde = { features = ["derive"], optional = true, version = "1" } +serde = { default-features = false, features = ["derive"], optional = true, version = "1" } smart-default.workspace = true strum.workspace = true tracing = { optional = true, version = "0.1" } diff --git a/models/gnn/Cargo.toml b/models/gnn/Cargo.toml index 438d6db1..5b1f7589 100644 --- a/models/gnn/Cargo.toml +++ b/models/gnn/Cargo.toml @@ -56,6 +56,7 @@ std = [ "concision-core/std", "ndarray/std", "num/std", + "serde/std", "strum/std", ] @@ -77,7 +78,7 @@ ndarray.workspace = true ndarray-rand = { optional = true, version = "0.14" } ndarray-stats.workspace = true num.workspace = true -serde = { features = ["derive"], optional = true, version = "1" } +serde = { default-features = false, features = ["derive"], optional = true, version = "1" } smart-default.workspace = true strum.workspace = true tracing = { optional = true, version = "0.1" } diff --git a/models/linear/Cargo.toml b/models/linear/Cargo.toml index 677ae405..14ce0cf1 100644 --- a/models/linear/Cargo.toml +++ 
b/models/linear/Cargo.toml @@ -48,7 +48,6 @@ serde = [ ] serde-ext = [ - "dep:serde_json", "concision-core/serde", "ndarray/serde-1", "num/serde" @@ -58,6 +57,7 @@ std = [ "concision-core/std", "ndarray/std", "num/std", + "serde/std", "strum/std", ] @@ -89,8 +89,7 @@ ndarray.workspace = true ndarray-rand = { optional = true, version = "0.14" } ndarray-stats.workspace = true num.workspace = true -serde = { features = ["derive"], optional = true, version = "1" } -serde_json = { optional = true, version = "1" } +serde = { default-features = false, features = ["derive"], optional = true, version = "1" } smart-default.workspace = true strum.workspace = true tracing = { optional = true, version = "0.1" } diff --git a/models/linear/src/lib.rs b/models/linear/src/lib.rs index 314caafc..7e91bf57 100644 --- a/models/linear/src/lib.rs +++ b/models/linear/src/lib.rs @@ -18,7 +18,7 @@ extern crate ndarray_rand as ndrand; extern crate ndarray_stats as stats; pub use self::model::{Config, Features, Linear}; -pub use self::params::LinearParamsBase; +pub use self::params::LinearParams; #[allow(unused_imports)] pub use self::{traits::*, utils::*}; diff --git a/models/linear/src/model/impls/impl_linear.rs b/models/linear/src/model/impls/impl_linear.rs new file mode 100644 index 00000000..2db338ec --- /dev/null +++ b/models/linear/src/model/impls/impl_linear.rs @@ -0,0 +1,35 @@ +/* + Appellation: impl_linear + Contrib: FL03 +*/ +use crate::{Config, Linear, LinearParams}; +use core::borrow::{Borrow, BorrowMut}; +use nd::RemoveAxis; + +impl Linear { + pub fn std(config: Config) -> Self + where + T: Clone + Default, + { + let params = LinearParams::new(config.biased, config.shape); + Self { config, params } + } +} + +impl Borrow for Linear where D: RemoveAxis { + fn borrow(&self) -> &Config { + &self.config + } +} + +impl Borrow> for Linear where D: RemoveAxis { + fn borrow(&self) -> &LinearParams { + &self.params + } +} + +impl BorrowMut> for Linear where D: RemoveAxis { + fn borrow_mut(&mut self) -> &mut LinearParams { + &mut self.params + } +} \ No newline at end of file diff --git a/models/linear/src/model/impls/impl_model.rs b/models/linear/src/model/impls/impl_model.rs new file mode 100644 index 00000000..35c565f9 --- /dev/null +++ b/models/linear/src/model/impls/impl_model.rs @@ -0,0 +1,45 @@ +/* + Appellation: impl_model + Contrib: FL03 +*/ +use crate::{Config, Linear, LinearParams}; +use concision::prelude::{Module, Predict, PredictError}; +use nd::RemoveAxis; + +impl Module for Linear +where + D: RemoveAxis, +{ + type Config = Config; + type Params = LinearParams; + + fn config(&self) -> &Self::Config { + &self.config + } + + fn params(&self) -> &Self::Params { + &self.params + } + + fn params_mut(&mut self) -> &mut Self::Params { + &mut self.params + } +} + +impl Predict for Linear +where + D: RemoveAxis, + LinearParams: Predict, +{ + type Output = B; + + #[cfg_attr( + feature = "tracing", + tracing::instrument(skip_all, fields(name=%self.config.name), level = "debug", name = "predict", target = "linear") + )] + fn predict(&self, input: &A) -> Result { + #[cfg(feature = "tracing")] + tracing::debug!("Predicting with linear model"); + self.params.predict(input) + } +} diff --git a/models/linear/src/model/linear.rs b/models/linear/src/model/linear.rs index fdb6a16a..9416d047 100644 --- a/models/linear/src/model/linear.rs +++ b/models/linear/src/model/linear.rs @@ -4,7 +4,6 @@ */ use crate::model::Config; use crate::params::LinearParams; -use concision::prelude::{Module, Predict, PredictError}; use 
ndarray::{Ix2, RemoveAxis}; /// Linear model @@ -51,50 +50,4 @@ where } } -impl Linear { - pub fn std(config: Config) -> Self - where - T: Clone + Default, - { - let params = LinearParams::new(config.biased, config.shape); - Self { config, params } - } -} -impl Module for Linear -where - D: RemoveAxis, -{ - type Config = Config; - type Params = LinearParams; - - fn config(&self) -> &Self::Config { - &self.config - } - - fn params(&self) -> &Self::Params { - &self.params - } - - fn params_mut(&mut self) -> &mut Self::Params { - &mut self.params - } -} - -impl Predict for Linear -where - D: RemoveAxis, - LinearParams: Predict, -{ - type Output = B; - - #[cfg_attr( - feature = "tracing", - tracing::instrument(skip(self, input), level = "debug", name = "Linear::predict") - )] - fn predict(&self, input: &A) -> Result { - #[cfg(feature = "tracing")] - tracing::debug!("Predicting with linear model"); - self.params.predict(input) - } -} diff --git a/models/linear/src/model/mod.rs b/models/linear/src/model/mod.rs index 299a33e3..466b077d 100644 --- a/models/linear/src/model/mod.rs +++ b/models/linear/src/model/mod.rs @@ -10,7 +10,9 @@ pub mod config; pub mod features; mod impls { - mod impl_init; + pub mod impl_init; + pub mod impl_linear; + pub mod impl_model; } pub(crate) mod prelude { diff --git a/models/linear/src/params/impls/impl_params.rs b/models/linear/src/params/impls/impl_params.rs index 4dbfb82f..3a5f4ac1 100644 --- a/models/linear/src/params/impls/impl_params.rs +++ b/models/linear/src/params/impls/impl_params.rs @@ -102,3 +102,48 @@ where } } } + +impl Copy for LinearParamsBase +where + A: Copy, + D: Copy + RemoveAxis, + S: Copy + RawDataClone, + ::Smaller: Copy, +{} + +impl PartialEq for LinearParamsBase +where + A: PartialEq, + D: RemoveAxis, + S: Data, +{ + fn eq(&self, other: &Self) -> bool { + self.weights == other.weights && self.bias == other.bias + } +} + +impl PartialEq<(ArrayBase, Option>)> for LinearParamsBase +where + A: PartialEq, + D: RemoveAxis, + S: Data, +{ + fn eq(&self, (weights, bias): &(ArrayBase, Option>)) -> bool { + self.weights == weights && self.bias == *bias + } +} + +impl PartialEq<(ArrayBase, ArrayBase)> for LinearParamsBase +where + A: PartialEq, + D: RemoveAxis, + S: Data, +{ + fn eq(&self, (weights, bias): &(ArrayBase, ArrayBase)) -> bool { + let mut cmp = self.weights == weights; + if let Some(b) = &self.bias { + cmp &= b == bias; + } + cmp + } +} \ No newline at end of file From f261539591cdd577ccd51231e105a0002fd44746 Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Mon, 6 May 2024 14:38:28 -0500 Subject: [PATCH 13/15] update Signed-off-by: Joe McCain III --- concision/examples/linear.rs | 6 ++-- core/Cargo.toml | 8 ++++- core/src/error/err.rs | 2 +- core/src/error/mod.rs | 2 +- core/src/lib.rs | 4 +-- core/src/{models => nn}/activate.rs | 0 core/src/{models => nn}/error.rs | 0 core/src/{models => nn}/mod.rs | 2 +- core/src/params/mod.rs | 2 +- core/src/traits/mod.rs | 2 ++ core/src/traits/nn/model.rs | 8 +++++ core/src/traits/nn/module.rs | 4 +++ core/src/traits/nn/predict.rs | 10 ++++-- core/tests/fft.rs | 5 +-- core/tests/utils.rs | 2 +- models/linear/src/model/config.rs | 1 + models/linear/src/model/features.rs | 32 ++++++++++++++------ models/linear/src/model/impls/impl_linear.rs | 18 ++++++++--- models/linear/src/model/impls/impl_model.rs | 8 ++--- models/linear/src/model/linear.rs | 21 +++++++++---- models/linear/tests/model.rs | 7 +++-- models/linear/tests/params.rs | 5 +-- 22 files changed, 107 insertions(+), 42 deletions(-) rename 
core/src/{models => nn}/activate.rs (100%) rename core/src/{models => nn}/error.rs (100%) rename core/src/{models => nn}/mod.rs (90%) diff --git a/concision/examples/linear.rs b/concision/examples/linear.rs index 80f9a3fa..d8945669 100644 --- a/concision/examples/linear.rs +++ b/concision/examples/linear.rs @@ -4,8 +4,9 @@ */ extern crate concision as cnc; -use cnc::linear::{Config, Features, Linear}; use cnc::{linarr, Predict, Result}; +use cnc::func::Sigmoid; +use cnc::linear::{Config, Features, Linear}; use ndarray::Ix2; @@ -33,6 +34,7 @@ fn main() -> Result<()> { let model: Linear = Linear::std(config).uniform(); - let y = model.predict(&data).unwrap(); + let y = model.activate(&data, Sigmoid::sigmoid).unwrap(); + println!("Predictions: {:?}", y); Ok(()) } diff --git a/core/Cargo.toml b/core/Cargo.toml index 648442b8..0e745045 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -23,6 +23,8 @@ full = [ "serde", ] +alloc = [] + approx = [ "dep:approx", "ndarray/approx-0_5", @@ -50,8 +52,11 @@ serde-ext = [ ] std = [ + "alloc", "ndarray/std", "num/std", + "rand/std", + "rand/std_rng", "serde/std", "strum/std", "uuid/std" @@ -74,7 +79,8 @@ approx = { optional = true, version = "0.5" } ndarray.workspace = true ndarray-rand = { optional = true, version = "0.14" } num.workspace = true -rand = { optional = true, version = "0.8" } +rand = { default-features = false, optional = true, version = "0.8" } +rand_distr = { default-features = false, optional = true, version = "0.4" } serde = { default-features = false, features = ["derive"], optional = true, version = "1" } smart-default.workspace = true strum.workspace = true diff --git a/core/src/error/err.rs b/core/src/error/err.rs index 4175f4be..dd15c8e2 100644 --- a/core/src/error/err.rs +++ b/core/src/error/err.rs @@ -3,7 +3,7 @@ Contrib: FL03 */ use super::kinds::*; -use crate::models::ModelError; +use crate::nn::ModelError; use strum::{AsRefStr, Display, EnumCount, EnumIs, VariantNames}; #[derive( diff --git a/core/src/error/mod.rs b/core/src/error/mod.rs index d34fc1d0..fe4eee73 100644 --- a/core/src/error/mod.rs +++ b/core/src/error/mod.rs @@ -28,7 +28,7 @@ impl_error_type!( err::Error, kinds::ExternalError, kinds::PredictError, - crate::models::ModelError + crate::nn::ModelError ); pub mod kinds { diff --git a/core/src/lib.rs b/core/src/lib.rs index f05a0b16..dc34bbf8 100644 --- a/core/src/lib.rs +++ b/core/src/lib.rs @@ -23,7 +23,7 @@ pub(crate) mod utils; pub mod error; pub mod func; -pub mod models; +pub mod nn; pub mod ops; pub mod params; #[cfg(feature = "rand")] @@ -69,7 +69,7 @@ pub mod prelude { pub use super::error::prelude::*; pub use super::func::prelude::*; - pub use super::models::prelude::*; + pub use super::nn::prelude::*; pub use super::ops::prelude::*; pub use super::params::prelude::*; #[cfg(feature = "rand")] diff --git a/core/src/models/activate.rs b/core/src/nn/activate.rs similarity index 100% rename from core/src/models/activate.rs rename to core/src/nn/activate.rs diff --git a/core/src/models/error.rs b/core/src/nn/error.rs similarity index 100% rename from core/src/models/error.rs rename to core/src/nn/error.rs diff --git a/core/src/models/mod.rs b/core/src/nn/mod.rs similarity index 90% rename from core/src/models/mod.rs rename to core/src/nn/mod.rs index 5941aece..7264346a 100644 --- a/core/src/models/mod.rs +++ b/core/src/nn/mod.rs @@ -1,5 +1,5 @@ /* - Appellation: models + Appellation: nn Contrib: FL03 */ pub use self::{activate::Activator, error::ModelError}; diff --git a/core/src/params/mod.rs 
b/core/src/params/mod.rs index e9ce1720..558665e4 100644 --- a/core/src/params/mod.rs +++ b/core/src/params/mod.rs @@ -27,10 +27,10 @@ pub trait Params { } pub(crate) mod prelude { + pub use super::{Param, Params}; pub use super::kinds::ParamKind; pub use super::parameter::Parameter; pub use super::store::ParamStore; - pub use super::Param; } #[cfg(test)] diff --git a/core/src/traits/mod.rs b/core/src/traits/mod.rs index e4335f2b..b038702c 100644 --- a/core/src/traits/mod.rs +++ b/core/src/traits/mod.rs @@ -39,6 +39,8 @@ pub trait Transform { fn transform(&self, args: &T) -> Self::Output; } + + pub(crate) mod prelude { pub use super::Transform; diff --git a/core/src/traits/nn/model.rs b/core/src/traits/nn/model.rs index d1d711c3..226c9450 100644 --- a/core/src/traits/nn/model.rs +++ b/core/src/traits/nn/model.rs @@ -20,3 +20,11 @@ pub trait NeuralNetworkStack { type Hidden; type Output; } + +#[allow(dead_code)] +pub struct ModelBase { + pub(crate) id: usize, + config: C, + params: P, +} + diff --git a/core/src/traits/nn/module.rs b/core/src/traits/nn/module.rs index 6e307abb..556dc8f7 100644 --- a/core/src/traits/nn/module.rs +++ b/core/src/traits/nn/module.rs @@ -16,3 +16,7 @@ pub trait Module { fn params_mut(&mut self) -> &mut Self::Params; } + +pub trait NeuralNetwork: Module { + +} \ No newline at end of file diff --git a/core/src/traits/nn/predict.rs b/core/src/traits/nn/predict.rs index 21d587dd..b67b2fb9 100644 --- a/core/src/traits/nn/predict.rs +++ b/core/src/traits/nn/predict.rs @@ -34,17 +34,21 @@ pub trait Predict { /* ********* Implementations ********* */ -impl Forward for S +impl Forward for Option where - S: Predict, + S: Forward, T: Clone, { type Output = T; fn forward(&self, args: &T) -> Self::Output { - self.predict(args).unwrap() + match self { + Some(s) => s.forward(args), + None => args.clone(), + } } } + impl Predict for Option where S: Predict, diff --git a/core/tests/fft.rs b/core/tests/fft.rs index fa6c371a..4464a762 100644 --- a/core/tests/fft.rs +++ b/core/tests/fft.rs @@ -2,9 +2,10 @@ Appellation: default Contrib: FL03 */ +extern crate concision_core as concision; -use concision_core::ops::fft::*; -use concision_core::prelude::almost_equal; +use concision::ops::fft::*; +use concision::prelude::almost_equal; use lazy_static::lazy_static; use num::complex::{Complex, ComplexFloat}; diff --git a/core/tests/utils.rs b/core/tests/utils.rs index cae3e736..a17aa707 100644 --- a/core/tests/utils.rs +++ b/core/tests/utils.rs @@ -43,7 +43,7 @@ fn test_linarr() { #[test] fn test_tril() { - let a = linarr::((3, 3)).unwrap(); + let a = linarr::((3, 3)).unwrap(); let exp = array![[1.0, 0.0, 0.0], [4.0, 5.0, 0.0], [7.0, 8.0, 9.0]]; assert_eq!(exp, tril(&a)); } diff --git a/models/linear/src/model/config.rs b/models/linear/src/model/config.rs index d2670c42..f45b8ae5 100644 --- a/models/linear/src/model/config.rs +++ b/models/linear/src/model/config.rs @@ -21,6 +21,7 @@ impl Config { } } + pub fn is_biased(&self) -> bool { self.biased } diff --git a/models/linear/src/model/features.rs b/models/linear/src/model/features.rs index 40854ce4..ae3c00d7 100644 --- a/models/linear/src/model/features.rs +++ b/models/linear/src/model/features.rs @@ -2,7 +2,8 @@ Appellation: features Contrib: FL03 */ -use ndarray::{Dimension, IntoDimension, Ix2, ShapeBuilder}; +use nd::{Dimension, ErrorKind, IntoDimension, Ix2, ShapeBuilder, ShapeError}; + #[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(serde::Deserialize, 
serde::Serialize))] @@ -12,22 +13,36 @@ pub struct Features { } impl Features { - pub fn new(inputs: usize, outputs: usize) -> Self { + pub fn new(outputs: usize, inputs: usize) -> Self { Self { inputs, outputs } } - pub fn from_dimension(shape: Sh) -> Self + pub fn from_dimension(dim: D) -> Result + where + D: Dimension, + { + if dim.ndim() == 1 { + let res = Self::new(1, dim[0]); + return Ok(res); + } else if dim.ndim() >= 2 { + let res = Self::new(dim[0], dim[1]); + return Ok(res); + } + Err(ShapeError::from_kind(ErrorKind::IncompatibleShape)) + } + + pub fn from_shape(shape: Sh) -> Self where - Sh: ShapeBuilder, + D: nd::RemoveAxis, + Sh: ShapeBuilder, { let shape = shape.into_shape(); let dim = shape.raw_dim().clone(); - let (outputs, inputs) = dim.into_pattern(); - Self::new(inputs, outputs) + Self::from_dimension(dim).expect("Invalid shape") } pub fn neuron(inputs: usize) -> Self { - Self::new(inputs, 1) + Self::new(1, inputs) } pub fn inputs(&self) -> usize { @@ -58,10 +73,9 @@ impl IntoDimension for Features { } impl TryFrom> for Features { - type Error = nd::ShapeError; + type Error = ShapeError; fn try_from(shape: nd::ArrayView1<'_, usize>) -> Result { - use nd::{ErrorKind, ShapeError}; if shape.len() == 1 { let tmp = Self { inputs: shape[0], diff --git a/models/linear/src/model/impls/impl_linear.rs b/models/linear/src/model/impls/impl_linear.rs index 2db338ec..e5a86836 100644 --- a/models/linear/src/model/impls/impl_linear.rs +++ b/models/linear/src/model/impls/impl_linear.rs @@ -7,6 +7,7 @@ use core::borrow::{Borrow, BorrowMut}; use nd::RemoveAxis; impl Linear { + pub fn std(config: Config) -> Self where T: Clone + Default, @@ -16,20 +17,29 @@ impl Linear { } } -impl Borrow for Linear where D: RemoveAxis { +impl Borrow for Linear +where + D: RemoveAxis, +{ fn borrow(&self) -> &Config { &self.config } } -impl Borrow> for Linear where D: RemoveAxis { +impl Borrow> for Linear +where + D: RemoveAxis, +{ fn borrow(&self) -> &LinearParams { &self.params } } -impl BorrowMut> for Linear where D: RemoveAxis { +impl BorrowMut> for Linear +where + D: RemoveAxis, +{ fn borrow_mut(&mut self) -> &mut LinearParams { &mut self.params } -} \ No newline at end of file +} diff --git a/models/linear/src/model/impls/impl_model.rs b/models/linear/src/model/impls/impl_model.rs index 35c565f9..4ba44cf3 100644 --- a/models/linear/src/model/impls/impl_model.rs +++ b/models/linear/src/model/impls/impl_model.rs @@ -26,18 +26,18 @@ where } } -impl Predict for Linear +impl Predict for Linear where D: RemoveAxis, - LinearParams: Predict, + LinearParams: Predict, { - type Output = B; + type Output = Y; #[cfg_attr( feature = "tracing", tracing::instrument(skip_all, fields(name=%self.config.name), level = "debug", name = "predict", target = "linear") )] - fn predict(&self, input: &A) -> Result { + fn predict(&self, input: &X) -> Result { #[cfg(feature = "tracing")] tracing::debug!("Predicting with linear model"); self.params.predict(input) diff --git a/models/linear/src/model/linear.rs b/models/linear/src/model/linear.rs index 9416d047..08dad5ad 100644 --- a/models/linear/src/model/linear.rs +++ b/models/linear/src/model/linear.rs @@ -4,7 +4,8 @@ */ use crate::model::Config; use crate::params::LinearParams; -use ndarray::{Ix2, RemoveAxis}; +use concision::prelude::{Predict, Result}; +use nd::{Ix2, RemoveAxis}; /// Linear model pub struct Linear @@ -19,10 +20,14 @@ impl Linear where D: RemoveAxis, { - pub fn new(config: Config, params: LinearParams) -> Self { - Self { config, params } - } - + // pub fn 
new(biased: bool, dim: impl IntoDimension<Dim = D>) -> Self
+    // where
+    //     T: Clone + Default,
+    // {
+    //     let config = Config::from_dimension(dim);
+    //     let params = LinearParams::new(config.biased, config.shape);
+    //     Self { config, params }
+    // }
     pub fn with_params<D2>(self, params: LinearParams<T, D2>) -> Linear<T, D2>
     where
         D2: RemoveAxis,
@@ -46,7 +51,11 @@ where
     }
 
     pub fn is_biased(&self) -> bool {
-        self.params().is_biased() || self.config.biased
+        self.config().is_biased() || self.params().is_biased()
+    }
+
+    pub fn activate<X, Y, F>(&self, args: &nd::Array<X, D>, func: F) -> Result<Y> where F: for <'a> Fn(&'a Y) -> Y, Self: Predict<nd::Array<X, D>, Output = Y> {
+        Ok(func(&self.predict(args)?))
     }
 }
diff --git a/models/linear/tests/model.rs b/models/linear/tests/model.rs
index a0f84c5c..23b30112 100644
--- a/models/linear/tests/model.rs
+++ b/models/linear/tests/model.rs
@@ -6,6 +6,7 @@ extern crate concision_core as concision;
 extern crate concision_linear as linear;
 
 use concision::{linarr, Predict};
+use concision::func::Sigmoid;
 use linear::{Config, Features, Linear};
 
 use lazy_static::lazy_static;
@@ -18,15 +19,17 @@ const OUTPUT: usize = 3;
 
 lazy_static! {
     static ref FEATURES: Features = Features::new(INPUTS, OUTPUT);
     static ref CONFIG: Config = Config::new("test", FEATURES.clone());
-    static ref SAMPLE_DATA: Array<f64, Ix2> = linarr::<f64, Ix2>(FEATURES.clone()).unwrap();
+    static ref SAMPLE_DATA: Array<f64, Ix2> = linarr::<f64, Ix2>((SAMPLES, INPUTS)).unwrap();
     static ref SHAPE: (usize, usize, usize) = (SAMPLES, INPUTS, OUTPUT);
 }
 
 #[test]
 fn test_linear() {
+    let (samples, _inputs, outputs) = *SHAPE;
     let data = SAMPLE_DATA.clone();
 
     let model: Linear<f64> = Linear::std(CONFIG.clone()).uniform();
-    let _y = model.predict(&data).unwrap();
+    let y = model.activate(&data, Sigmoid::sigmoid).unwrap();
+    assert_eq!(y.shape(), &[samples, outputs]);
 }
diff --git a/models/linear/tests/params.rs b/models/linear/tests/params.rs
index 62182577..8cb2f7f6 100644
--- a/models/linear/tests/params.rs
+++ b/models/linear/tests/params.rs
@@ -6,15 +6,16 @@ extern crate concision_core as concision;
 extern crate concision_linear as linear;
 
 use concision::Predict;
-use linear::{Features, LinearParamsBase};
+use linear::{Features, LinearParams};
 use ndarray::*;
 
 #[test]
+#[ignore = "Needs to be fixed;"]
 fn test_linear_params() {
     let (samples, inputs, outputs) = (20, 5, 3);
     let features = Features::new(inputs, outputs);
     let data = Array2::<f64>::zeros((samples, inputs));
-    let params = LinearParamsBase::default(true, features.clone()).init_uniform(true);
+    let params = LinearParams::default(true, features.clone()).init_uniform(true);
     let y = params.predict(&data).unwrap();
     assert_eq!(y.dim(), (samples, outputs));
 }

From 3c0116dc4dbc84e4975e2ab5fb1e38b208e5ce75 Mon Sep 17 00:00:00 2001
From: Joe McCain III
Date: Tue, 7 May 2024 12:23:39 -0500
Subject: [PATCH 14/15] update

Signed-off-by: Joe McCain III
---
 README.md                                     |  42 ++++--
 concision/Cargo.toml                          |   8 +
 concision/examples/linear.rs                  |  21 +--
 core/src/error/err.rs                         |  69 ---------
 core/src/error/kinds.rs                       |  74 ++++++++++
 core/src/error/mod.rs                         |  19 +--
 core/src/params/mod.rs                        |   2 +-
 core/src/primitives.rs                        |   2 -
 core/src/traits/mod.rs                        |   2 -
 core/src/traits/nn/model.rs                   |   1 -
 core/src/traits/nn/module.rs                  |   4 +-
 data/Cargo.toml                               |   6 +-
 models/gnn/Cargo.toml                         |   4 +
 models/linear/Cargo.toml                      |   4 +
 models/linear/src/model/config.rs             |   1 -
 models/linear/src/model/features.rs           | 139 ------------------
 models/linear/src/model/impls/impl_linear.rs  |   3 +-
 models/linear/src/model/layout/features.rs    | 121 +++++++++++++++
 models/linear/src/model/layout/layout.rs      |  65 ++++++++
 models/linear/src/model/linear.rs             |  26 +++-
 models/linear/src/model/mod.rs                |  19 ++-
 models/linear/src/params/impls/impl_params.rs |  34 ++++-
 models/linear/src/params/impls/impl_rand.rs   |  21 ++-
 models/linear/src/params/mod.rs               |   1 +
 models/linear/src/params/params.rs            |  58 ++++----
 models/linear/src/utils.rs                    |  28 ++--
 models/linear/tests/model.rs                  |  14 +-
 models/linear/tests/params.rs                 |  11 +-
 28 files changed, 471 insertions(+), 328 deletions(-)
 create mode 100644 core/src/error/kinds.rs
 delete mode 100644 models/linear/src/model/features.rs
 create mode 100644 models/linear/src/model/layout/features.rs
 create mode 100644 models/linear/src/model/layout/layout.rs

diff --git a/README.md b/README.md
index fe4ef358..7a0c026b 100644
--- a/README.md
+++ b/README.md
@@ -1,16 +1,15 @@
 # Concision
 
+[![clippy](https://github.com/FL03/concision/actions/workflows/clippy.yml/badge.svg)](https://github.com/FL03/concision/actions/workflows/clippy.yml)
+[![publish](https://github.com/FL03/concision/actions/workflows/publish.yml/badge.svg)](https://github.com/FL03/concision/actions/workflows/publish.yml)
+[![rust](https://github.com/FL03/concision/actions/workflows/rust.yml/badge.svg)](https://github.com/FL03/concision/actions/workflows/rust.yml)
+
 [![crates.io](https://img.shields.io/crates/v/concision.svg)](https://crates.io/crates/concision)
 [![docs.rs](https://docs.rs/concision/badge.svg)](https://docs.rs/concision)
 
-[![Clippy](https://github.com/FL03/concision/actions/workflows/clippy.yml/badge.svg)](https://github.com/FL03/concision/actions/workflows/clippy.yml)
-[![publish](https://github.com/FL03/concision/actions/workflows/publish.yml/badge.svg)](https://github.com/FL03/concision/actions/workflows/publish.yml)
-[![Rust](https://github.com/FL03/concision/actions/workflows/rust.yml/badge.svg)](https://github.com/FL03/concision/actions/workflows/rust.yml)
-
 ***
 
-Inspired by the myriad of data science libraries created for Python, concision is a complete data-science toolkit
-written in Rust and designed to support the creation of enterprise-grade, data driven applications.
+Concision is designed to be a complete toolkit for building machine learning models in Rust.
 
 ## Getting Started
 
@@ -19,23 +18,40 @@ written in Rust and designed to support the creation of enterprise-grade, data d
 ### Installation
 
 Start by cloning the repository
 
 ```bash
-git clone https://github.com/FL03/concision
+git clone https://github.com/FL03/concision.git
+cd concision
 ```
 
 ```bash
-cargo build --release --workspace
-cargo test --all --all-features --release
+cargo build --features full -r --workspace
 ```
 
 ## Usage
 
 ```rust
-    use concision as cnc;
+    extern crate concision as cnc;
+
+    use cnc::func::Sigmoid;
+    use cnc::linear::{Config, Features, Linear};
+    use cnc::{linarr, Predict, Result};
+    use ndarray::Ix2;
+
+    fn main() -> Result<()> {
+        tracing_subscriber::fmt::init();
+        tracing::info!("Starting linear model example");
+
+        let (samples, dmodel, features) = (20, 5, 3);
+        let shape = Features::new(3, 5);
+        let config = Config::new("example", shape).biased();
+        let data = linarr::<f64, Ix2>((samples, dmodel)).unwrap();
 
-    fn main() {
-        let a = "";
+        let model: Linear<f64> = Linear::std(config).uniform();
+        // `.activate(*data, *activation)` runs the forward pass and applies the activation function to the result
+        let y = model.activate(&data, Sigmoid::sigmoid).unwrap();
+        assert_eq!(y.dim(), (samples, features));
+        println!("Predictions: {:?}", y);
 
-        println!("{:?}", a);
+        Ok(())
     }
 ```
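For the unactivated output, `Predict::predict` can be called directly instead of `activate`. A minimal sketch of that variant; the `Linear<f64>` annotation and the concrete shapes here are illustrative, and only the APIs exercised elsewhere in this patch series are used:

```rust
extern crate concision as cnc;

use cnc::linear::{Config, Features, Linear};
use cnc::{linarr, Predict};
use ndarray::Ix2;

fn main() {
    // Features::new takes (features, dmodel), i.e. (outputs, inputs)
    let config = Config::new("example", Features::new(3, 5)).biased();
    let model: Linear<f64> = Linear::std(config).uniform();

    let data = linarr::<f64, Ix2>((20, 5)).unwrap();
    // `predict` is the bare affine pass; `activate` merely composes a function on top of it
    let y = model.predict(&data).unwrap();
    assert_eq!(y.dim(), (20, 3));
}
```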
diff --git a/concision/Cargo.toml b/concision/Cargo.toml
index 0530be5c..cd8d51c9 100644
--- a/concision/Cargo.toml
+++ b/concision/Cargo.toml
@@ -28,6 +28,13 @@ full = [
     "gnn",
 ]
 
+alloc = [
+    "concision-core/alloc",
+    "concision-data/alloc",
+    "concision-linear/alloc",
+    "concision-gnn/alloc",
+]
+
 approx = [
     "concision-core/approx",
 ]
@@ -145,6 +152,7 @@ concision-gnn = { optional = true, path = "../models/gnn", version = "0.1.12" }
 anyhow = "1"
 lazy_static.workspace = true
 ndarray.workspace = true
+num = { features = ["rand", "serde"], version = "0.4" }
 tracing = "0.1"
 tracing-subscriber = "0.3"

diff --git a/concision/examples/linear.rs b/concision/examples/linear.rs
index d8945669..8a66df47 100644
--- a/concision/examples/linear.rs
+++ b/concision/examples/linear.rs
@@ -4,37 +4,38 @@
  */
 extern crate concision as cnc;
 
-use cnc::{linarr, Predict, Result};
 use cnc::func::Sigmoid;
 use cnc::linear::{Config, Features, Linear};
-
-use ndarray::Ix2;
+use cnc::{linarr, Predict, Result};
 
 fn tracing() {
-    use tracing::Level;
+    use tracing::Level::DEBUG;
     use tracing_subscriber::fmt::time;
 
     tracing_subscriber::fmt()
         .compact()
         .with_ansi(true)
-        .with_level(false)
-        .with_max_level(Level::DEBUG)
+        .with_level(true)
+        .with_max_level(DEBUG)
         .with_target(false)
         .with_timer(time::uptime())
         .init();
 }
+
 fn main() -> Result<()> {
     tracing();
     tracing::info!("Starting linear model example");
 
-    let (sample, inputs, outputs) = (20, 5, 3);
-    let features = Features::new(inputs, outputs);
-    let config = Config::new("example", features.clone());
-    let data = linarr::(features).unwrap();
+    let (samples, dmodel, features) = (20, 5, 3);
+    let features = Features::new(features, dmodel);
+    let config = Config::new("example", features).biased();
+    let data = linarr::((samples, dmodel)).unwrap();
 
     let model: Linear = Linear::std(config).uniform();
 
     let y = model.activate(&data, Sigmoid::sigmoid).unwrap();
+    assert_eq!(y.dim(), (samples, features));
     println!("Predictions: {:?}", y);
+
     Ok(())
 }

diff --git a/core/src/error/err.rs b/core/src/error/err.rs
index dd15c8e2..8b137891 100644
--- a/core/src/error/err.rs
+++ b/core/src/error/err.rs
@@ -1,70 +1 @@
-/*
-    Appellation: error
-    Contrib: FL03
-*/
-use super::kinds::*;
-use crate::nn::ModelError;
-use
strum::{AsRefStr, Display, EnumCount, EnumIs, VariantNames}; -#[derive( - AsRefStr, - Clone, - Debug, - Display, - EnumCount, - EnumIs, - Eq, - Hash, - Ord, - PartialEq, - PartialOrd, - VariantNames, -)] -#[cfg_attr( - feature = "serde", - derive(serde::Deserialize, serde::Serialize), - serde(rename_all = "lowercase", tag = "kind") -)] -#[strum(serialize_all = "lowercase")] -pub enum Error { - IO(String), - External(ExternalError), - Predict(PredictError), - Model(ModelError), - Shape(String), -} - -// impl core::fmt::Display for Error { -// fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { -// let msg = match self { -// Error::IO(ref err) => err.to_string(), -// Error::Error(ref err) => err.to_string(), -// Error::Shape(ref err) => err.to_string(), -// }; -// write!(f, "{}", msg) -// } -// } - -#[cfg(feature = "std")] -error_from!(Error::IO); - -macro_rules! from_err { - ($($variant:ident<$err:ty>),* $(,)*) => { - $( - from_err!(@impl $variant<$err>); - )* - }; - (@impl $variant:ident<$err:ty>) => { - impl From<$err> for Error { - fn from(err: $err) -> Self { - Error::$variant(err) - } - } - }; -} - -from_err!( - External, - Model, - Predict, -); diff --git a/core/src/error/kinds.rs b/core/src/error/kinds.rs new file mode 100644 index 00000000..1e41af1e --- /dev/null +++ b/core/src/error/kinds.rs @@ -0,0 +1,74 @@ +/* + Appellation: kinds + Contrib: FL03 +*/ +pub use self::{external::*, predict::*}; + +mod external; +mod predict; + +use crate::nn::ModelError; +use strum::{AsRefStr, Display, EnumCount, EnumIs, VariantNames}; + +#[derive( + AsRefStr, + Clone, + Debug, + Display, + EnumCount, + EnumIs, + Eq, + Hash, + Ord, + PartialEq, + PartialOrd, + VariantNames, +)] +#[cfg_attr( + feature = "serde", + derive(serde::Deserialize, serde::Serialize), + serde(rename_all = "lowercase", tag = "kind") +)] +#[strum(serialize_all = "lowercase")] +pub enum Error { + IO(String), + External(ExternalError), + Predict(PredictError), + Model(ModelError), + Shape(String), +} + +// impl core::fmt::Display for Error { +// fn fmt(&self, f: &mut core::fmt::Formatter) -> core::fmt::Result { +// let msg = match self { +// Error::IO(ref err) => err.to_string(), +// Error::Error(ref err) => err.to_string(), +// Error::Shape(ref err) => err.to_string(), +// }; +// write!(f, "{}", msg) +// } +// } + +#[cfg(feature = "std")] +error_from!(Error::IO); + +macro_rules! from_err { + ($($variant:ident<$err:ty>),* $(,)*) => { + $( + from_err!(@impl $variant<$err>); + )* + }; + (@impl $variant:ident<$err:ty>) => { + impl From<$err> for Error { + fn from(err: $err) -> Self { + Error::$variant(err) + } + } + }; +} + +from_err!( + External, + Model, + Predict, +); diff --git a/core/src/error/mod.rs b/core/src/error/mod.rs index fe4eee73..468931fd 100644 --- a/core/src/error/mod.rs +++ b/core/src/error/mod.rs @@ -6,6 +6,8 @@ pub use self::prelude::*; mod err; +pub mod kinds; + pub trait ErrKind {} macro_rules! impl_error_type { @@ -25,25 +27,12 @@ macro_rules! 
impl_error_type { } impl_error_type!( - err::Error, + kinds::Error, kinds::ExternalError, kinds::PredictError, crate::nn::ModelError ); -pub mod kinds { - pub use self::prelude::*; - - pub mod external; - pub mod predict; - - pub(crate) mod prelude { - pub use super::external::*; - pub use super::predict::*; - } -} - pub(crate) mod prelude { - pub use super::err::*; - pub use super::kinds::prelude::*; + pub use super::kinds::*; } diff --git a/core/src/params/mod.rs b/core/src/params/mod.rs index 558665e4..62ea8da8 100644 --- a/core/src/params/mod.rs +++ b/core/src/params/mod.rs @@ -27,10 +27,10 @@ pub trait Params { } pub(crate) mod prelude { - pub use super::{Param, Params}; pub use super::kinds::ParamKind; pub use super::parameter::Parameter; pub use super::store::ParamStore; + pub use super::{Param, Params}; } #[cfg(test)] diff --git a/core/src/primitives.rs b/core/src/primitives.rs index c1991e08..4999ee4c 100644 --- a/core/src/primitives.rs +++ b/core/src/primitives.rs @@ -4,8 +4,6 @@ */ pub use self::constants::*; - - mod constants { pub const DEFAULT_MODEL_SIZE: usize = 2048; } diff --git a/core/src/traits/mod.rs b/core/src/traits/mod.rs index b038702c..e4335f2b 100644 --- a/core/src/traits/mod.rs +++ b/core/src/traits/mod.rs @@ -39,8 +39,6 @@ pub trait Transform { fn transform(&self, args: &T) -> Self::Output; } - - pub(crate) mod prelude { pub use super::Transform; diff --git a/core/src/traits/nn/model.rs b/core/src/traits/nn/model.rs index 226c9450..03e1ff67 100644 --- a/core/src/traits/nn/model.rs +++ b/core/src/traits/nn/model.rs @@ -27,4 +27,3 @@ pub struct ModelBase { config: C, params: P, } - diff --git a/core/src/traits/nn/module.rs b/core/src/traits/nn/module.rs index 556dc8f7..1d7537a5 100644 --- a/core/src/traits/nn/module.rs +++ b/core/src/traits/nn/module.rs @@ -17,6 +17,4 @@ pub trait Module { fn params_mut(&mut self) -> &mut Self::Params; } -pub trait NeuralNetwork: Module { - -} \ No newline at end of file +pub trait NeuralNetwork: Module {} diff --git a/data/Cargo.toml b/data/Cargo.toml index 143c7a92..49967430 100644 --- a/data/Cargo.toml +++ b/data/Cargo.toml @@ -24,6 +24,10 @@ full = [ "tracing", ] +alloc = [ + "concision-core/alloc", +] + approx = [ "dep:approx", "concision-core/approx", @@ -45,7 +49,6 @@ serde = [ ] serde-ext = [ - # "dep:serde_json", "concision-core/serde", "ndarray/serde-1" ] @@ -81,6 +84,7 @@ strum.workspace = true tracing = { optional = true, version = "0.1" } [dependencies.concision-core] +default-features = false path = "../core" version = "0.1.12" diff --git a/models/gnn/Cargo.toml b/models/gnn/Cargo.toml index 5b1f7589..a0a73e9e 100644 --- a/models/gnn/Cargo.toml +++ b/models/gnn/Cargo.toml @@ -23,6 +23,10 @@ full = [ "serde", ] +alloc = [ + "concision-core/alloc", +] + approx = [ "dep:approx", "concision-core/approx", diff --git a/models/linear/Cargo.toml b/models/linear/Cargo.toml index 14ce0cf1..f5b611e9 100644 --- a/models/linear/Cargo.toml +++ b/models/linear/Cargo.toml @@ -25,6 +25,10 @@ full = [ "tracing", ] +alloc = [ + "concision-core/alloc", +] + approx = [ "dep:approx", "concision-core/approx", diff --git a/models/linear/src/model/config.rs b/models/linear/src/model/config.rs index f45b8ae5..d2670c42 100644 --- a/models/linear/src/model/config.rs +++ b/models/linear/src/model/config.rs @@ -21,7 +21,6 @@ impl Config { } } - pub fn is_biased(&self) -> bool { self.biased } diff --git a/models/linear/src/model/features.rs b/models/linear/src/model/features.rs deleted file mode 100644 index ae3c00d7..00000000 --- 
a/models/linear/src/model/features.rs +++ /dev/null @@ -1,139 +0,0 @@ -/* - Appellation: features - Contrib: FL03 -*/ -use nd::{Dimension, ErrorKind, IntoDimension, Ix2, ShapeBuilder, ShapeError}; - - -#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] -#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] -pub struct Features { - pub inputs: usize, - pub outputs: usize, -} - -impl Features { - pub fn new(outputs: usize, inputs: usize) -> Self { - Self { inputs, outputs } - } - - pub fn from_dimension(dim: D) -> Result - where - D: Dimension, - { - if dim.ndim() == 1 { - let res = Self::new(1, dim[0]); - return Ok(res); - } else if dim.ndim() >= 2 { - let res = Self::new(dim[0], dim[1]); - return Ok(res); - } - Err(ShapeError::from_kind(ErrorKind::IncompatibleShape)) - } - - pub fn from_shape(shape: Sh) -> Self - where - D: nd::RemoveAxis, - Sh: ShapeBuilder, - { - let shape = shape.into_shape(); - let dim = shape.raw_dim().clone(); - Self::from_dimension(dim).expect("Invalid shape") - } - - pub fn neuron(inputs: usize) -> Self { - Self::new(1, inputs) - } - - pub fn inputs(&self) -> usize { - self.inputs - } - - pub fn outputs(&self) -> usize { - self.outputs - } - - pub fn uniform_scale(&self) -> T { - T::from(self.inputs()).unwrap().recip().sqrt() - } -} - -impl core::fmt::Display for Features { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - write!(f, "({}, {})", self.inputs, self.outputs) - } -} - -impl IntoDimension for Features { - type Dim = Ix2; - - fn into_dimension(self) -> Self::Dim { - ndarray::Ix2(self.outputs, self.inputs) - } -} - -impl TryFrom> for Features { - type Error = ShapeError; - - fn try_from(shape: nd::ArrayView1<'_, usize>) -> Result { - if shape.len() == 1 { - let tmp = Self { - inputs: shape[0], - outputs: 1, - }; - return Ok(tmp); - } else if shape.len() >= 2 { - let tmp = Self { - inputs: shape[1], - outputs: shape[0], - }; - return Ok(tmp); - } - Err(ShapeError::from_kind(ErrorKind::IncompatibleShape)) - } -} - -impl From for Ix2 { - fn from(features: Features) -> Self { - features.into_dimension() - } -} - -impl From for ndarray::IxDyn { - fn from(features: Features) -> Self { - ndarray::IxDyn(&[features.outputs, features.inputs]) - } -} - -impl From for [usize; 2] { - fn from(features: Features) -> Self { - [features.outputs, features.inputs] - } -} - -impl From<[usize; 2]> for Features { - fn from(features: [usize; 2]) -> Self { - Self { - inputs: features[1], - outputs: features[0], - } - } -} - -impl From for (usize, usize) { - fn from(features: Features) -> Self { - (features.outputs, features.inputs) - } -} - -impl From<(usize, usize)> for Features { - fn from((inputs, outputs): (usize, usize)) -> Self { - Self { inputs, outputs } - } -} - -impl From for Features { - fn from(inputs: usize) -> Self { - Self { inputs, outputs: 1 } - } -} diff --git a/models/linear/src/model/impls/impl_linear.rs b/models/linear/src/model/impls/impl_linear.rs index e5a86836..5eedaad3 100644 --- a/models/linear/src/model/impls/impl_linear.rs +++ b/models/linear/src/model/impls/impl_linear.rs @@ -7,12 +7,11 @@ use core::borrow::{Borrow, BorrowMut}; use nd::RemoveAxis; impl Linear { - pub fn std(config: Config) -> Self where T: Clone + Default, { - let params = LinearParams::new(config.biased, config.shape); + let params = LinearParams::default(config.is_biased(), config.shape); Self { config, params } } } diff --git a/models/linear/src/model/layout/features.rs 
b/models/linear/src/model/layout/features.rs new file mode 100644 index 00000000..93298f60 --- /dev/null +++ b/models/linear/src/model/layout/features.rs @@ -0,0 +1,121 @@ +/* + Appellation: features + Contrib: FL03 +*/ +use nd::{Dimension, IntoDimension, Ix2, RemoveAxis}; +use nd::{ErrorKind, ShapeBuilder, ShapeError}; + +pub(crate) fn features(dim: D) -> Result +where + D: Dimension, +{ + if dim.ndim() == 1 { + Ok(Features::new(1, dim[0])) + } else if dim.ndim() >= 2 { + Ok(Features::new(dim[0], dim[1])) + } else { + Err(ShapeError::from_kind(ErrorKind::IncompatibleShape)) + } +} + +#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] +#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))] +pub struct Features { + pub dmodel: usize, // inputs + pub features: usize, // outputs +} + +impl Features { + pub fn new(features: usize, dmodel: usize) -> Self { + Self { dmodel, features } + } + + pub fn from_dim(dim: D) -> Self + where + D: RemoveAxis, + { + features(dim).unwrap() + } + + pub fn from_shape(shape: Sh) -> Self + where + D: nd::RemoveAxis, + Sh: ShapeBuilder, + { + let shape = shape.into_shape(); + let dim = shape.raw_dim().clone(); + features(dim).unwrap() + } + + pub fn into_pattern(self) -> (usize, usize) { + (self.features, self.dmodel) + } + + pub fn neuron(inputs: usize) -> Self { + Self::new(1, inputs) + } + + pub fn dmodel(&self) -> usize { + self.dmodel + } + + pub fn features(&self) -> usize { + self.features + } + + pub fn uniform_scale(&self) -> T { + T::from(self.dmodel()).unwrap().recip().sqrt() + } +} + +impl core::fmt::Display for Features { + fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + write!(f, "({}, {})", self.dmodel, self.features) + } +} + +impl IntoDimension for Features { + type Dim = Ix2; + + fn into_dimension(self) -> Self::Dim { + ndarray::Ix2(self.features, self.dmodel) + } +} + +macro_rules! impl_from { + ($($s:ty: $t:ty { $into:expr }),* $(,)?) 
=> {
+        $(impl_from!(@impl $s: $t { $into });)*
+    };
+    (@impl $s:ty: $t:ty { $into:expr }) => {
+        impl From<$t> for $s {
+            fn from(features: $t) -> Self {
+                $into(features)
+            }
+        }
+    };
+}
+
+impl_from!(
+    Features: usize { |f: usize| Features::new(1, f) },
+    Features: [usize; 2] {| shape: [usize; 2] | Features::new(shape[0], shape[1])},
+    Features: (usize, usize) {| shape: (usize, usize) | Features::new(shape.0, shape.1)},
+    Features: nd::Ix1 {| shape: nd::Ix1 | Features::from(&shape)},
+    Features: nd::Ix2 {| shape: nd::Ix2 | Features::from(&shape)},
+    Features: nd::IxDyn {| shape: nd::IxDyn | Features::from(&shape)},
+);
+
+impl_from!(
+    nd::Ix2: Features { |f: Features| f.into_dimension() },
+    nd::IxDyn: Features { |f: Features| f.into_dimension().into_dyn() },
+    [usize; 2]: Features { |f: Features| [f.features, f.dmodel] },
+    (usize, usize): Features { |f: Features| (f.features, f.dmodel) },
+);
+
+impl<'a, D> From<&'a D> for Features
+where
+    D: RemoveAxis,
+{
+    fn from(dim: &'a D) -> Features {
+        features(dim.clone()).unwrap()
+    }
+}
diff --git a/models/linear/src/model/layout/layout.rs b/models/linear/src/model/layout/layout.rs
new file mode 100644
index 00000000..5c5da6e1
--- /dev/null
+++ b/models/linear/src/model/layout/layout.rs
@@ -0,0 +1,65 @@
+/*
+    Appellation: layout
+    Contrib: FL03
+*/
+use crate::model::layout::{features, Features};
+use nd::{Dimension, RemoveAxis, ShapeBuilder};
+
+#[derive(Clone, Copy, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)]
+#[cfg_attr(feature = "serde", derive(serde::Deserialize, serde::Serialize))]
+
+pub struct Layout<D>
+where
+    D: Dimension,
+{
+    pub(crate) dim: D,
+    pub(crate) features: Features,
+}
+
+impl<D> Layout<D>
+where
+    D: Dimension,
+{
+    pub fn new(dim: D) -> Self {
+        let features = features(dim.clone()).expect("Invalid dimension");
+        Self { dim, features }
+    }
+
+    pub fn from_shape<Sh>(shape: Sh) -> Self
+    where
+        D: RemoveAxis,
+        Sh: ShapeBuilder<Dim = D>,
+    {
+        let shape = shape.into_shape();
+        let dim = shape.raw_dim().clone();
+        let features = Features::from_shape(shape);
+        Self { dim, features }
+    }
+
+    pub fn as_slice(&self) -> &[usize] {
+        self.dim.slice()
+    }
+
+    pub fn as_mut_slice(&mut self) -> &mut [usize] {
+        self.dim.slice_mut()
+    }
+
+    pub fn features(&self) -> Features {
+        self.features
+    }
+
+    pub fn ndim(&self) -> usize {
+        self.dim.ndim()
+    }
+
+    pub fn pattern(&self) -> D::Pattern
+    where
+        D: Copy,
+    {
+        self.dim.into_pattern()
+    }
+
+    pub fn raw_dim(&self) -> D {
+        self.dim.clone()
+    }
+}
diff --git a/models/linear/src/model/linear.rs b/models/linear/src/model/linear.rs
index 08dad5ad..f704f4a2 100644
--- a/models/linear/src/model/linear.rs
+++ b/models/linear/src/model/linear.rs
@@ -5,7 +5,7 @@
 use crate::model::Config;
 use crate::params::LinearParams;
 use concision::prelude::{Predict, Result};
-use nd::{Ix2, RemoveAxis};
+use nd::{Array, Ix2, RemoveAxis};
 
 /// Linear model
 pub struct Linear
@@ -42,6 +42,22 @@ where
         &self.config
     }
 
+    pub fn bias(&self) -> Option<&Array<T, D::Smaller>> {
+        self.params.bias()
+    }
+
+    pub fn bias_mut(&mut self) -> Option<&mut Array<T, D::Smaller>> {
+        self.params.bias_mut()
+    }
+
+    pub fn weights(&self) -> &Array<T, D> {
+        self.params.weights()
+    }
+
+    pub fn weights_mut(&mut self) -> &mut Array<T, D> {
+        self.params.weights_mut()
+    }
+
     pub fn params(&self) -> &LinearParams<T, D> {
         &self.params
     }
@@ -54,9 +70,11 @@ where
         self.config().is_biased() || self.params().is_biased()
     }
 
-    pub fn activate<X, Y, F>(&self, args: &nd::Array<X, D>, func: F) -> Result<Y> where F: for <'a> Fn(&'a Y) -> Y, Self: Predict<nd::Array<X, D>, Output = Y> {
+    pub fn activate<X, Y, F>(&self, args: &X, func: F) -> Result<Y>
+    where
+        F: for<'a> Fn(&'a Y) -> Y,
+        Self: Predict<X, Output = Y>,
+    {
         Ok(func(&self.predict(args)?))
     }
 }
-
-
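`activate` simply post-composes `func` with `predict`, and the `predict` impls that follow in `impl_params.rs` compute the affine map `y = x · Wᵀ (+ b)` with weights stored as `(outputs, inputs)`. A quick, standalone shape check in plain `ndarray`; the names `x`, `w`, and `b` are illustrative, not part of the patch:

```rust
use ndarray::{Array1, Array2};

fn main() {
    let x = Array2::<f64>::ones((20, 5)); // (samples, inputs)
    let w = Array2::<f64>::ones((3, 5));  // (outputs, inputs), as stored by LinearParams
    let b = Array1::<f64>::zeros(3);      // one entry per output feature

    // x · wᵀ: (20, 5) × (5, 3) -> (20, 3); the bias broadcasts across rows
    let y = x.dot(&w.t()) + &b;
    assert_eq!(y.dim(), (20, 3));
}
```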
diff --git a/models/linear/src/model/mod.rs b/models/linear/src/model/mod.rs
index 466b077d..371e766b 100644
--- a/models/linear/src/model/mod.rs
+++ b/models/linear/src/model/mod.rs
@@ -2,12 +2,25 @@
     Appellation: model
     Contrib: FL03
 */
-pub use self::{config::*, features::*, linear::*};
+pub use self::{config::Config, linear::Linear};
+
+pub use self::layout::prelude::*;
 
 mod linear;
 
 pub mod config;
-pub mod features;
+
+pub mod layout {
+    pub use self::{features::*, layout::*};
+
+    mod features;
+    mod layout;
+
+    pub(crate) mod prelude {
+        pub use super::features::Features;
+        pub use super::layout::Layout;
+    }
+}
 
 mod impls {
     pub mod impl_init;
@@ -16,7 +29,5 @@ mod impls {
 }
 
 pub(crate) mod prelude {
-    pub use super::config::Config as LinearConfig;
-    pub use super::features::Features as LinearFeatures;
     pub use super::linear::Linear;
 }
diff --git a/models/linear/src/params/impls/impl_params.rs b/models/linear/src/params/impls/impl_params.rs
index 3a5f4ac1..c5f29ec8 100644
--- a/models/linear/src/params/impls/impl_params.rs
+++ b/models/linear/src/params/impls/impl_params.rs
@@ -8,6 +8,22 @@ use concision::prelude::{Predict, PredictError};
 use core::ops::Add;
 use nd::linalg::Dot;
 use nd::*;
+use num::complex::ComplexFloat;
+
+impl<A, D, S> LinearParamsBase<A, D, S>
+where
+    D: RemoveAxis,
+    S: RawData<Elem = A>,
+{
+    pub fn activate<X, Y, F>(&mut self, args: &X, f: F) -> Y
+    where
+        F: for<'a> Fn(&'a Y) -> Y,
+        S: Data,
+        Self: concision::Predict<X, Output = Y>,
+    {
+        f(&self.predict(args).unwrap())
+    }
+}
 
 impl Biased for LinearParamsBase
 where
@@ -55,15 +71,15 @@ where
     B: for<'a> Add<&'a ArrayBase<S, D::Smaller>, Output = B>,
     D: RemoveAxis,
     S: Data,
-    T: NdFloat,
+    T: ComplexFloat,
 {
     type Output = B;
 
     fn predict(&self, input: &A) -> Result<B, PredictError> {
         let wt = self.weights().t().to_owned();
-        let res = input.dot(&wt);
+        let mut res = input.dot(&wt);
         if let Some(bias) = self.bias() {
-            return Ok(res + bias);
+            res = res + bias;
         }
         Ok(res)
     }
@@ -81,9 +97,9 @@ where
 
     fn predict(&self, input: &A) -> Result<B, PredictError> {
         let wt = self.weights().t().to_owned();
-        let res = input.dot(&wt);
+        let mut res = input.dot(&wt);
         if let Some(bias) = self.bias() {
-            return Ok(res + bias);
+            res = res + bias;
         }
         Ok(res)
     }
@@ -109,7 +125,8 @@ where
     D: Copy + RemoveAxis,
     S: Copy + RawDataClone,
     <D as Dimension>::Smaller: Copy,
-{}
+{
+}
 
 impl PartialEq for LinearParamsBase
 where
@@ -122,7 +139,8 @@ where
     }
 }
 
-impl PartialEq<(ArrayBase<S, D>, Option<ArrayBase<S, D::Smaller>>)> for LinearParamsBase<A, D, S>
+impl PartialEq<(ArrayBase<S, D>, Option<ArrayBase<S, D::Smaller>>)>
+    for LinearParamsBase<A, D, S>
 where
     A: PartialEq,
     D: RemoveAxis,
@@ -146,4 +164,4 @@ where
     }
     cmp
 }
-}
\ No newline at end of file
+}
diff --git a/models/linear/src/params/impls/impl_rand.rs b/models/linear/src/params/impls/impl_rand.rs
index f7350aa6..8e968df2 100644
--- a/models/linear/src/params/impls/impl_rand.rs
+++ b/models/linear/src/params/impls/impl_rand.rs
@@ -2,8 +2,7 @@
     Appellation: rand
     Contrib: FL03
 */
-#![cfg(feature = "rand")]
-
+use crate::bias_dim;
 use crate::params::LinearParamsBase;
 use concision::prelude::GenerateRandom;
 use nd::*;
@@ -25,16 +24,26 @@ where
 
     pub fn init_bias(mut self) -> Self {
         let dk = (A::one() / A::from(self.inputs()).unwrap()).sqrt();
-        let dim = self
-            .features()
-            .remove_axis(Axis(self.features().ndim() - 1));
+        let dim = bias_dim(self.raw_dim());
         self.bias = Some(Array::uniform_between(dk, dim));
         self
     }
 
     pub fn init_weight(mut self) -> Self {
         let dk = (A::one() / A::from(self.inputs()).unwrap()).sqrt();
self.weights = Array::uniform_between(dk, self.raw_dim()); self } + + pub fn uniform(self) -> Self { + let dk = (A::one() / A::from(self.inputs()).unwrap()).sqrt(); + let bias = if self.is_biased() { + let dim = bias_dim(self.raw_dim()); + Some(Array::uniform_between(dk, dim)) + } else { + None + }; + let weights = Array::uniform_between(dk, self.raw_dim()); + Self { bias, weights } + } } diff --git a/models/linear/src/params/mod.rs b/models/linear/src/params/mod.rs index 2f747ecb..99d7e06e 100644 --- a/models/linear/src/params/mod.rs +++ b/models/linear/src/params/mod.rs @@ -9,6 +9,7 @@ mod params; mod impls { mod impl_params; + #[cfg(feature = "rand")] mod impl_rand; mod impl_serde; } diff --git a/models/linear/src/params/params.rs b/models/linear/src/params/params.rs index b695c01e..3021af3a 100644 --- a/models/linear/src/params/params.rs +++ b/models/linear/src/params/params.rs @@ -2,8 +2,7 @@ Appellation: params Contrib: FL03 */ -use crate::build_bias; -use crate::model::Features; +use crate::{build_bias, Features}; use core::ops; use nd::linalg::Dot; use nd::*; @@ -54,24 +53,24 @@ where constructor!(ones where A: Clone + One, S: DataOwned); constructor!(zeros where A: Clone + Zero, S: DataOwned); - pub fn new(biased: bool, dim: impl IntoDimension) -> Self + pub fn new(shape: Sh) -> Self where A: Clone + Default, S: DataOwned, + Sh: ShapeBuilder, { - let dim = dim.into_dimension(); Self { - bias: build_bias(biased, dim.clone(), |dim| ArrayBase::default(dim)), - weights: ArrayBase::default(dim), + bias: None, + weights: ArrayBase::default(shape), } } pub fn biased(self, builder: F) -> Self where - F: FnOnce(D::Smaller) -> ArrayBase, + F: Fn(D::Smaller) -> ArrayBase, { Self { - bias: Some(builder(self.weights.raw_dim().remove_axis(Axis(0)))), + bias: build_bias(true, self.raw_dim(), builder), ..self } } @@ -80,17 +79,6 @@ where Self { bias: None, ..self } } - pub fn activate(&mut self, f: F) -> LinearParamsBase, D> - where - F: for<'a> Fn(&'a A) -> A, - S: Data, - { - LinearParamsBase { - bias: self.bias().map(|b| b.map(|b| f(b))), - weights: self.weights().map(|w| f(w)), - } - } - pub fn bias(&self) -> Option<&ArrayBase> { self.bias.as_ref() } @@ -99,16 +87,24 @@ where self.bias.as_mut() } - pub fn features(&self) -> D { - self.weights.raw_dim() + pub fn weights(&self) -> &ArrayBase { + &self.weights + } + + pub fn weights_mut(&mut self) -> &mut ArrayBase { + &mut self.weights + } + + pub fn features(&self) -> Features { + Features::from_shape(self.shape()) } pub fn inputs(&self) -> usize { - self.weights.shape().last().unwrap().clone() + self.features().dmodel() } pub fn is_biased(&self) -> bool { - self.bias.is_some() + self.bias().is_some() } pub fn linear(&self, data: &T) -> B @@ -125,19 +121,23 @@ where dot } + pub fn ndim(&self) -> usize { + self.weights().ndim() + } + pub fn outputs(&self) -> usize { - if self.features().ndim() == 1 { + if self.ndim() == 1 { return 1; } - self.weights.shape().first().unwrap().clone() + self.shape()[1] } - pub fn weights(&self) -> &ArrayBase { - &self.weights + pub fn raw_dim(&self) -> D { + self.weights().raw_dim() } - pub fn weights_mut(&mut self) -> &mut ArrayBase { - &mut self.weights + pub fn shape(&self) -> &[usize] { + self.weights().shape() } } diff --git a/models/linear/src/utils.rs b/models/linear/src/utils.rs index 25ddf7bd..f5031362 100644 --- a/models/linear/src/utils.rs +++ b/models/linear/src/utils.rs @@ -2,22 +2,32 @@ Appellation: utils Contrib: FL03 */ -use concision::Decrement; use nd::*; -pub(crate) fn build_bias( - biased: 
bool, - dim: D, - builder: F, -) -> Option> +pub(crate) fn build_bias(biased: bool, dim: D, builder: F) -> Option> where + D: RemoveAxis, + E: Dimension, + F: Fn(E) -> ArrayBase, S: RawData, - D: RemoveAxis, - F: Fn(D::Smaller) -> ArrayBase, { + let dim = bias_dim(dim); if biased { - Some(builder(dim.dec())) + println!("Bias dimension: {:?}", &dim); + Some(builder(dim)) } else { None } } + +pub(crate) fn bias_dim(dim: D) -> E +where + D: RemoveAxis, + E: Dimension, +{ + if dim.ndim() == 1 { + dim.remove_axis(Axis(0)) + } else { + dim.remove_axis(Axis(1)) + } +} diff --git a/models/linear/tests/model.rs b/models/linear/tests/model.rs index 23b30112..f5a0b3c1 100644 --- a/models/linear/tests/model.rs +++ b/models/linear/tests/model.rs @@ -5,8 +5,8 @@ extern crate concision_core as concision; extern crate concision_linear as linear; -use concision::{linarr, Predict}; use concision::func::Sigmoid; +use concision::{linarr, Predict}; use linear::{Config, Features, Linear}; use lazy_static::lazy_static; @@ -17,7 +17,7 @@ const INPUTS: usize = 5; const OUTPUT: usize = 3; lazy_static! { - static ref FEATURES: Features = Features::new(INPUTS, OUTPUT); + static ref FEATURES: Features = Features::new(OUTPUT, INPUTS); static ref CONFIG: Config = Config::new("test", FEATURES.clone()); static ref SAMPLE_DATA: Array = linarr::((SAMPLES, INPUTS)).unwrap(); static ref SHAPE: (usize, usize, usize) = (SAMPLES, INPUTS, OUTPUT); @@ -25,10 +25,14 @@ lazy_static! { #[test] fn test_linear() { - let (samples, _inputs, outputs) = *SHAPE; - let data = SAMPLE_DATA.clone(); + let (samples, inputs, outputs) = (20, 5, 3); + + let features = Features::new(outputs, inputs); + let config = Config::new("test", features.clone()).biased(); + + let data = linarr::((samples, inputs)).unwrap(); - let model: Linear = Linear::std(CONFIG.clone()).uniform(); + let model: Linear = Linear::std(config).uniform(); let y = model.activate(&data, Sigmoid::sigmoid).unwrap(); assert_eq!(y.shape(), &[samples, outputs]); diff --git a/models/linear/tests/params.rs b/models/linear/tests/params.rs index 8cb2f7f6..1a602727 100644 --- a/models/linear/tests/params.rs +++ b/models/linear/tests/params.rs @@ -9,13 +9,16 @@ use concision::Predict; use linear::{Features, LinearParams}; use ndarray::*; +const SAMPLES: usize = 20; +const INPUTS: usize = 5; +const DMODEL: usize = 3; + #[test] -#[ignore = "Needs to be fixed;"] fn test_linear_params() { - let (samples, inputs, outputs) = (20, 5, 3); - let features = Features::new(inputs, outputs); + let (samples, inputs, outputs) = (SAMPLES, INPUTS, DMODEL); + let features = Features::new(outputs, inputs); let data = Array2::::zeros((samples, inputs)); - let params = LinearParams::default(true, features.clone()).init_uniform(true); + let params = LinearParams::default(true, features.clone()).uniform(); let y = params.predict(&data).unwrap(); assert_eq!(y.dim(), (samples, outputs)); } From 39c2499e22d5c5dee391c35f92e02bd65cdae7ed Mon Sep 17 00:00:00 2001 From: Joe McCain III Date: Tue, 7 May 2024 12:25:30 -0500 Subject: [PATCH 15/15] update Signed-off-by: Joe McCain III --- concision/examples/linear.rs | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/concision/examples/linear.rs b/concision/examples/linear.rs index 8a66df47..5a5e4ee9 100644 --- a/concision/examples/linear.rs +++ b/concision/examples/linear.rs @@ -9,14 +9,13 @@ use cnc::linear::{Config, Features, Linear}; use cnc::{linarr, Predict, Result}; fn tracing() { - use tracing::Level::DEBUG; + use tracing::Level; use 
tracing_subscriber::fmt::time;
 
     tracing_subscriber::fmt()
         .compact()
         .with_ansi(true)
-        .with_level(true)
-        .with_max_level(DEBUG)
+        .with_max_level(Level::DEBUG)
         .with_target(false)
         .with_timer(time::uptime())
         .init();
@@ -27,8 +26,8 @@ fn main() -> Result<()> {
     tracing::info!("Starting linear model example");
 
     let (samples, dmodel, features) = (20, 5, 3);
-    let features = Features::new(features, dmodel);
-    let config = Config::new("example", features).biased();
+    let shape = Features::new(features, dmodel);
+    let config = Config::new("example", shape).biased();
     let data = linarr::((samples, dmodel)).unwrap();
 
     let model: Linear = Linear::std(config).uniform();
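Patch 15's rename to `shape` avoids shadowing the `features` count, which the later `assert_eq!(y.dim(), (samples, features))` still needs. The series also settles the argument order as `Features::new(features, dmodel)`, i.e. `(outputs, inputs)`. A small sanity check against the accessors and conversions added in patch 14's `layout/features.rs`, written test-style with `concision-linear` linked as in the test files:

```rust
extern crate concision_linear as linear;

use linear::Features;
use ndarray::IntoDimension;

fn main() {
    // Features::new(features, dmodel) takes (outputs, inputs)
    let f = Features::new(3, 5);
    assert_eq!(f.features(), 3);
    assert_eq!(f.dmodel(), 5);
    // the weight matrix is carried as (outputs, inputs) = (3, 5)
    assert_eq!(f.into_pattern(), (3, 5));
    assert_eq!(f.into_dimension(), ndarray::Ix2(3, 5));
}
```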