Commit

feat/serialization: add serialization
hobofan committed Apr 6, 2016
1 parent de0eb7c commit cb1a1b4
Showing 13 changed files with 440 additions and 23 deletions.
9 changes: 8 additions & 1 deletion Cargo.toml
@@ -13,6 +13,8 @@ readme = "README.md"
keywords = ["deep-learning", "neural-networks", "machine-learning", "framework"]
license = "MIT OR Apache-2.0"

build = "build.rs"

[dependencies]
collenchyma = { version = "0.0.8", default-features = false, features = ["native"] } # native feature to read/write data into tensors
collenchyma-blas = { version = "0.2.0", default-features = false, features = ["native"] } # only compiles with native feature
@@ -22,10 +24,15 @@ log = "0.3.2"
rand = "0.3.0"
num = "0.1"

clippy = { version = "0.0.41", optional = true }
capnp = "0.6.2"

timeit = "0.1.2"

clippy = { version = "0.0.41", optional = true }

[build-dependencies]
capnpc = "0.6.1"

[dev-dependencies]
env_logger = "0.3"

5 changes: 5 additions & 0 deletions build.rs
@@ -0,0 +1,5 @@
extern crate capnpc;

fn main() {
::capnpc::compile("capnp", &["capnp/leaf.capnp"]).unwrap();
}
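
The new build.rs runs the Cap'n Proto schema compiler at build time, so Rust bindings for capnp/leaf.capnp are regenerated into OUT_DIR on every build. As a minimal sketch (not part of this diff), the generated code is conventionally pulled into the crate like this, assuming capnpc's default naming of leaf.capnp -> leaf_capnp.rs; the module name and location are illustrative:

// Somewhere in the crate root (e.g. src/lib.rs) -- illustrative only.
pub mod leaf_capnp {
    // Pull in the bindings that capnpc generated into OUT_DIR at build time.
    include!(concat!(env!("OUT_DIR"), "/leaf_capnp.rs"));
}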
92 changes: 92 additions & 0 deletions capnp/leaf.capnp
@@ -0,0 +1,92 @@
@0x8316e0f30c445924;

# The structs here try to mirror all the *Config structs as closely as possible.
# Before changing anything take a look at https://capnproto.org/language.html#evolving-your-protocol

struct Weight {
name @0 :Text;
tensor @1 :Tensor;
}

struct Tensor {
shape @0 :List(UInt64);
data @1 :List(Float32);
}

struct Layer {
name @0 :Text;
config @1 :LayerConfig;
weightsData @2 :List(Weight);
}

struct LayerConfig {
name @0 :Text;
layerType :union {
# Common layers
convolution @1 :ConvolutionConfig;
linear @2 :LinearConfig;
logSoftmax @3 :Void;
pooling @4 :PoolingConfig;
sequential @5 :SequentialConfig;
softmax @6 :Void;
# Activation layers
relu @7 :Void;
sigmoid @8 :Void;
# Loss layers
negativeLogLikelihood @9 :NegativeLogLikelihoodConfig;
# Utility layers
reshape @10 :ReshapeConfig;
}

outputs @11 :List(Text);
inputs @12 :List(Text);
params @13 :List(WeightConfig);
propagateDown @14 :List(Bool);
}

# TODO: incomplete since WeightConfig isn't really used internally in Leaf.
struct WeightConfig {
name @0 :Text;
}

struct ConvolutionConfig {
numOutput @0 :UInt64;
filterShape @1 :List(UInt64);
stride @2 :List(UInt64);
padding @3 :List(UInt64);
}

struct LinearConfig {
outputSize @0 :UInt64;
}

struct PoolingConfig {
mode @0 :PoolingMode;
filterShape @1 :List(UInt64);
stride @2 :List(UInt64);
padding @3 :List(UInt64);
}

enum PoolingMode {
max @0;
  average @1; # not implemented yet, but we can't create a single-variant enum, so this is better than a meaningless "Dummy" value.
}

struct SequentialConfig {
layers @0 :List(LayerConfig);
inputs @1 :List(ShapedInput);
forceBackward @2 :Bool;
}

struct ShapedInput {
name @0 :Text;
shape @1 :List(UInt64);
}

struct NegativeLogLikelihoodConfig {
numClasses @0 :UInt64;
}

struct ReshapeConfig {
shape @0 :List(UInt64);
}
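
As a rough illustration of how the Tensor struct above is meant to be used, the following sketch serializes a shape and a flat data buffer with capnp's packed encoding. It is not code from this commit: it assumes the generated bindings are exposed as leaf_capnp (see build.rs above) and uses try!, matching the 2016-era Rust this commit targets.

extern crate capnp;

use leaf_capnp::tensor;

/// Illustrative only: write `shape` and `data` as a packed Tensor message.
fn write_tensor<W: ::std::io::Write>(shape: &[usize],
                                     data: &[f32],
                                     out: &mut W)
                                     -> ::capnp::Result<()> {
    let mut message = ::capnp::message::Builder::new_default();
    {
        // Initialize the root Tensor and fill its shape list.
        let tensor = message.init_root::<tensor::Builder>();
        let mut shape_list = tensor.init_shape(shape.len() as u32);
        for (i, dim) in shape.iter().enumerate() {
            shape_list.set(i as u32, *dim as u64);
        }
    }
    {
        // Re-fetch the root (init_shape consumed the builder above) and fill the data list.
        let tensor = try!(message.get_root::<tensor::Builder>());
        let mut data_list = tensor.init_data(data.len() as u32);
        for (i, value) in data.iter().enumerate() {
            data_list.set(i as u32, *value);
        }
    }
    try!(::capnp::serialize_packed::write_message(out, &message));
    Ok(())
}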
9 changes: 9 additions & 0 deletions src/capnp_util.rs
@@ -0,0 +1,9 @@
//! Provides functionality for Cap'n Proto (de)serialization.

pub trait CapnpWrite<'a> {
/// The Builder that was autogenerated by capnp.
type Builder;

/// Write the struct into the message that is being built by the Builder.
fn write_capnp(&self, builder: &mut Self::Builder);
}
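
To make the intent of the trait concrete, here is a hypothetical impl for a minimal stand-in of Leaf's LinearConfig, assuming the generated linear_config bindings from capnp/leaf.capnp are in scope as leaf_capnp; the real implementations for Leaf's own types live in the remaining files of this commit and may differ.

use leaf_capnp::linear_config;

/// Illustrative stand-in for Leaf's LinearConfig.
pub struct LinearConfig {
    pub output_size: usize,
}

impl<'a> CapnpWrite<'a> for LinearConfig {
    // Builder generated by capnpc for `struct LinearConfig` in leaf.capnp.
    type Builder = linear_config::Builder<'a>;

    fn write_capnp(&self, builder: &mut Self::Builder) {
        // outputSize is the only field, so a single setter call suffices.
        builder.set_output_size(self.output_size as u64);
    }
}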