feat/serialization: add deserialization
CLOSES #14
hobofan committed Apr 17, 2016
1 parent cb1a1b4 commit df7c9d8
Showing 12 changed files with 424 additions and 12 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -1,2 +1,4 @@
target
Cargo.lock

mynetwork
34 changes: 25 additions & 9 deletions .travis.yml
@@ -28,16 +28,30 @@ script:
travis-cargo --only stable doc -- --no-default-features --features $FEATURES
addons:
  apt:
    sources:
      - ubuntu-toolchain-r-test
    packages:
      - libcurl4-openssl-dev
      - libelf-dev
      - libdw-dev
      - libblas-dev
      - fglrx
      - opencl-headers
      - binutils-dev
      - nvidia-opencl-dev

      - libcurl4-openssl-dev
      - libelf-dev
      - libdw-dev
      - libblas-dev
      - fglrx
      - opencl-headers
      - binutils-dev
      - nvidia-opencl-dev
      - gcc-4.8
      - g++-4.8
install:
  - git clone https://github.com/kentonv/capnproto.git
  - cd capnproto/c++
  - git checkout tags/v0.5.3
  - ./setup-autotools.sh
  - autoreconf -i
  - ./configure --disable-shared
  - make -j5
  - export PATH="$PATH:$(pwd)"
  - export LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$(pwd)"
  - cd ../..
after_success:
  - travis-cargo doc-upload
  - travis-cargo coveralls --no-sudo --verify
@@ -46,6 +60,8 @@ notifications:
    on_success: never
env:
  global:
    - CC=gcc-4.8
    - CXX=g++-4.8
    - secure: QcJ9u0BrVpvjYnerd/3dukvM+GLFQNikIoDHhtKjVenuM2ozZtW6+/RyyXVC1YMh/SghwTnu4Kcnv1sdmwuiC5KWdPoppfalXdxafPkl5PGEfTOexe6L5UAJNW6BdA4lbRKM3xnaUg0Guq6x6tD/zdABIkh8nym/gRLGKT40e9Xitkf6wUQqPBHTGZimip59qg5Fty8lAD48pCBEXynJm+ihA2tz6EDhp0/7wvieHyEl/FqNwvUL5+Z9EeTzEJfKNF8PA5DTHkgeXgeCnWKLm8cCdPEziRZlgdQtvIW27oZBkNTQGHyqI9/tVYhaW4AeKstzE5BoJuyRzmerWYRQCNiz8bgyAjc5HnpWLJPmPSFaGBWTRzwYwUk/iOUP4YEZiN3p0Xj1sKgSB0TA2AjKWND7cufwjrW8NdPdZ3hURVOnM8DHYSQMm2HOfbUNnkw+P5M8n+flT2HKWFdnPhJ3n12rDlLYdHeg9PQ3emJ6kE8Y/jrNT+6yZRrSwLQnsV0uU8Ii44MFQHpdUOGuOIxZFGh9rjKsUwhruUpGtbwI4FWPOqiQJvIaBFY1IUjIVlVCZevvIG3fPXvPksIEKwK93hM/ThDi2PLq2qwBpA87RNfKxDG4S0aR2j19IG+ludbpPcP95mYFVnGCb4rpj44iZoCifC8c9tVqC4L85hEGzik=
  matrix:
    - FEATURES=travis
8 changes: 8 additions & 0 deletions src/capnp_util.rs
@@ -7,3 +7,11 @@ pub trait CapnpWrite<'a> {
    /// Write the struct into the message that is being built by the Builder.
    fn write_capnp(&self, builder: &mut Self::Builder);
}

pub trait CapnpRead<'a> {
    /// The Reader that was autogenerated by capnp.
    type Reader;

    /// Read the struct from the Reader.
    fn read_capnp(reader: Self::Reader) -> Self;
}
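`CapnpRead` mirrors the existing `CapnpWrite` trait, so every serializable type pairs a writer with a reader. Below is a minimal, self-contained sketch of how the pair fits together; `MockBuilder`, `MockReader`, and `ExampleConfig` are hypothetical stand-ins for the capnpc-generated types and a real config struct, not part of this commit:

```rust
// Minimal sketch of the CapnpWrite/CapnpRead pairing. MockBuilder and
// MockReader are hypothetical stand-ins for the capnpc-generated types.
pub trait CapnpWrite<'a> {
    type Builder;
    fn write_capnp(&self, builder: &mut Self::Builder);
}

pub trait CapnpRead<'a> {
    type Reader;
    fn read_capnp(reader: Self::Reader) -> Self;
}

pub struct MockBuilder { pub output_size: u64 }
pub struct MockReader { pub output_size: u64 }

pub struct ExampleConfig { pub output_size: usize }

impl<'a> CapnpWrite<'a> for ExampleConfig {
    type Builder = MockBuilder;
    fn write_capnp(&self, builder: &mut Self::Builder) {
        builder.output_size = self.output_size as u64;
    }
}

impl<'a> CapnpRead<'a> for ExampleConfig {
    type Reader = MockReader;
    fn read_capnp(reader: Self::Reader) -> Self {
        ExampleConfig { output_size: reader.output_size as usize }
    }
}

fn main() {
    // Round-trip: write the config out and read an equal one back.
    let config = ExampleConfig { output_size: 10 };
    let mut builder = MockBuilder { output_size: 0 };
    config.write_capnp(&mut builder);
    let restored = ExampleConfig::read_capnp(MockReader { output_size: builder.output_size });
    assert_eq!(restored.output_size, 10);
}
```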
198 changes: 195 additions & 3 deletions src/layer.rs
@@ -10,7 +10,7 @@ use std::fmt;
use std::cmp;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io;
use std::io::{self, BufReader};
use std::path::Path;
use std::rc::Rc;
use std::sync::{Arc, RwLock};
@@ -455,7 +455,7 @@ impl<B: IBackend> Layer<B> {
            // reshape input tensor to the reshaped shape
            let old_shape = self.input_blobs_data[input_i].read().unwrap().desc().clone();
            if old_shape.size() != reshaped_shape.size() {
                panic!("The provided input does not have the expected shape");
                panic!("The provided input does not have the expected shape of {:?}", reshaped_shape);
            }
            self.input_blobs_data[input_i].write().unwrap().reshape(&reshaped_shape).unwrap();
        }
@@ -583,6 +583,39 @@ impl<B: IBackend> Layer<B> {
    /// Serialize the Layer and its weights to a Cap'n Proto file at the specified path.
    ///
    /// You can find the capnp schema [here](../../../../capnp/leaf.capnp).
    ///
    /// ```
    /// # #[cfg(feature = "native")]
    /// # mod native {
    /// # use std::rc::Rc;
    /// # use leaf::layer::*;
    /// # use leaf::layers::*;
    /// # use leaf::util;
    /// # pub fn test() {
    /// #
    /// let mut net_cfg = SequentialConfig::default();
    /// // ... set up network ...
    /// let cfg = LayerConfig::new("network", net_cfg);
    ///
    /// let native_backend = Rc::new(util::native_backend());
    /// let mut layer = Layer::from_config(native_backend, &cfg);
    /// // ... do stuff with the layer ...
    /// // ... and save it
    /// layer.save("mynetwork").unwrap();
    /// #
    /// # }}
    /// #
    /// # #[cfg(not(feature = "native"))]
    /// # mod native {
    /// # pub fn test() {}
    /// # }
    /// #
    /// # fn main() {
    /// #     if cfg!(feature = "native") {
    /// #         ::native::test();
    /// #     }
    /// # }
    /// ```
    pub fn save<P: AsRef<Path>>(&mut self, path: P) -> io::Result<()> {
        let path = path.as_ref();
        let ref mut out = try!(File::create(path));
@@ -597,6 +630,92 @@ impl<B: IBackend> Layer<B> {
        Ok(())
    }

    /// Read a Cap'n Proto file at the specified path and deserialize the Layer inside it.
    ///
    /// You can find the capnp schema [here](../../../../capnp/leaf.capnp).
    ///
    /// ```
    /// # extern crate leaf;
    /// # extern crate collenchyma;
    /// # #[cfg(feature = "native")]
    /// # mod native {
    /// # use std::rc::Rc;
    /// # use leaf::layer::*;
    /// # use leaf::layers::*;
    /// # use leaf::util;
    /// use collenchyma::prelude::*;
    /// # pub fn test() {
    ///
    /// let native_backend = Rc::new(util::native_backend());
    /// # let mut net_cfg = SequentialConfig::default();
    /// # let cfg = LayerConfig::new("network", net_cfg);
    /// # let mut layer = Layer::from_config(native_backend.clone(), &cfg);
    /// # layer.save("mynetwork").unwrap();
    /// // Load layer from file "mynetwork"
    /// let layer = Layer::<Backend<Native>>::load(native_backend, "mynetwork").unwrap();
    /// #
    /// # }}
    /// #
    /// # #[cfg(not(feature = "native"))]
    /// # mod native {
    /// # pub fn test() {}
    /// # }
    /// #
    /// # fn main() {
    /// #     if cfg!(feature = "native") {
    /// #         ::native::test();
    /// #     }
    /// # }
    /// ```
    pub fn load<LB: IBackend + LayerOps<f32> + 'static, P: AsRef<Path>>(backend: Rc<LB>, path: P) -> io::Result<Layer<LB>> {
        let path = path.as_ref();
        let ref mut file = try!(File::open(path));
        let mut reader = BufReader::new(file);

        let message_reader = ::capnp::serialize_packed::read_message(&mut reader,
                                                                     ::capnp::message::ReaderOptions::new()).unwrap();
        let read_layer = message_reader.get_root::<capnp_layer::Reader>().unwrap();

        let name = read_layer.get_name().unwrap().to_owned();
        let layer_config = LayerConfig::read_capnp(read_layer.get_config().unwrap());
        let mut layer = Layer::from_config(backend, &layer_config);
        layer.name = name;

        let read_weights = read_layer.get_weights_data().unwrap();

        let names = layer.learnable_weights_names();
        let weights_data = layer.learnable_weights_data();

        let native_backend = Backend::<Native>::default().unwrap();
        for (name, weight) in names.iter().zip(weights_data) {
            for j in 0..read_weights.len() {
                // Look up the serialized weight with a matching name.
                let capnp_weight = read_weights.get(j);
                if capnp_weight.get_name().unwrap() != name {
                    continue
                }

                let mut weight_lock = weight.write().unwrap();
                weight_lock.sync(native_backend.device()).unwrap();

                let capnp_tensor = capnp_weight.get_tensor().unwrap();
                let mut shape = Vec::new();
                let capnp_shape = capnp_tensor.get_shape().unwrap();
                for k in 0..capnp_shape.len() {
                    shape.push(capnp_shape.get(k) as usize)
                }
                weight_lock.reshape(&shape).unwrap();

                let mut native_slice = weight_lock.get_mut(native_backend.device()).unwrap().as_mut_native().unwrap().as_mut_slice::<f32>();
                let data = capnp_tensor.get_data().unwrap();
                for k in 0..data.len() {
                    native_slice[k as usize] = data.get(k);
                }
            }
        }

        Ok(layer)
    }
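The loop above matches weights by name rather than by position: for each learnable weight of the freshly constructed layer it scans the serialized list for an entry with the same name, reshapes the tensor, and copies the data across. A simplified sketch of that matching logic, with plain `Vec`s standing in for the capnp readers and `SharedTensor`s:

```rust
// Simplified sketch of the name-matched weight copy performed by `load`.
// Plain Vecs stand in for the capnp reader and the SharedTensors.
fn copy_weights(names: &[String], weights: &mut [Vec<f32>], saved: &[(String, Vec<f32>)]) {
    for (name, weight) in names.iter().zip(weights.iter_mut()) {
        // Linear scan for the serialized entry with the same name.
        for &(ref saved_name, ref data) in saved {
            if saved_name != name {
                continue;
            }
            weight.clear();
            weight.extend_from_slice(data);
        }
    }
}

fn main() {
    let names = vec!["linear_weights".to_owned()];
    let mut weights = vec![vec![0.0; 4]];
    let saved = vec![("linear_weights".to_owned(), vec![1.0, 2.0, 3.0, 4.0])];
    copy_weights(&names, &mut weights, &saved);
    assert_eq!(weights[0], vec![1.0, 2.0, 3.0, 4.0]);
}
```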

    /// Sets whether the layer should compute gradients w.r.t. a
    /// weight at a particular index given by `weight_id`.
    ///
@@ -672,6 +791,9 @@ impl<B: IBackend> Layer<B> {
    }
}

#[allow(unsafe_code)]
unsafe impl<B: IBackend> Send for Layer<B> {}

impl<'a, B: IBackend> CapnpWrite<'a> for Layer<B> {
    type Builder = capnp_layer::Builder<'a>;

@@ -1269,6 +1391,31 @@ impl<'a> CapnpWrite<'a> for LayerType {
    }
}

impl<'a> CapnpRead<'a> for LayerType {
    type Reader = capnp_layer_type::Reader<'a>;

    fn read_capnp(reader: Self::Reader) -> Self {
        match reader.which().unwrap() {
            #[cfg(all(feature="cuda", not(feature="native")))]
            capnp_layer_type::Which::Convolution(read_config) => { let config = ConvolutionConfig::read_capnp(read_config.unwrap()); LayerType::Convolution(config) },
            #[cfg(not(all(feature="cuda", not(feature="native"))))]
            capnp_layer_type::Which::Convolution(_) => { panic!("Cannot load the network because the Convolution layer is not supported with the enabled feature flags.") },
            capnp_layer_type::Which::Linear(read_config) => { let config = LinearConfig::read_capnp(read_config.unwrap()); LayerType::Linear(config) },
            capnp_layer_type::Which::LogSoftmax(_) => { LayerType::LogSoftmax },
            #[cfg(all(feature="cuda", not(feature="native")))]
            capnp_layer_type::Which::Pooling(read_config) => { let config = PoolingConfig::read_capnp(read_config.unwrap()); LayerType::Pooling(config) },
            #[cfg(not(all(feature="cuda", not(feature="native"))))]
            capnp_layer_type::Which::Pooling(_) => { panic!("Cannot load the network because the Pooling layer is not supported with the enabled feature flags.") },
            capnp_layer_type::Which::Sequential(read_config) => { let config = SequentialConfig::read_capnp(read_config.unwrap()); LayerType::Sequential(config) },
            capnp_layer_type::Which::Softmax(_) => { LayerType::Softmax },
            capnp_layer_type::Which::Relu(_) => { LayerType::ReLU },
            capnp_layer_type::Which::Sigmoid(_) => { LayerType::Sigmoid },
            capnp_layer_type::Which::NegativeLogLikelihood(read_config) => { let config = NegativeLogLikelihoodConfig::read_capnp(read_config.unwrap()); LayerType::NegativeLogLikelihood(config) },
            capnp_layer_type::Which::Reshape(read_config) => { let config = ReshapeConfig::read_capnp(read_config.unwrap()); LayerType::Reshape(config) },
        }
    }
}
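The `cfg`-gated arms keep the match exhaustive under every feature combination: a layer type that is compiled out still gets an arm, but one that panics with a clear message instead of failing to compile. A self-contained illustration of the pattern (the `cuda` feature name is reused purely for illustration):

```rust
// Each variant gets exactly one live arm for any feature set: a real
// handler when the feature is enabled, a panicking stub otherwise.
enum Which { Convolution, Linear }

fn describe(which: Which) -> &'static str {
    match which {
        #[cfg(feature = "cuda")]
        Which::Convolution => "convolution (supported)",
        #[cfg(not(feature = "cuda"))]
        Which::Convolution => panic!("Convolution is not supported with the enabled feature flags"),
        Which::Linear => "linear",
    }
}

fn main() {
    println!("{}", describe(Which::Linear));
}
```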

impl LayerConfig {
    /// Creates a new LayerConfig
    pub fn new<L: Into<LayerType>>(name: &str, layer_type: L) -> LayerConfig {
@@ -1338,9 +1485,13 @@ impl LayerConfig {
Err("propagate_down config must be specified either 0 or inputs_len times")
}
}
}

impl<'a> CapnpWrite<'a> for LayerConfig {
type Builder = capnp_layer_config::Builder<'a>;

/// Write the LayerConfig into a capnp message.
pub fn write_capnp(&self, builder: &mut capnp_layer_config::Builder) {
fn write_capnp(&self, builder: &mut Self::Builder) {
builder.set_name(&self.name);
{
let mut layer_type = builder.borrow().init_layer_type();
@@ -1373,3 +1524,44 @@
        }
    }
}

impl<'a> CapnpRead<'a> for LayerConfig {
    type Reader = capnp_layer_config::Reader<'a>;

    fn read_capnp(reader: Self::Reader) -> Self {
        let name = reader.get_name().unwrap().to_owned();
        let layer_type = LayerType::read_capnp(reader.get_layer_type());

        let read_outputs = reader.get_outputs().unwrap();
        let mut outputs = Vec::new();
        for i in 0..read_outputs.len() {
            outputs.push(read_outputs.get(i).unwrap().to_owned())
        }
        let read_inputs = reader.get_inputs().unwrap();
        let mut inputs = Vec::new();
        for i in 0..read_inputs.len() {
            inputs.push(read_inputs.get(i).unwrap().to_owned())
        }

        let read_params = reader.get_params().unwrap();
        let mut params = Vec::new();
        for i in 0..read_params.len() {
            params.push(WeightConfig::read_capnp(read_params.get(i)))
        }

        let read_propagate_down = reader.get_propagate_down().unwrap();
        let mut propagate_down = Vec::new();
        for i in 0..read_propagate_down.len() {
            propagate_down.push(read_propagate_down.get(i))
        }

        LayerConfig {
            name: name,
            layer_type: layer_type,
            outputs: outputs,
            inputs: inputs,
            params: params,
            propagate_down: propagate_down,
        }
    }
}
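Every `read_capnp` implementation repeats the same index-based walk that turns a capnp list into a `Vec` (capnp list readers are indexed by `u32`). A hypothetical helper, not part of this commit, could collapse that pattern:

```rust
// Hypothetical helper (not in this commit): walk a u32-indexed list
// through a closure and collect the converted elements into a Vec.
fn list_to_vec<T, F: Fn(u32) -> T>(len: u32, get: F) -> Vec<T> {
    (0..len).map(get).collect()
}

fn main() {
    // Stand-in for e.g. `capnp_shape.len()` / `capnp_shape.get(i) as usize`:
    let raw = [2u32, 3, 4];
    let shape: Vec<usize> = list_to_vec(raw.len() as u32, |i| raw[i as usize] as usize);
    assert_eq!(shape, vec![2, 3, 4]);
}
```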
31 changes: 31 additions & 0 deletions src/layers/common/convolution.rs
@@ -264,6 +264,37 @@ impl<'a> CapnpWrite<'a> for ConvolutionConfig {
    }
}

impl<'a> CapnpRead<'a> for ConvolutionConfig {
    type Reader = capnp_config::Reader<'a>;

    fn read_capnp(reader: Self::Reader) -> Self {
        let num_output = reader.get_num_output() as usize;

        let read_filter_shape = reader.get_filter_shape().unwrap();
        let mut filter_shape = Vec::new();
        for i in 0..read_filter_shape.len() {
            filter_shape.push(read_filter_shape.get(i) as usize)
        }
        let read_stride = reader.get_stride().unwrap();
        let mut stride = Vec::new();
        for i in 0..read_stride.len() {
            stride.push(read_stride.get(i) as usize)
        }
        let read_padding = reader.get_padding().unwrap();
        let mut padding = Vec::new();
        for i in 0..read_padding.len() {
            padding.push(read_padding.get(i) as usize)
        }

        ConvolutionConfig {
            num_output: num_output,
            filter_shape: filter_shape,
            stride: stride,
            padding: padding,
        }
    }
}

#[cfg(test)]
mod tests {
    use co::*;
12 changes: 12 additions & 0 deletions src/layers/common/linear.rs
@@ -195,6 +195,18 @@ impl<'a> CapnpWrite<'a> for LinearConfig {
    }
}

impl<'a> CapnpRead<'a> for LinearConfig {
    type Reader = capnp_config::Reader<'a>;

    fn read_capnp(reader: Self::Reader) -> Self {
        let output_size = reader.get_output_size() as usize;

        LinearConfig {
            output_size: output_size
        }
    }
}

impl Into<LayerType> for LinearConfig {
    fn into(self) -> LayerType {
        LayerType::Linear(self)