Skip to content

Commit

Permalink
add InputDataLayer for Net input
Browse files Browse the repository at this point in the history
Create an input layer to replace oddball Net `input` fields.

close BVLC#1245
  • Loading branch information
shelhamer committed Oct 17, 2015
1 parent 16de340 commit dd2f0fc
Show file tree
Hide file tree
Showing 3 changed files with 72 additions and 1 deletion.
33 changes: 33 additions & 0 deletions include/caffe/data_layers.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -256,6 +256,39 @@ class ImageDataLayer : public BasePrefetchingDataLayer<Dtype> {
int lines_id_;
};

/**
* @brief Provides data to the Net by assigning tops directly.
*
* This data layer is a container that merely holds the data assigned to it;
* forward, backward, and reshape are all no-ops.
*/
/**
 * @brief Provides data to the Net by assigning tops directly.
 *
 * This data layer is a container that merely holds the data assigned to it;
 * forward, backward, and reshape are all no-ops.
 */
template <typename Dtype>
class InputDataLayer : public Layer<Dtype> {
 public:
  explicit InputDataLayer(const LayerParameter& param)
      : Layer<Dtype>(param) {}
  // Reshapes the tops from the optional 'shape' field(s) of the
  // InputDataParameter; produces no data itself.
  virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top);
  // Data layers should be shared by multiple solvers in parallel
  virtual inline bool ShareInParallel() const { return true; }
  // Data layers have no bottoms, so reshaping is trivial.
  virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {}

  virtual inline const char* type() const { return "InputData"; }
  virtual inline int ExactNumBottomBlobs() const { return 0; }
  virtual inline int MinTopBlobs() const { return 1; }

 protected:
  // All passes are no-ops: the tops hold whatever data was assigned to them
  // externally, so there is nothing to compute in either direction.
  virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {}
  // Explicit GPU no-op for symmetry with Backward_gpu below.
  virtual void Forward_gpu(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {}
  virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
  virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
      const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
};


/**
* @brief Provides data to the Net from memory.
*
Expand Down
29 changes: 29 additions & 0 deletions src/caffe/layers/input_data_layer.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
#include <vector>

#include "caffe/filler.hpp"
#include "caffe/layer.hpp"
#include "caffe/vision_layers.hpp"

namespace caffe {

// Sets up the top blobs from the layer's InputDataParameter.
// The 'shape' field may be given once (broadcast to every top), once per
// top blob, or omitted entirely (tops are left to be reshaped manually).
template <typename Dtype>
void InputDataLayer<Dtype>::LayerSetUp(const vector<Blob<Dtype>*>& bottom,
      const vector<Blob<Dtype>*>& top) {
  const int num_top = top.size();
  const InputDataParameter& param = this->layer_param_.input_data_param();
  const int num_shape = param.shape_size();
  CHECK(num_shape == 0 || num_shape == 1 || num_shape == num_top)
      << "Must specify 'shape' once, once per top blob, or not at all: "
      << num_top << " tops vs. " << num_shape << " shapes.";
  if (num_shape > 0) {
    for (int i = 0; i < num_top; ++i) {
      // A single shape broadcasts to every top; otherwise shape i -> top i.
      const int shape_index = (num_shape == 1) ? 0 : i;
      top[i]->Reshape(param.shape(shape_index));
    }
  }
}

INSTANTIATE_CLASS(InputDataLayer);
REGISTER_LAYER_CLASS(InputData);

} // namespace caffe
11 changes: 10 additions & 1 deletion src/caffe/proto/caffe.proto
Original file line number Diff line number Diff line change
Expand Up @@ -306,7 +306,7 @@ message ParamSpec {
// NOTE
// Update the next available ID when you add a new LayerParameter field.
//
// LayerParameter next available layer-specific ID: 139 (last added: tile_param)
// LayerParameter next available layer-specific ID: 139 (last added: input_data_param — currently on placeholder ID 999; assign 139 and bump this counter to 140 before merge)
message LayerParameter {
optional string name = 1; // the layer name
optional string type = 2; // the layer type
Expand Down Expand Up @@ -371,6 +371,7 @@ message LayerParameter {
optional ImageDataParameter image_data_param = 115;
optional InfogainLossParameter infogain_loss_param = 116;
optional InnerProductParameter inner_product_param = 117;
optional InputDataParameter input_data_param = 999; // TODO(shelhamer)
optional LogParameter log_param = 134;
optional LRNParameter lrn_param = 118;
optional MemoryDataParameter memory_data_param = 119;
Expand Down Expand Up @@ -710,6 +711,14 @@ message InnerProductParameter {
optional int32 axis = 5 [default = 1];
}

// Message that stores parameters used by InputDataLayer
message InputDataParameter {
// This layer produces N >= 1 top blob(s) to be assigned manually.
// Define N shapes to set a shape for each top.
// Define 1 shape to set the same shape for every top.
// Define no shape to defer to reshaping manually.
repeated BlobShape shape = 1;
}

// Message that stores parameters used by LogLayer
message LogParameter {
// LogLayer computes outputs y = log_base(shift + scale * x), for base > 0.
Expand Down

0 comments on commit dd2f0fc

Please sign in to comment.