Skip to content

Commit

Permalink
Merge pull request BVLC#3211 from shelhamer/input-layer
Browse files Browse the repository at this point in the history
Add Input Layer to Replace `input`s
  • Loading branch information
shelhamer committed Feb 27, 2016
2 parents fe0f441 + f88073a commit a04ac11
Show file tree
Hide file tree
Showing 27 changed files with 295 additions and 332 deletions.
11 changes: 5 additions & 6 deletions examples/cifar10/cifar10_full.prototxt
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
name: "CIFAR10_full_deploy"
# N.B. input image must be in CIFAR-10 format
# as described at http://www.cs.toronto.edu/~kriz/cifar.html
input: "data"
input_shape {
dim: 1
dim: 3
dim: 32
dim: 32
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 1 dim: 3 dim: 32 dim: 32 } }
}
layer {
name: "conv1"
Expand Down
11 changes: 5 additions & 6 deletions examples/cifar10/cifar10_quick.prototxt
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
name: "CIFAR10_quick_test"
input: "data"
input_shape {
dim: 1
dim: 3
dim: 32
dim: 32
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 1 dim: 3 dim: 32 dim: 32 } }
}
layer {
name: "conv1"
Expand Down
2 changes: 1 addition & 1 deletion examples/cpp_classification/classification.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -159,7 +159,7 @@ std::vector<float> Classifier::Predict(const cv::Mat& img) {

Preprocess(img, &input_channels);

net_->ForwardPrefilled();
net_->Forward();

/* Copy the output layer to a std::vector */
Blob<float>* output_layer = net_->output_blobs()[0];
Expand Down
11 changes: 5 additions & 6 deletions examples/mnist/lenet.prototxt
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
name: "LeNet"
input: "data"
input_shape {
dim: 64
dim: 1
dim: 28
dim: 28
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 64 dim: 1 dim: 28 dim: 28 } }
}
layer {
name: "conv1"
Expand Down
43 changes: 21 additions & 22 deletions examples/net_surgery.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -5494,48 +5494,47 @@
"name": "stdout",
"output_type": "stream",
"text": [
"1,2c1\r\n",
"1,2c1,2\r\n",
"< # Fully convolutional network version of CaffeNet.\r\n",
"< name: \"CaffeNetConv\"\r\n",
"---\r\n",
"> name: \"CaffeNet\"\r\n",
"4c3\r\n",
"< input_dim: 1\r\n",
"> input: \"data\"\r\n",
"7,11c7\r\n",
"< input_param {\r\n",
"< # initial shape for a fully convolutional network:\r\n",
"< # the shape can be set for each input by reshape.\r\n",
"< shape: { dim: 1 dim: 3 dim: 451 dim: 451 }\r\n",
"< }\r\n",
"---\r\n",
"> input_dim: 10\r\n",
"6,7c5,6\r\n",
"< input_dim: 451\r\n",
"< input_dim: 451\r\n",
"---\r\n",
"> input_dim: 227\r\n",
"> input_dim: 227\r\n",
"152,153c151,152\r\n",
"> input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }\r\n",
"157,158c153,154\r\n",
"< name: \"fc6-conv\"\r\n",
"< type: \"Convolution\"\r\n",
"---\r\n",
"> name: \"fc6\"\r\n",
"> type: \"InnerProduct\"\r\n",
"155,156c154,155\r\n",
"160,161c156,157\r\n",
"< top: \"fc6-conv\"\r\n",
"< convolution_param {\r\n",
"---\r\n",
"> top: \"fc6\"\r\n",
"> inner_product_param {\r\n",
"158d156\r\n",
"163d158\r\n",
"< kernel_size: 6\r\n",
"164,165c162,163\r\n",
"169,170c164,165\r\n",
"< bottom: \"fc6-conv\"\r\n",
"< top: \"fc6-conv\"\r\n",
"---\r\n",
"> bottom: \"fc6\"\r\n",
"> top: \"fc6\"\r\n",
"170,171c168,169\r\n",
"175,176c170,171\r\n",
"< bottom: \"fc6-conv\"\r\n",
"< top: \"fc6-conv\"\r\n",
"---\r\n",
"> bottom: \"fc6\"\r\n",
"> top: \"fc6\"\r\n",
"177,181c175,179\r\n",
"182,186c177,181\r\n",
"< name: \"fc7-conv\"\r\n",
"< type: \"Convolution\"\r\n",
"< bottom: \"fc6-conv\"\r\n",
Expand All @@ -5547,21 +5546,21 @@
"> bottom: \"fc6\"\r\n",
"> top: \"fc7\"\r\n",
"> inner_product_param {\r\n",
"183d180\r\n",
"188d182\r\n",
"< kernel_size: 1\r\n",
"189,190c186,187\r\n",
"194,195c188,189\r\n",
"< bottom: \"fc7-conv\"\r\n",
"< top: \"fc7-conv\"\r\n",
"---\r\n",
"> bottom: \"fc7\"\r\n",
"> top: \"fc7\"\r\n",
"195,196c192,193\r\n",
"200,201c194,195\r\n",
"< bottom: \"fc7-conv\"\r\n",
"< top: \"fc7-conv\"\r\n",
"---\r\n",
"> bottom: \"fc7\"\r\n",
"> top: \"fc7\"\r\n",
"202,206c199,203\r\n",
"207,211c201,205\r\n",
"< name: \"fc8-conv\"\r\n",
"< type: \"Convolution\"\r\n",
"< bottom: \"fc7-conv\"\r\n",
Expand All @@ -5573,9 +5572,9 @@
"> bottom: \"fc7\"\r\n",
"> top: \"fc8\"\r\n",
"> inner_product_param {\r\n",
"208d204\r\n",
"213d206\r\n",
"< kernel_size: 1\r\n",
"214c210\r\n",
"219c212\r\n",
"< bottom: \"fc8-conv\"\r\n",
"---\r\n",
"> bottom: \"fc8\"\r\n"
Expand Down
15 changes: 9 additions & 6 deletions examples/net_surgery/bvlc_caffenet_full_conv.prototxt
Original file line number Diff line number Diff line change
@@ -1,11 +1,14 @@
# Fully convolutional network version of CaffeNet.
name: "CaffeNetConv"
input: "data"
input_shape {
dim: 1
dim: 3
dim: 451
dim: 451
layer {
name: "data"
type: "Input"
top: "data"
input_param {
# initial shape for a fully convolutional network:
# the shape can be set for each input by reshape.
shape: { dim: 1 dim: 3 dim: 451 dim: 451 }
}
}
layer {
name: "conv1"
Expand Down
11 changes: 5 additions & 6 deletions examples/net_surgery/conv.prototxt
Original file line number Diff line number Diff line change
@@ -1,11 +1,10 @@
# Simple single-layer network to showcase editing model parameters.
name: "convolution"
input: "data"
input_shape {
dim: 1
dim: 1
dim: 100
dim: 100
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 1 dim: 1 dim: 100 dim: 100 } }
}
layer {
name: "conv"
Expand Down
13 changes: 7 additions & 6 deletions examples/siamese/mnist_siamese.prototxt
Original file line number Diff line number Diff line change
@@ -1,10 +1,11 @@
name: "mnist_siamese"
input: "data"
input_shape {
dim: 10000
dim: 1
dim: 28
dim: 28
layer {
name: "data"
type: "Input"
top: "data"
input_param {
shape: { dim: 10000 dim: 1 dim: 28 dim: 28 }
}
}
layer {
name: "conv1"
Expand Down
44 changes: 44 additions & 0 deletions include/caffe/layers/input_layer.hpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
#ifndef CAFFE_INPUT_LAYER_HPP_
#define CAFFE_INPUT_LAYER_HPP_

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"

namespace caffe {

/**
* @brief Provides data to the Net by assigning tops directly.
*
* This data layer is a container that merely holds the data assigned to it;
* forward, backward, and reshape are all no-ops.
*/
template <typename Dtype>
// A "source" layer: it has no bottoms and simply exposes its top Blobs,
// which callers fill directly (e.g. via Net::input_blobs()) before Forward.
class InputLayer : public Layer<Dtype> {
public:
explicit InputLayer(const LayerParameter& param)
: Layer<Dtype>(param) {}
// Declared here, defined in the corresponding .cpp (not visible in this
// view); presumably it sizes the tops from input_param's shapes — confirm
// against src/caffe/layers/input_layer.cpp.
virtual void LayerSetUp(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top);
// Data layers should be shared by multiple solvers in parallel
virtual inline bool ShareInParallel() const { return true; }
// Data layers have no bottoms, so reshaping is trivial.
// (Intentionally a no-op: shapes are fixed at setup, or changed by the
// caller reshaping the top Blobs directly.)
virtual void Reshape(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}

// Type string used by the layer registry / prototxt `type: "Input"`.
virtual inline const char* type() const { return "Input"; }
// Takes no bottom Blobs; produces at least one top Blob.
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }

protected:
// Forward is a no-op: the top data is assigned externally by the caller.
virtual void Forward_cpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {}
// Backward is a no-op: there are no bottoms to propagate gradients to.
virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
const vector<bool>& propagate_down, const vector<Blob<Dtype>*>& bottom) {}
};

} // namespace caffe

#endif // CAFFE_INPUT_LAYER_HPP_
26 changes: 12 additions & 14 deletions include/caffe/net.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,16 @@ class Net {
void Init(const NetParameter& param);

/**
* @brief Run Forward with the input Blob%s already fed separately.
* @brief Run Forward and return the result.
*
* You can get the input blobs using input_blobs().
*/
const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL);
const vector<Blob<Dtype>*>& Forward(Dtype* loss = NULL);
/// @brief DEPRECATED; use Forward() instead.
const vector<Blob<Dtype>*>& ForwardPrefilled(Dtype* loss = NULL) {
LOG_EVERY_N(WARNING, 1000) << "DEPRECATED: ForwardPrefilled() "
<< "will be removed in a future version. Use Forward().";
return Forward(loss);
}

/**
* The From and To variants of Forward and Backward operate on the
Expand All @@ -49,14 +54,9 @@ class Net {
Dtype ForwardFromTo(int start, int end);
Dtype ForwardFrom(int start);
Dtype ForwardTo(int end);
/// @brief Run forward using a set of bottom blobs, and return the result.
/// @brief DEPRECATED; set input blobs then use Forward() instead.
const vector<Blob<Dtype>*>& Forward(const vector<Blob<Dtype>* > & bottom,
Dtype* loss = NULL);
/**
* @brief Run forward using a serialized BlobProtoVector and return the
* result as a serialized BlobProtoVector
*/
string Forward(const string& input_blob_protos, Dtype* loss = NULL);

/**
* @brief Zeroes out the diffs of all net parameters.
Expand All @@ -82,9 +82,9 @@ class Net {
*/
void Reshape();

Dtype ForwardBackward(const vector<Blob<Dtype>* > & bottom) {
Dtype ForwardBackward() {
Dtype loss;
Forward(bottom, &loss);
Forward(&loss);
Backward();
return loss;
}
Expand Down Expand Up @@ -229,7 +229,7 @@ class Net {

protected:
// Helpers for Init.
/// @brief Append a new input or top blob to the net.
/// @brief Append a new top blob to the net.
void AppendTop(const NetParameter& param, const int layer_id,
const int top_id, set<string>* available_blobs,
map<string, int>* blob_name_to_idx);
Expand All @@ -241,8 +241,6 @@ class Net {
void AppendParam(const NetParameter& param, const int layer_id,
const int param_id);

/// @brief Helper for displaying debug info in Forward about input Blobs.
void InputDebugInfo(const int layer_id);
/// @brief Helper for displaying debug info in Forward.
void ForwardDebugInfo(const int layer_id);
/// @brief Helper for displaying debug info in Backward.
Expand Down
6 changes: 6 additions & 0 deletions include/caffe/util/upgrade_proto.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,12 @@ bool UpgradeV1LayerParameter(const V1LayerParameter& v1_layer_param,

const char* UpgradeV1LayerType(const V1LayerParameter_LayerType type);

// Return true iff the Net contains input fields.
bool NetNeedsInputUpgrade(const NetParameter& net_param);

// Perform all necessary transformations to upgrade input fields into layers.
void UpgradeNetInput(NetParameter* net_param);

// Return true iff the solver contains any old solver_type specified as enums
bool SolverNeedsTypeUpgrade(const SolverParameter& solver_param);

Expand Down
11 changes: 5 additions & 6 deletions models/bvlc_alexnet/deploy.prototxt
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
name: "AlexNet"
input: "data"
input_shape {
dim: 10
dim: 3
dim: 227
dim: 227
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }
}
layer {
name: "conv1"
Expand Down
11 changes: 5 additions & 6 deletions models/bvlc_googlenet/deploy.prototxt
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
name: "GoogleNet"
input: "data"
input_shape {
dim: 10
dim: 3
dim: 224
dim: 224
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 10 dim: 3 dim: 224 dim: 224 } }
}
layer {
name: "conv1/7x7_s2"
Expand Down
11 changes: 5 additions & 6 deletions models/bvlc_reference_caffenet/deploy.prototxt
Original file line number Diff line number Diff line change
@@ -1,10 +1,9 @@
name: "CaffeNet"
input: "data"
input_shape {
dim: 10
dim: 3
dim: 227
dim: 227
layer {
name: "data"
type: "Input"
top: "data"
input_param { shape: { dim: 10 dim: 3 dim: 227 dim: 227 } }
}
layer {
name: "conv1"
Expand Down
Loading

0 comments on commit a04ac11

Please sign in to comment.