Commit: vector<int> shape_ instead of (num, channels, height, width).
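For context, a minimal caller-side sketch of the generalized interface this commit introduces (assuming a build against this branch of Caffe; the concrete shapes and the main() wrapper are illustrative only, not part of the change):

#include <vector>

#include "caffe/blob.hpp"

int main() {
  // Old-style 4-D construction still works, but is now marked deprecated.
  caffe::Blob<float> legacy(2, 3, 4, 5);

  // New-style construction takes an arbitrary-length shape vector.
  std::vector<int> shape(4);
  shape[0] = 2; shape[1] = 3; shape[2] = 4; shape[3] = 5;
  caffe::Blob<float> blob(shape);

  // The legacy accessors are now thin wrappers around shape(index):
  // num() == shape(0), channels() == shape(1), and so on.
  return (blob.num() == legacy.num() && blob.count() == legacy.count()) ? 0 : 1;
}
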
include/caffe/blob.hpp
@@ -1,6 +1,9 @@
 #ifndef CAFFE_BLOB_HPP_
 #define CAFFE_BLOB_HPP_
 
+#include <string>
+#include <vector>
+
 #include "caffe/common.hpp"
 #include "caffe/proto/caffe.pb.h"
 #include "caffe/syncedmem.hpp"
@@ -19,10 +22,16 @@ template <typename Dtype>
 class Blob {
  public:
   Blob()
-      : data_(), diff_(), num_(0), channels_(0), height_(0), width_(0),
-        count_(0), capacity_(0) {}
+      : data_(), diff_(), count_(0), capacity_(0) {}
+
+  /// @brief Deprecated; use <code>Blob(const vector<int>& shape)</code>.
   explicit Blob(const int num, const int channels, const int height,
-      const int width);
+      const int width);
+  explicit Blob(const vector<int>& shape);
+
+  /// @brief Deprecated; use <code>Reshape(const vector<int>& shape)</code>.
+  void Reshape(const int num, const int channels, const int height,
+      const int width);
   /**
    * @brief Change the dimensions of the blob, allocating new memory if
    *        necessary.
@@ -37,25 +46,46 @@ class Blob {
    * an error; either Net::Forward or Net::Reshape need to be called to
    * propagate the new input shape to higher layers.
    */
-  void Reshape(const int num, const int channels, const int height,
-      const int width);
+  void Reshape(const vector<int>& shape);
   void ReshapeLike(const Blob& other);
-  inline int num() const { return num_; }
-  inline int channels() const { return channels_; }
-  inline int height() const { return height_; }
-  inline int width() const { return width_; }
+  inline string shape_string() const {
+    ostringstream stream;
+    for (int i = 0; i < shape_.size(); ++i) {
+      stream << shape_[i] << " ";
+    }
+    stream << "(" << count_ << ")";
+    return stream.str();
+  }
+  inline const vector<int>& shape() const { return shape_; }
+  inline int shape(int index) const {
+    CHECK_GE(index, 0) << "index must be non-negative";
+    if (index < shape_.size()) {
+      // Explicitly specified dimension; return it.
+      return shape_[index];
+    } else if (count_ == 0) {
+      // Empty blob; unspecified dimensions are 0.
+      return 0;
+    } else {
+      // Non-empty blob; unspecified dimensions are singletons (1).
+      return 1;
+    }
+  }
+  inline int num() const { return shape(0); }
+  inline int channels() const { return shape(1); }
+  inline int height() const { return shape(2); }
+  inline int width() const { return shape(3); }
   inline int count() const { return count_; }
   inline int offset(const int n, const int c = 0, const int h = 0,
       const int w = 0) const {
     CHECK_GE(n, 0);
-    CHECK_LE(n, num_);
-    CHECK_GE(channels_, 0);
-    CHECK_LE(c, channels_);
-    CHECK_GE(height_, 0);
-    CHECK_LE(h, height_);
-    CHECK_GE(width_, 0);
-    CHECK_LE(w, width_);
-    return ((n * channels_ + c) * height_ + h) * width_ + w;
+    CHECK_LE(n, shape(0));
+    CHECK_GE(shape(1), 0);
+    CHECK_LE(c, shape(1));
+    CHECK_GE(shape(2), 0);
+    CHECK_LE(h, shape(2));
+    CHECK_GE(shape(3), 0);
+    CHECK_LE(w, shape(3));
+    return ((n * shape(1) + c) * shape(2) + h) * shape(3) + w;
   }
   /**
    * @brief Copy from a source Blob.
@@ -126,13 +156,14 @@ class Blob {
    */
   void ShareDiff(const Blob& other);
 
+  bool ShapeEquals(const vector<int>& other_shape);
+  bool ShapeEquals(const Blob& other);
+  bool ShapeEquals(const BlobProto& other);
+
  protected:
   shared_ptr<SyncedMemory> data_;
   shared_ptr<SyncedMemory> diff_;
-  int num_;
-  int channels_;
-  int height_;
-  int width_;
+  vector<int> shape_;
   int count_;
   int capacity_;
 
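To illustrate the shape(index) and shape_string() semantics added above, a small sketch (not part of the commit; the dimensions are made up):

#include <vector>

#include "caffe/blob.hpp"

void ShapeSemanticsDemo() {
  caffe::Blob<float> blob;
  std::vector<int> shape(2);
  shape[0] = 10;
  shape[1] = 20;
  blob.Reshape(shape);

  // Explicitly specified axes come straight from shape_:
  //   blob.num() == shape(0) == 10, blob.channels() == shape(1) == 20.
  // Axes beyond shape_.size() on a non-empty blob are treated as singletons:
  //   blob.height() == 1, blob.width() == 1.
  // On an empty blob (count_ == 0) those unspecified axes would be 0 instead.

  // shape_string() prints each axis followed by the total count,
  // "10 20 (200)" for this blob, which is what Net::Init now logs below.
  LOG(INFO) << blob.shape_string();
}
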
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -100,11 +100,7 @@ void Net<Dtype>::Init(const NetParameter& in_param) {
         blob_loss_weights_.resize(top_id_vecs_[layer_id][top_id] + 1, Dtype(0));
       }
       blob_loss_weights_[top_id_vecs_[layer_id][top_id]] = layer->loss(top_id);
-      LOG(INFO) << "Top shape: " << top_vecs_[layer_id][top_id]->num() << " "
-          << top_vecs_[layer_id][top_id]->channels() << " "
-          << top_vecs_[layer_id][top_id]->height() << " "
-          << top_vecs_[layer_id][top_id]->width() << " ("
-          << top_vecs_[layer_id][top_id]->count() << ")";
+      LOG(INFO) << "Top shape: " << top_vecs_[layer_id][top_id]->shape_string();
       if (layer->loss(top_id)) {
         LOG(INFO) << " with loss weight " << layer->loss(top_id);
       }
@@ -708,10 +704,7 @@ void Net<Dtype>::CopyTrainedLayersFrom(const NetParameter& param) {
     CHECK_EQ(target_blobs.size(), source_layer.blobs_size())
         << "Incompatible number of blobs for layer " << source_layer_name;
     for (int j = 0; j < target_blobs.size(); ++j) {
-      CHECK_EQ(target_blobs[j]->num(), source_layer.blobs(j).num());
-      CHECK_EQ(target_blobs[j]->channels(), source_layer.blobs(j).channels());
-      CHECK_EQ(target_blobs[j]->height(), source_layer.blobs(j).height());
-      CHECK_EQ(target_blobs[j]->width(), source_layer.blobs(j).width());
+      CHECK(target_blobs[j]->ShapeEquals(source_layer.blobs(j)));
       target_blobs[j]->FromProto(source_layer.blobs(j));
     }
   }
Review comment: Should we generalize this (and/or provide generalized indexing), perhaps in another PR?
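One possible form of such a generalization, as a purely hypothetical sketch (the vector-based offset() overload below is not part of this commit; it only mirrors the 4-D offset() above, with later axes varying fastest):

  // Hypothetical N-D offset taking one index per axis.
  inline int offset(const vector<int>& indices) const {
    CHECK_LE(indices.size(), shape_.size());
    int offset = 0;
    for (int i = 0; i < shape_.size(); ++i) {
      offset *= shape(i);
      if (i < indices.size()) {
        CHECK_GE(indices[i], 0);
        CHECK_LT(indices[i], shape(i));
        offset += indices[i];
      }
    }
    return offset;
  }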