
Merge pull request BVLC#480 from jeffdonahue/dummy-data-layer
Dummy data layer
jeffdonahue committed Jun 9, 2014
2 parents b2669d3 + 30fc73a commit 7d15c6b
Showing 5 changed files with 353 additions and 4 deletions.
27 changes: 27 additions & 0 deletions include/caffe/data_layers.hpp
@@ -14,6 +14,7 @@

#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/filler.hpp"
#include "caffe/layer.hpp"
#include "caffe/proto/caffe.pb.h"

@@ -146,6 +147,32 @@ class DataLayer : public Layer<Dtype> {
Caffe::Phase phase_;
};

template <typename Dtype>
class DummyDataLayer : public Layer<Dtype> {
public:
explicit DummyDataLayer(const LayerParameter& param)
: Layer<Dtype>(param) {}
virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);

virtual inline LayerParameter_LayerType type() const {
return LayerParameter_LayerType_DUMMY_DATA;
}
virtual inline int ExactNumBottomBlobs() const { return 0; }
virtual inline int MinTopBlobs() const { return 1; }

protected:
virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }

vector<shared_ptr<Filler<Dtype> > > fillers_;
vector<bool> refill_;
};

// This function is used to create a pthread that prefetches the data.
template <typename Dtype>
void* ImageDataLayerPrefetch(void* layer_pointer);
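For orientation, the declaration above fixes the layer's contract: zero bottom blobs, at least one top blob, one Filler per top (or one shared Filler), and a refill_ flag deciding whether a top is regenerated on each forward pass. A minimal sketch of driving the class directly — the shapes, filler settings, and function name here are illustrative, not part of this commit:

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/data_layers.hpp"
#include "caffe/proto/caffe.pb.h"

using namespace caffe;

void DummyDataSketch() {
  LayerParameter param;
  DummyDataParameter* dummy = param.mutable_dummy_data_param();
  dummy->add_num(8);
  dummy->add_channels(3);
  dummy->add_height(4);
  dummy->add_width(5);
  FillerParameter* filler = dummy->add_data_filler();  // one filler, shared
  filler->set_type("gaussian");
  filler->set_std(0.01);

  DummyDataLayer<float> layer(param);
  std::vector<Blob<float>*> bottom;        // ExactNumBottomBlobs() == 0
  Blob<float> data;
  std::vector<Blob<float>*> top(1, &data);
  layer.SetUp(bottom, &top);               // reshapes data to 8 x 3 x 4 x 5
  layer.Forward(bottom, &top);             // gaussian is non-constant: refilled here
}

Because the filler is gaussian rather than constant, refill_[0] ends up true after SetUp, so the blob is regenerated on every Forward call.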
2 changes: 2 additions & 0 deletions src/caffe/layer_factory.cpp
@@ -36,6 +36,8 @@ Layer<Dtype>* GetLayer(const LayerParameter& param) {
return new DataLayer<Dtype>(param);
case LayerParameter_LayerType_DROPOUT:
return new DropoutLayer<Dtype>(param);
case LayerParameter_LayerType_DUMMY_DATA:
return new DummyDataLayer<Dtype>(param);
case LayerParameter_LayerType_EUCLIDEAN_LOSS:
return new EuclideanLossLayer<Dtype>(param);
case LayerParameter_LayerType_ELTWISE:
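With this case in place, callers such as Net can obtain the layer from its type enum alone. A hedged sketch of the dispatch (the declaration site of GetLayer is assumed from the includes used elsewhere in this patch):

#include "caffe/proto/caffe.pb.h"
#include "caffe/vision_layers.hpp"  // assumed to declare GetLayer at this point in the tree

caffe::Layer<float>* MakeDummyDataLayer() {
  caffe::LayerParameter param;
  param.set_type(caffe::LayerParameter_LayerType_DUMMY_DATA);
  caffe::DummyDataParameter* dummy = param.mutable_dummy_data_param();
  dummy->add_num(1);       // shape fields are consumed later, in SetUp
  dummy->add_channels(1);
  dummy->add_height(1);
  dummy->add_width(1);
  // GetLayer switches on param.type() and returns a new DummyDataLayer<float>.
  return caffe::GetLayer<float>(param);
}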
100 changes: 100 additions & 0 deletions src/caffe/layers/dummy_data_layer.cpp
@@ -0,0 +1,100 @@
// Copyright 2014 BVLC and contributors.

#include <vector>

#include "caffe/filler.hpp"
#include "caffe/layer.hpp"
#include "caffe/vision_layers.hpp"

namespace caffe {

template <typename Dtype>
void DummyDataLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
const int num_top = top->size();
const DummyDataParameter& param = this->layer_param_.dummy_data_param();
const int num_data_filler = param.data_filler_size();
CHECK(num_data_filler == 0 || num_data_filler == 1 ||
num_data_filler == num_top)
<< "Number of data fillers must be 0, 1 or equal to the number of tops: "
<< num_top << "; you specified " << num_data_filler << " data fillers.";
CHECK(param.num_size() == 1 || param.num_size() == num_top)
<< "Must specify either a single (1) 'num' or one for each top blob ("
<< num_top << "); you specified " << param.num_size() << ".";
CHECK(param.channels_size() == 1 || param.channels_size() == num_top)
<< "Must specify either a single (1) 'channels' or one for each top blob ("
<< num_top << "); you specified " << param.channels_size() << ".";
CHECK(param.height_size() == 1 || param.height_size() == num_top)
<< "Must specify either a single (1) 'height' or one for each top blob ("
<< num_top << "); you specified " << param.height_size() << ".";
CHECK(param.width_size() == 1 || param.width_size() == num_top)
<< "Must specify either a single (1) 'width' or one for each top blob ("
<< num_top << "); you specified " << param.width_size() << ".";
// refill_[i] tells Forward whether or not to actually refill top Blob i.
// If refill_[i] is false, Forward does nothing for Blob i. We use this to
// avoid wastefully refilling "constant" Blobs in every forward pass.
// We first fill in refill_ with the INVERSE of its final values.
// The first time we run Forward from the SetUp method, we'll fill only the
// Blobs for which refill_ is normally false. These Blobs will never be
// filled again.
refill_.clear();
fillers_.clear();
if (num_data_filler <= 1) {
FillerParameter filler_param;
if (num_data_filler == 0) {
filler_param.set_type("constant");
filler_param.set_value(0);
} else {
filler_param.CopyFrom(param.data_filler(0));
}
// Refill on each iteration iff not using a constant filler,
// but use the inverse of this rule for the first run.
refill_.resize(1);
refill_[0] = (strcmp(filler_param.type().c_str(), "constant") == 0);
fillers_.resize(1);
fillers_[0].reset(GetFiller<Dtype>(filler_param));
} else {
refill_.resize(num_top);
fillers_.resize(num_top);
for (int i = 0; i < num_top; ++i) {
fillers_[i].reset(GetFiller<Dtype>(param.data_filler(i)));
// Refill on each iteration iff not using a constant filler,
// but use the inverse of this rule for the first run.
refill_[i] =
(strcmp(param.data_filler(i).type().c_str(), "constant") == 0);
}
}
for (int i = 0; i < num_top; ++i) {
const int num = (param.num_size() == 1) ? param.num(0) : param.num(i);
const int channels =
(param.channels_size() == 1) ? param.channels(0) : param.channels(i);
const int height =
(param.height_size() == 1) ? param.height(0) : param.height(i);
const int width =
(param.width_size() == 1) ? param.width(0) : param.width(i);
(*top)[i]->Reshape(num, channels, height, width);
}
// Run Forward once, with refill_ inverted, to fill the constant Blobs.
Forward(bottom, top);
// Invert the inverted refill_ values to refill the desired (non-constant)
// Blobs in every usual forward pass.
for (int i = 0; i < refill_.size(); ++i) {
refill_[i] = !refill_[i];
}
}

template <typename Dtype>
Dtype DummyDataLayer<Dtype>::Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top) {
for (int i = 0; i < top->size(); ++i) {
const int filler_id = (fillers_.size() > 1) ? i : 0;
if (refill_[filler_id]) {
fillers_[filler_id]->Fill((*top)[i]);
}
}
return Dtype(0.);
}

INSTANTIATE_CLASS(DummyDataLayer);

} // namespace caffe
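Two consequences of SetUp and Forward_cpu above are easy to miss: with a single data_filler, fillers_ and refill_ have size one and every top maps to filler_id 0; and each of num/channels/height/width may independently hold either one value (broadcast to all tops) or one value per top. A sketch exercising both rules — shapes and names are illustrative only:

#include <vector>

#include "caffe/blob.hpp"
#include "caffe/data_layers.hpp"
#include "caffe/proto/caffe.pb.h"

using namespace caffe;

void SharedFillerSketch() {
  LayerParameter param;
  DummyDataParameter* dummy = param.mutable_dummy_data_param();
  FillerParameter* filler = dummy->add_data_filler();  // shared by both tops
  filler->set_type("uniform");
  filler->set_min(-1);
  filler->set_max(1);
  dummy->add_num(16);                               // one num: broadcast to both tops
  dummy->add_channels(3);  dummy->add_channels(1);  // per-top channels
  dummy->add_height(8);    dummy->add_height(1);    // per-top height
  dummy->add_width(8);     dummy->add_width(1);     // per-top width

  DummyDataLayer<float> layer(param);
  std::vector<Blob<float>*> bottom;
  Blob<float> data, label;
  std::vector<Blob<float>*> top;
  top.push_back(&data);
  top.push_back(&label);
  layer.SetUp(bottom, &top);    // data: 16x3x8x8, label: 16x1x1x1
  layer.Forward(bottom, &top);  // uniform is non-constant: both tops refilled
}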
26 changes: 22 additions & 4 deletions src/caffe/proto/caffe.proto
@@ -115,7 +115,7 @@ message SolverState {
// NOTE
// Update the next available ID when you add a new LayerParameter field.
//
// LayerParameter next available ID: 25 (last added: eltwise_param)
// LayerParameter next available ID: 27 (last added: dummy_data_param)
message LayerParameter {
repeated string bottom = 2; // the name of the bottom blobs
repeated string top = 3; // the name of the top blobs
@@ -127,7 +127,7 @@ message LayerParameter {
// line above the enum. Update the next available ID when you add a new
// LayerType.
//
// LayerType next available ID: 32 (last added: THRESHOLD)
// LayerType next available ID: 33 (last added: DUMMY_DATA)
enum LayerType {
// "NONE" layer type is 0th enum element so that we don't cause confusion
// by defaulting to an existent LayerType (instead, should usually error if
@@ -140,6 +140,7 @@
CONVOLUTION = 4;
DATA = 5;
DROPOUT = 6;
DUMMY_DATA = 32;
EUCLIDEAN_LOSS = 7;
ELTWISE = 25;
FLATTEN = 8;
@@ -175,13 +176,12 @@
// The weight decay that is multiplied on the global weight decay.
repeated float weight_decay = 8;

// Parameters for particular layer types.
// Parameters next available ID: 26 (last added: ThresholdParameter)
optional ArgMaxParameter argmax_param = 23;
optional ConcatParameter concat_param = 9;
optional ConvolutionParameter convolution_param = 10;
optional DataParameter data_param = 11;
optional DropoutParameter dropout_param = 12;
optional DummyDataParameter dummy_data_param = 26;
optional EltwiseParameter eltwise_param = 24;
optional HDF5DataParameter hdf5_data_param = 13;
optional HDF5OutputParameter hdf5_output_param = 14;
@@ -254,6 +254,24 @@
optional float dropout_ratio = 1 [default = 0.5]; // dropout ratio
}

// Message that stores parameters used by DummyDataLayer.
// DummyDataLayer fills any number of arbitrarily shaped blobs with random
// (or constant) data generated by "Fillers" (see "message FillerParameter").
message DummyDataParameter {
// This layer produces N >= 1 top blobs. DummyDataParameter must specify 1 or N
// num, 1 or N channels, 1 or N height, and 1 or N width fields, and must
// specify 0, 1 or N data_fillers.
//
// If 0 data_fillers are specified, ConstantFiller with a value of 0 is used.
// If 1 data_filler is specified, it is applied to all top blobs. If N are
// specified, the ith is applied to the ith top blob.
repeated FillerParameter data_filler = 1;
repeated uint32 num = 2;
repeated uint32 channels = 3;
repeated uint32 height = 4;
repeated uint32 width = 5;
}

// Message that stores parameters used by EltwiseLayer
message EltwiseParameter {
enum EltwiseOp {
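As a usage note for the new message: in a net definition the layer is written in protobuf text format, which can be parsed as below. The field values are invented for illustration. With two fillers, refill_ is tracked per top: the uniform 'data' top is regenerated every forward pass, while the constant 'label' top keeps the zeros it was given once during SetUp.

#include <google/protobuf/text_format.h>

#include "caffe/proto/caffe.pb.h"

// Hypothetical layer definition: two tops with per-top fillers and shapes.
const char* const kDummyDataSpec =
    "name: 'data'  type: DUMMY_DATA  top: 'data'  top: 'label' "
    "dummy_data_param { "
    "  data_filler { type: 'uniform' min: -1 max: 1 } "
    "  data_filler { type: 'constant' value: 0 } "
    "  num: 32      num: 32 "
    "  channels: 3  channels: 1 "
    "  height: 28   height: 1 "
    "  width: 28    width: 1 "
    "} ";

bool ParseDummyDataSpec(caffe::LayerParameter* param) {
  return google::protobuf::TextFormat::ParseFromString(kDummyDataSpec, param);
}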