
Commit fe96991

move MemoryDataLayer decl. from vision_layers.hpp to data_layers.hpp

jeffdonahue committed Jun 9, 2014
1 parent 69af0c8
Showing 2 changed files with 42 additions and 43 deletions.
42 changes: 42 additions & 0 deletions include/caffe/data_layers.hpp
@@ -198,6 +198,48 @@ class ImageDataLayer : public Layer<Dtype> {
Caffe::Phase phase_;
};

/* MemoryDataLayer
*/
template <typename Dtype>
class MemoryDataLayer : public Layer<Dtype> {
public:
explicit MemoryDataLayer(const LayerParameter& param)
: Layer<Dtype>(param) {}
virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);

virtual inline LayerParameter_LayerType type() const {
return LayerParameter_LayerType_MEMORY_DATA;
}
virtual inline int ExactNumBottomBlobs() { return 0; }
virtual inline int ExactNumTopBlobs() { return 2; }

// Reset should accept const pointers, but can't, because the memory
// will be given to Blob, which is mutable
void Reset(Dtype* data, Dtype* label, int n);
int datum_channels() { return datum_channels_; }
int datum_height() { return datum_height_; }
int datum_width() { return datum_width_; }
int batch_size() { return batch_size_; }

protected:
virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }

Dtype* data_;
Dtype* labels_;
int datum_channels_;
int datum_height_;
int datum_width_;
int datum_size_;
int batch_size_;
int n_;
int pos_;
};

// This function is used to create a pthread that prefetches the window data.
template <typename Dtype>
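For context, a minimal usage sketch of the relocated layer, not part of this commit: it assumes the MemoryDataParameter message (batch_size/channels/height/width) that accompanies MemoryDataLayer in caffe.proto of the same era, and uses the public base-class Forward() wrapper rather than the protected Forward_cpu() shown above. Buffer shapes and values are illustrative only.

// Hypothetical sketch: feed in-memory arrays to a net through
// MemoryDataLayer, now declared in include/caffe/data_layers.hpp.
#include <vector>

#include "caffe/blob.hpp"
#include "caffe/data_layers.hpp"

using namespace caffe;

void FeedFromMemory(float* data, float* labels, int n) {
  // Describe the shape of each datum; these MemoryDataParameter fields
  // are assumed from the caffe.proto of the same era.
  LayerParameter param;
  param.mutable_memory_data_param()->set_batch_size(32);
  param.mutable_memory_data_param()->set_channels(3);
  param.mutable_memory_data_param()->set_height(32);
  param.mutable_memory_data_param()->set_width(32);

  MemoryDataLayer<float> layer(param);

  // The layer takes no bottom blobs and produces two tops: data and label
  // (ExactNumBottomBlobs() == 0, ExactNumTopBlobs() == 2).
  std::vector<Blob<float>*> bottom;
  Blob<float> data_blob, label_blob;
  std::vector<Blob<float>*> top;
  top.push_back(&data_blob);
  top.push_back(&label_blob);
  layer.SetUp(bottom, &top);

  // Hand the layer n datums' worth of raw memory; Reset() cannot take
  // const pointers because the memory is wrapped in mutable Blobs.
  layer.Reset(data, labels, n);

  // Each forward pass copies the next batch of datums into the top blobs.
  layer.Forward(bottom, &top);
}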
43 changes: 0 additions & 43 deletions include/caffe/vision_layers.hpp
@@ -333,49 +333,6 @@ class LRNLayer : public Layer<Dtype> {
vector<Blob<Dtype>*> product_bottom_vec_;
};

/* MemoryDataLayer
*/
template <typename Dtype>
class MemoryDataLayer : public Layer<Dtype> {
public:
explicit MemoryDataLayer(const LayerParameter& param)
: Layer<Dtype>(param) {}
virtual void SetUp(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);

virtual inline LayerParameter_LayerType type() const {
return LayerParameter_LayerType_MEMORY_DATA;
}
virtual inline int ExactNumBottomBlobs() { return 0; }
virtual inline int ExactNumTopBlobs() { return 2; }

// Reset should accept const pointers, but can't, because the memory
// will be given to Blob, which is mutable
void Reset(Dtype* data, Dtype* label, int n);
int datum_channels() { return datum_channels_; }
int datum_height() { return datum_height_; }
int datum_width() { return datum_width_; }
int batch_size() { return batch_size_; }

protected:
virtual Dtype Forward_cpu(const vector<Blob<Dtype>*>& bottom,
vector<Blob<Dtype>*>* top);
virtual void Backward_cpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }
virtual void Backward_gpu(const vector<Blob<Dtype>*>& top,
const bool propagate_down, vector<Blob<Dtype>*>* bottom) { return; }

Dtype* data_;
Dtype* labels_;
int datum_channels_;
int datum_height_;
int datum_width_;
int datum_size_;
int batch_size_;
int n_;
int pos_;
};

/* PoolingLayer
*/
template <typename Dtype>
