Merge pull request BVLC#103 from Yangqing/master
bugfix regarding BVLC#100
Yangqing committed Feb 13, 2014
2 parents f55acb2 + b543b2f commit 97c0811
Showing 1 changed file with 9 additions and 5 deletions.
14 changes: 9 additions & 5 deletions src/caffe/net.cpp
@@ -117,14 +117,18 @@ void Net<Dtype>::Init(const NetParameter& param) {
           << top_vecs_[i][topid]->height() << " "
           << top_vecs_[i][topid]->width();
     }
-    // catch: if a layer param does not specify blobs_lr, we should assume the
-    // learning rate to be 1. Thus we will need to perform backward.
-    if (layers_[i]->layer_param().blobs_lr_size()) {
+    int blobs_lr_size = layers_[i]->layer_param().blobs_lr_size();
+    CHECK(blobs_lr_size == layers_[i]->blobs().size() || blobs_lr_size == 0)
+        << "Incorrect blobs lr size: should be either 0 or the same as "
+           "the number of the layer's parameter blobs.";
+    if (blobs_lr_size) {
       // Check if this layer needs backward operation itself
-      for (int j = 0; j < layers_[i]->layer_param().blobs_lr_size(); ++j) {
+      for (int j = 0; j < blobs_lr_size; ++j) {
         need_backward |= (layers_[i]->layer_param().blobs_lr(j) > 0);
       }
-    } else {
+    } else if (layers_[i]->blobs().size()) {
+      // catch: if a layer param does not specify blobs_lr, we should assume the
+      // learning rate to be 1. Thus we will need to perform backward.
       need_backward = true;
     }
     // Finally, set the backward flag
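
In short, the patch does two things: it validates that blobs_lr, when given, has exactly one entry per parameter blob, and it forces a backward pass for an omitted blobs_lr only when the layer actually owns parameter blobs, so parameterless layers no longer trigger backward needlessly. The following is a minimal self-contained sketch of that decision logic; FakeLayer and NeedsBackward are hypothetical stand-ins for illustration, not Caffe's real Layer and LayerParameter types:

// A minimal sketch (not part of the commit) of the post-fix need_backward
// decision, using hypothetical stand-in types instead of Caffe's own.
#include <cassert>
#include <cstddef>
#include <vector>

struct FakeLayer {
  std::vector<int> blobs;       // stand-in for the layer's parameter blobs
  std::vector<float> blobs_lr;  // stand-in for layer_param().blobs_lr
};

// Mirrors the fixed logic: blobs_lr must be empty or match blobs.size();
// an unspecified blobs_lr implies lr = 1 only when parameter blobs exist.
bool NeedsBackward(const FakeLayer& layer) {
  const std::size_t blobs_lr_size = layer.blobs_lr.size();
  assert(blobs_lr_size == layer.blobs.size() || blobs_lr_size == 0);
  bool need_backward = false;
  if (blobs_lr_size) {
    for (std::size_t j = 0; j < blobs_lr_size; ++j) {
      need_backward |= (layer.blobs_lr[j] > 0);
    }
  } else if (!layer.blobs.empty()) {
    // blobs_lr omitted but the layer has parameters: assume lr = 1.
    need_backward = true;
  }
  return need_backward;
}

int main() {
  FakeLayer relu;                        // no parameter blobs, no blobs_lr
  FakeLayer conv{{1, 2}, {}};            // two parameter blobs, blobs_lr omitted
  FakeLayer frozen{{1, 2}, {0.f, 0.f}};  // two parameter blobs, lr forced to 0
  assert(!NeedsBackward(relu));    // pre-fix code would have returned true here
  assert(NeedsBackward(conv));
  assert(!NeedsBackward(frozen));
  return 0;
}

The frozen case also shows why the loop matters: blobs_lr entries of 0 leave need_backward false, which is how a layer's parameters can be held fixed.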
