test_gradient_check_util: check_bottom < -1 only checks params
jeffdonahue committed Mar 2, 2015
1 parent c07407d commit 927e3ad
Showing 1 changed file with 8 additions and 3 deletions.
include/caffe/test/test_gradient_check_util.hpp
@@ -45,6 +45,10 @@ class GradientChecker {
   void CheckGradientEltwise(Layer<Dtype>* layer,
       const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top);

+  // Checks the gradient of a single output with respect to particular input
+  // blob(s). If check_bottom = i >= 0, check only the ith bottom Blob.
+  // If check_bottom == -1, check everything -- all bottom Blobs and all
+  // param Blobs. Otherwise (if check_bottom < -1), check only param Blobs.
   void CheckGradientSingle(Layer<Dtype>* layer,
       const vector<Blob<Dtype>*>& bottom, const vector<Blob<Dtype>*>& top,
       int check_bottom, int top_id, int top_data_id, bool element_wise = false);
@@ -83,21 +87,22 @@ void GradientChecker<Dtype>::CheckGradientSingle(Layer<Dtype>* layer,
   // First, figure out what blobs we need to check against, and zero init
   // parameter blobs.
   vector<Blob<Dtype>*> blobs_to_check;
-  vector<bool> propagate_down(bottom.size(), check_bottom < 0);
+  vector<bool> propagate_down(bottom.size(), check_bottom == -1);
   for (int i = 0; i < layer->blobs().size(); ++i) {
     Blob<Dtype>* blob = layer->blobs()[i].get();
     caffe_set(blob->count(), static_cast<Dtype>(0), blob->mutable_cpu_diff());
     blobs_to_check.push_back(blob);
   }
-  if (check_bottom < 0) {
+  if (check_bottom == -1) {
     for (int i = 0; i < bottom.size(); ++i) {
       blobs_to_check.push_back(bottom[i]);
     }
-  } else {
+  } else if (check_bottom >= 0) {
     CHECK_LT(check_bottom, bottom.size());
     blobs_to_check.push_back(bottom[check_bottom]);
     propagate_down[check_bottom] = true;
   }
+  CHECK_GT(blobs_to_check.size(), 0) << "No blobs to check.";
   // Compute the gradient analytically using Backward
   Caffe::set_random_seed(seed_);
   // Ignore the loss from the layer (it's just the weighted sum of the losses
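For context, a minimal sketch (not part of the commit) of how the three check_bottom modes of CheckGradientSingle behave after this change. The helper name DemoCheckBottomModes is hypothetical; layer, bottom, and top are assumed to be an already set-up Caffe layer and its blobs, and the stepsize/threshold values follow the conventions of Caffe's gradient-check tests:

    // Hypothetical usage sketch -- not from the commit.
    #include <vector>
    #include "caffe/test/test_gradient_check_util.hpp"

    namespace caffe {

    template <typename Dtype>
    void DemoCheckBottomModes(Layer<Dtype>* layer,
        const std::vector<Blob<Dtype>*>& bottom,
        const std::vector<Blob<Dtype>*>& top) {
      GradientChecker<Dtype> checker(1e-2, 1e-3);
      // check_bottom == 0 (>= 0): check the 0th bottom Blob; per the code
      // above, param Blobs are also zero-initialized and checked.
      checker.CheckGradientSingle(layer, bottom, top, 0, 0, 0);
      // check_bottom == -1: check everything -- all bottom and param Blobs.
      checker.CheckGradientSingle(layer, bottom, top, -1, 0, 0);
      // check_bottom == -2 (< -1): check only param Blobs; propagate_down
      // stays false for every bottom. Before this commit, any negative value
      // behaved like -1. The new CHECK_GT fails here if the layer has no
      // param Blobs, since blobs_to_check would be empty.
      checker.CheckGradientSingle(layer, bottom, top, -2, 0, 0);
    }

    }  // namespace caffe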
