Within-channel LRN layer #273
Merged
Changes from all commits (18 commits, all by jeffdonahue):

42c9d66 add LRN within map layer and dependencies (eltwise product and power)
f3e2fe6 add unit tests for new layer types
6e92f47 use average pool instead of conv
bd756fe add padding for average pooling
d047bef bug fix: average pooling already divides by N^2
6c844cc use split layer in LRNMapLayer
2031663 use bvlc copyright
42b832c add cifar example using LRN_MAP (just like the cuda-convnet layers-18pct)
4c81ee5 fix some param bugs
824c344 merge LRNMapLayer into LRNLayer with norm_region proto field
24f7318 replace old cifar full with within channel LRN (per cuda-convnet)
69ac4f4 minor polishing
d61adb8 remove unnecessary local variables from EltwiseProductLayer
9fb7818 don't recompute pre_pad
45f8626 cleanup extra LRN method names
86db2a9 minor unit test cleanup
bf511f7 cleanup power layer test suite
404f22d update proto field IDs from placeholder values
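The within-channel variant normalizes each activation by a local spatial region in the same channel, following the cuda-convnet "same map" response normalization that these commits reference. As a sketch of the computed function (an interpretation of the commit messages, not a formula quoted from the PR), with $N$ the local region size and $\alpha$, $\beta$ the layer parameters:

$$
b_{x,y} \;=\; \frac{a_{x,y}}{\Bigl(1 + \frac{\alpha}{N^2} \sum_{(x',y') \in S(x,y)} a_{x',y'}^{2}\Bigr)^{\beta}}
$$

where $S(x,y)$ is the $N \times N$ window centered on $(x,y)$. Per the commit messages, the layer is composed from existing primitives: a split layer fans out the input, a power layer squares it, padded average pooling computes the windowed mean of squares (the pooling already divides by $N^2$, hence the bug fix above), a second power layer applies the $-\beta$ exponent, and an eltwise product multiplies the result back into the input. The eltwise product primitive is the new layer shown in the diff below.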
src/caffe/layers/eltwise_product_layer.cpp (+62 lines)

```cpp
// Copyright 2014 BVLC and contributors.

#include <vector>

#include "caffe/layer.hpp"
#include "caffe/vision_layers.hpp"
#include "caffe/util/math_functions.hpp"

namespace caffe {

template <typename Dtype>
void EltwiseProductLayer<Dtype>::SetUp(const vector<Blob<Dtype>*>& bottom,
    vector<Blob<Dtype>*>* top) {
  CHECK_GE(bottom.size(), 2) <<
      "Eltwise Product Layer takes at least 2 blobs as input.";
  CHECK_EQ(top->size(), 1) <<
      "Eltwise Product Layer takes a single blob as output.";
  // All bottom blobs must share a shape; the top blob is reshaped to match.
  const int num = bottom[0]->num();
  const int channels = bottom[0]->channels();
  const int height = bottom[0]->height();
  const int width = bottom[0]->width();
  for (int i = 1; i < bottom.size(); ++i) {
    CHECK_EQ(num, bottom[i]->num());
    CHECK_EQ(channels, bottom[i]->channels());
    CHECK_EQ(height, bottom[i]->height());
    CHECK_EQ(width, bottom[i]->width());
  }
  (*top)[0]->Reshape(num, channels, height, width);
}

template <typename Dtype>
Dtype EltwiseProductLayer<Dtype>::Forward_cpu(
    const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  const int count = (*top)[0]->count();
  Dtype* top_data = (*top)[0]->mutable_cpu_data();
  // Multiply the first two bottoms into top, then fold in any remaining ones.
  caffe_mul(count, bottom[0]->cpu_data(), bottom[1]->cpu_data(), top_data);
  for (int i = 2; i < bottom.size(); ++i) {
    caffe_mul(count, top_data, bottom[i]->cpu_data(), top_data);
  }
  return Dtype(0.);
}

template <typename Dtype>
void EltwiseProductLayer<Dtype>::Backward_cpu(const vector<Blob<Dtype>*>& top,
    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
  if (propagate_down) {
    const int count = top[0]->count();
    const Dtype* top_data = top[0]->cpu_data();
    const Dtype* top_diff = top[0]->cpu_diff();
    for (int i = 0; i < bottom->size(); ++i) {
      const Dtype* bottom_data = (*bottom)[i]->cpu_data();
      Dtype* bottom_diff = (*bottom)[i]->mutable_cpu_diff();
      // d(top)/d(bottom_i) equals the product of the other bottoms, i.e.
      // top / bottom_i, so bottom_i's diff is (top / bottom_i) * top_diff.
      caffe_div(count, top_data, bottom_data, bottom_diff);
      caffe_mul(count, bottom_diff, top_diff, bottom_diff);
    }
  }
}

INSTANTIATE_CLASS(EltwiseProductLayer);

}  // namespace caffe
```
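The backward pass exploits a convenient identity: the partial derivative of an elementwise product with respect to one factor is the product of all the other factors, which can be recovered by dividing the already-computed output by that factor,

$$
\frac{\partial}{\partial x_i} \prod_j x_j \;=\; \prod_{j \neq i} x_j \;=\; \frac{\prod_j x_j}{x_i},
$$

so each bottom's diff is the top data divided by that bottom's data, times the top diff. This avoids recomputing partial products, at the cost of being undefined wherever a bottom's data is exactly zero (an observation about the technique, not an issue raised in the PR).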
src/caffe/layers/eltwise_product_layer.cu (+42 lines)

```cpp
// Copyright 2014 BVLC and contributors.

#include <vector>

#include "caffe/layer.hpp"
#include "caffe/vision_layers.hpp"
#include "caffe/util/math_functions.hpp"

namespace caffe {

// GPU counterparts of the CPU implementations above; same logic, using the
// caffe_gpu_* math functions on device memory.
template <typename Dtype>
Dtype EltwiseProductLayer<Dtype>::Forward_gpu(
    const vector<Blob<Dtype>*>& bottom, vector<Blob<Dtype>*>* top) {
  const int count = (*top)[0]->count();
  Dtype* top_data = (*top)[0]->mutable_gpu_data();
  caffe_gpu_mul(count, bottom[0]->gpu_data(), bottom[1]->gpu_data(), top_data);
  for (int i = 2; i < bottom.size(); ++i) {
    caffe_gpu_mul(count, top_data, bottom[i]->gpu_data(), top_data);
  }
  return Dtype(0.);
}

template <typename Dtype>
void EltwiseProductLayer<Dtype>::Backward_gpu(const vector<Blob<Dtype>*>& top,
    const bool propagate_down, vector<Blob<Dtype>*>* bottom) {
  if (propagate_down) {
    const int count = top[0]->count();
    const Dtype* top_data = top[0]->gpu_data();
    const Dtype* top_diff = top[0]->gpu_diff();
    for (int i = 0; i < bottom->size(); ++i) {
      const Dtype* bottom_data = (*bottom)[i]->gpu_data();
      Dtype* bottom_diff = (*bottom)[i]->mutable_gpu_diff();
      caffe_gpu_div(count, top_data, bottom_data, bottom_diff);
      caffe_gpu_mul(count, bottom_diff, top_diff, bottom_diff);
    }
  }
}

INSTANTIATE_CLASS(EltwiseProductLayer);

}  // namespace caffe
```
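To make the layer's interface concrete, here is a minimal usage sketch in the style of the era's Caffe layer unit tests. It is illustrative, not taken from the PR: the blob shapes and the Gaussian filler are arbitrary choices, and the PR's actual tests may be organized differently.

```cpp
#include <vector>

#include "caffe/blob.hpp"
#include "caffe/common.hpp"
#include "caffe/filler.hpp"
#include "caffe/vision_layers.hpp"

using namespace caffe;  // NOLINT(build/namespaces)

int main() {
  Caffe::set_mode(Caffe::CPU);

  // Two same-shaped inputs; the top blob is reshaped by SetUp.
  Blob<float> bottom_a(2, 3, 4, 5);
  Blob<float> bottom_b(2, 3, 4, 5);
  Blob<float> top_blob;

  // Fill the inputs with Gaussian noise (arbitrary illustrative data).
  FillerParameter filler_param;
  GaussianFiller<float> filler(filler_param);
  filler.Fill(&bottom_a);
  filler.Fill(&bottom_b);

  std::vector<Blob<float>*> bottom;
  bottom.push_back(&bottom_a);
  bottom.push_back(&bottom_b);
  std::vector<Blob<float>*> top;
  top.push_back(&top_blob);

  LayerParameter layer_param;
  EltwiseProductLayer<float> layer(layer_param);
  layer.SetUp(bottom, &top);    // shape checks; top becomes 2x3x4x5
  layer.Forward(bottom, &top);  // top[i] = a[i] * b[i], elementwise

  // Each output element is the product of the corresponding inputs.
  const float expected = bottom_a.cpu_data()[0] * bottom_b.cpu_data()[0];
  return top_blob.cpu_data()[0] == expected ? 0 : 1;
}
```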
Review comment: It seems that both PowerLayer and EltwiseProductLayer are good candidates for the refactoring in #244.