Sharing the code used to extract FC_7 and CONV5_4 features
Showing 6 changed files with 826 additions and 0 deletions.
@@ -0,0 +1,8 @@
This directory contains the files used to extract the visual features for the WMT16 Shared Task.

We used code from Andrej Karpathy's [neuraltalk](https://github.com/karpathy/neuraltalk).

We modified the [original VGG-19 deploy.prototxt](https://gist.github.com/ksimonyan/3785162f95cd2d5fee77#file-vgg_ilsvrc_19_layers_deploy-prototxt) to extract features from the 'relu7' and 'conv5_4' layers. See the following files for details; a usage sketch follows this listing.

* deploy_features-conv54.prototxt (extract CONV5_4 features)
* deploy_features-fc7.prototxt (extract FC_7 features)
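The repository's own extraction code is MATLAB-based (hence the `matlab_features_reference` directory), but the same deploy files can be driven from pycaffe. The following is a minimal sketch, not the repository's pipeline: the weights filename, image path, and preprocessing constants are assumptions based on the standard VGG-19 release, and `deploy_features-conv54.prototxt` is the file added in this commit.

```python
# Minimal pycaffe sketch for extracting CONV5_4 features with the modified
# deploy file. Paths and preprocessing are assumptions, not the repo's code.
import numpy as np
import caffe

net = caffe.Net('deploy_features-conv54.prototxt',  # deploy file from this commit
                'VGG_ILSVRC_19_layers.caffemodel',  # assumed: standard VGG-19 weights
                caffe.TEST)

# Standard VGG preprocessing: 224x224 input, BGR channel order, mean subtraction.
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))     # HWC -> CHW
transformer.set_channel_swap('data', (2, 1, 0))  # RGB -> BGR
transformer.set_raw_scale('data', 255)           # caffe.io loads images in [0, 1]
transformer.set_mean('data', np.array([103.939, 116.779, 123.68]))  # BGR means

image = caffe.io.load_image('example.jpg')       # hypothetical input image
net.blobs['data'].reshape(1, 3, 224, 224)        # single image instead of batch of 10
net.blobs['data'].data[0] = transformer.preprocess('data', image)
net.forward()

# The conv54 deploy file ends at the conv5_4 layer, so its blob holds the
# extracted features: shape (1, 512, 14, 14) for a 224x224 input.
features = net.blobs['conv5_4'].data.copy()
```

The FC_7 deploy file would presumably be used the same way; in the original VGG-19 prototxt, relu7 runs in-place on the fc7 blob, so the post-ReLU features described in the README would be read from `net.blobs['fc7']`.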
matlab_features_reference/deploy_features-conv54.prototxt (316 additions, 0 deletions)
@@ -0,0 +1,316 @@
name: "VGG_ILSVRC_19_layers"
input: "data"
input_dim: 10
input_dim: 3
input_dim: 224
input_dim: 224
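# input shape (NCHW): batch of 10 three-channel 224x224 images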
layers {
  bottom: "data"
  top: "conv1_1"
  name: "conv1_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv1_1"
  top: "conv1_1"
  name: "relu1_1"
  type: RELU
}
layers {
  bottom: "conv1_1"
  top: "conv1_2"
  name: "conv1_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 64
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv1_2"
  top: "conv1_2"
  name: "relu1_2"
  type: RELU
}
layers {
  bottom: "conv1_2"
  top: "pool1"
  name: "pool1"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool1"
  top: "conv2_1"
  name: "conv2_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv2_1"
  top: "conv2_1"
  name: "relu2_1"
  type: RELU
}
layers {
  bottom: "conv2_1"
  top: "conv2_2"
  name: "conv2_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 128
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv2_2"
  top: "conv2_2"
  name: "relu2_2"
  type: RELU
}
layers {
  bottom: "conv2_2"
  top: "pool2"
  name: "pool2"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool2"
  top: "conv3_1"
  name: "conv3_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_1"
  top: "conv3_1"
  name: "relu3_1"
  type: RELU
}
layers {
  bottom: "conv3_1"
  top: "conv3_2"
  name: "conv3_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_2"
  top: "conv3_2"
  name: "relu3_2"
  type: RELU
}
layers {
  bottom: "conv3_2"
  top: "conv3_3"
  name: "conv3_3"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_3"
  top: "conv3_3"
  name: "relu3_3"
  type: RELU
}
layers {
  bottom: "conv3_3"
  top: "conv3_4"
  name: "conv3_4"
  type: CONVOLUTION
  convolution_param {
    num_output: 256
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv3_4"
  top: "conv3_4"
  name: "relu3_4"
  type: RELU
}
layers {
  bottom: "conv3_4"
  top: "pool3"
  name: "pool3"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool3"
  top: "conv4_1"
  name: "conv4_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_1"
  top: "conv4_1"
  name: "relu4_1"
  type: RELU
}
layers {
  bottom: "conv4_1"
  top: "conv4_2"
  name: "conv4_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_2"
  top: "conv4_2"
  name: "relu4_2"
  type: RELU
}
layers {
  bottom: "conv4_2"
  top: "conv4_3"
  name: "conv4_3"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_3"
  top: "conv4_3"
  name: "relu4_3"
  type: RELU
}
layers {
  bottom: "conv4_3"
  top: "conv4_4"
  name: "conv4_4"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv4_4"
  top: "conv4_4"
  name: "relu4_4"
  type: RELU
}
layers {
  bottom: "conv4_4"
  top: "pool4"
  name: "pool4"
  type: POOLING
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layers {
  bottom: "pool4"
  top: "conv5_1"
  name: "conv5_1"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv5_1"
  top: "conv5_1"
  name: "relu5_1"
  type: RELU
}
layers {
  bottom: "conv5_1"
  top: "conv5_2"
  name: "conv5_2"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv5_2"
  top: "conv5_2"
  name: "relu5_2"
  type: RELU
}
layers {
  bottom: "conv5_2"
  top: "conv5_3"
  name: "conv5_3"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}
layers {
  bottom: "conv5_3"
  top: "conv5_3"
  name: "relu5_3"
  type: RELU
}
layers {
  bottom: "conv5_3"
  top: "conv5_4"
  name: "conv5_4"
  type: CONVOLUTION
  convolution_param {
    num_output: 512
    pad: 1
    kernel_size: 3
  }
}