name: "CaffeNet"
input: "data"
input_dim: 1
input_dim: 3
input_dim: 227
input_dim: 227
layers {
  name: "conv1"
  type: CONVOLUTION
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 96
    kernel_size: 7
    stride: 4
  }
}
layers {
  name: "relu1"
  type: RELU
  bottom: "conv1"
  top: "conv1"
}
layers {
  name: "pool1"
  type: POOLING
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm1"
  type: LRN
  bottom: "pool1"
  top: "norm1"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layers {
  name: "conv2"
  type: CONVOLUTION
  bottom: "norm1"
  top: "conv2"
  convolution_param {
    num_output: 256
    pad: 2
    kernel_size: 5
  }
}
layers {
  name: "relu2"
  type: RELU
  bottom: "conv2"
  top: "conv2"
}
layers {
  name: "pool2"
  type: POOLING
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "norm2"
  type: LRN
  bottom: "pool2"
  top: "norm2"
  lrn_param {
    local_size: 5
    alpha: 0.0001
    beta: 0.75
  }
}
layers {
  name: "conv3"
  type: CONVOLUTION
  bottom: "norm2"
  top: "conv3"
  convolution_param {
    num_output: 384
    pad: 1
    kernel_size: 3
  }
}
layers {
  name: "relu3"
  type: RELU
  bottom: "conv3"
  top: "conv3"
}
layers {
  name: "pool5"
  type: POOLING
  bottom: "conv3"
  top: "pool5"
  pooling_param {
    pool: MAX
    kernel_size: 3
    stride: 2
  }
}
layers {
  name: "fc6"
  type: INNER_PRODUCT
  bottom: "pool5"
  top: "fc6"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu6"
  type: RELU
  bottom: "fc6"
  top: "fc6"
}
layers {
  name: "drop6"
  type: DROPOUT
  bottom: "fc6"
  top: "fc6"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc7"
  type: INNER_PRODUCT
  bottom: "fc6"
  top: "fc7"
  inner_product_param {
    num_output: 512
  }
}
layers {
  name: "relu7"
  type: RELU
  bottom: "fc7"
  top: "fc7"
}
layers {
  name: "drop7"
  type: DROPOUT
  bottom: "fc7"
  top: "fc7"
  dropout_param {
    dropout_ratio: 0.5
  }
}
layers {
  name: "fc8"
  type: INNER_PRODUCT
  bottom: "fc7"
  top: "fc8"
  inner_product_param {
    num_output: 2
  }
}
layers {
  name: "prob"
  type: SOFTMAX
  bottom: "fc8"
  top: "prob"
}
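
For reference, the snippet below is a minimal sketch of how a deploy definition like this one could be run for inference through the pycaffe interface. The file names (deploy.prototxt, model.caffemodel, example.jpg) and the BGR channel mean are illustrative assumptions, not values taken from this file; substitute your own trained weights and preprocessing.

# run_caffenet.py -- inference sketch for the 1 x 3 x 227 x 227 deploy net above
import numpy as np
import caffe

caffe.set_mode_cpu()
# Assumed file names: the prototxt above saved as deploy.prototxt plus trained weights.
net = caffe.Net('deploy.prototxt', 'model.caffemodel', caffe.TEST)

# Preprocess one image to match the "data" input blob: resize, HxWxC RGB -> CxHxW BGR,
# rescale to [0, 255], and subtract an (assumed) per-channel mean.
img = caffe.io.load_image('example.jpg')  # HxWxC, RGB, float in [0, 1]
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape})
transformer.set_transpose('data', (2, 0, 1))
transformer.set_raw_scale('data', 255)
transformer.set_channel_swap('data', (2, 1, 0))
transformer.set_mean('data', np.array([104.0, 117.0, 123.0]))  # assumed BGR mean

net.blobs['data'].data[...] = transformer.preprocess('data', img)
out = net.forward()
prob = out['prob'][0]  # two-class softmax output from the "prob" layer
print('class probabilities:', prob)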