如果我们想原封不动地传送数据,也就是说建一个不做任何操作、仅传数据的层,可以利用Scale层,Python中写法如下:
n.scale1 = L.Scale(n.pool2,name='scale1',param=dict(lr_mult=0),filler=dict(type="constant",value=1))
生成的prototxt协议文件如下:
layer {
name: "scale1"
type: "Scale"
bottom: "pool2"
top: "scale1"
param {
lr_mult: 0
}
scale_param {
filler {
type: "constant"
value: 1
}
}
}
功能: 下面的示例使用 scale_layer 将输入数据乘以标量 0.01。
test_scale_layer.py
#coding=UTF-8
import numpy as np
import caffe

# Any pretrained caffemodel can be used here; the Scale layer's weight is
# fixed by the constant filler in the deploy prototxt, not by this file.
WEIGHT_FILE = '../attention_network/snapshots_aflw_vgg16_klLoss_finetune_attention1/cross1/vgg_iter_10000.caffemodel'
DEPLOY_FILE = 'scale_deploy.prototxt'

net = caffe.Net(DEPLOY_FILE, WEIGHT_FILE, caffe.TEST)

# Two samples with three channels each: w11..w13 for sample 1, w21..w23 for sample 2.
weights = np.array([[11, 12, 13], [21, 22, 23]])
print(weights.shape)  # (2, 3): axis 0 = sample, axis 1 = channel
print(weights)

# Reshape to NCHW (2, 3, 1, 1) so it matches the Input layer's declared shape.
net.blobs['Features'].data[...] = weights.reshape(weights.shape[0], weights.shape[1], 1, 1)
net.forward()

scaled = net.blobs['scale'].data
print('scale: ', scaled.shape)
print(scaled)
scale_deploy.prototxt
layer {
name: "data"
type: "Input"
top: "Features"
input_param {
shape {
dim: 2
dim: 3
dim: 1
dim: 1
}
}
}
layer {
name: "scale_layer"
type: "Scale"
bottom: "Features"
top: "scale"
param {
lr_mult: 0
}
scale_param {
filler {
type: "constant"
value: 0.01
}
}
}