# Method 1: set the deconvolution weights explicitly via net surgery.

import caffe
import surgery, score

import numpy as np
import os
import sys

try:
    # Optional: name the process after the working directory (best-effort).
    import setproctitle
    setproctitle.setproctitle(os.path.basename(os.getcwd()))
except Exception:
    # setproctitle is a convenience only; never let it break training.
    pass

# Pretrained VGG16-FCN weights to initialize from.
weights = '../ilsvrc-nets/vgg16-fcn.caffemodel'

# init: GPU id is taken from the first command-line argument.
caffe.set_device(int(sys.argv[1]))
caffe.set_mode_gpu()

solver = caffe.SGDSolver('solver.prototxt')
solver.net.copy_from(weights)

# surgeries: initialize the deconvolution ("up*") layers with bilinear kernels.
interp_layers = [k for k in solver.net.params.keys() if 'up' in k]
surgery.interp(solver.net, interp_layers)

# scoring
# NOTE(review): `val` was undefined in the original snippet (NameError at the
# seg_tests call). The upstream FCN reference loads the validation split like
# this — confirm the path matches this project's layout.
val = np.loadtxt('../data/segvalid11.txt', dtype=str)

# Train in 25 rounds of 4000 iterations, scoring on the val split each round.
for _ in range(25):
    solver.step(4000)
    score.seg_tests(solver, False, val, layer='score')

 #  make a bilinear interpolation kernel
def upsample_filt(self,size):
factor = (size + 1) // 2
if size % 2 == 1:
center = factor - 1
else:
center = factor - 0.5
og = np.ogrid[:size, :size]
return (1 - abs(og[0] - center) / factor) * \
(1 - abs(og[1] - center) / factor)

# set parameters s.t. deconvolutional layers compute bilinear interpolation
# N.B. this is for deconvolution without groups
def interp_surgery(self,net, layers):
for l in layers:
print l
m, k, h, w = net.params[l][0].data.shape   #仅仅修改w，不需要修改bias，其为0
print("deconv shape:\n")
print m, k, h, w
if m != k and k != 1:
print 'input + output channels need to be the same or |output| == 1'
raise
if h != w:
print 'filters need to be square'
raise
filt = self.upsample_filt(h)
print(filt)
net.params[l][0].data[range(m), range(k), :, :] = filt

# Method 2: specify the weight_filler directly inside the Deconvolution layer, i.e.:

layer {
name: "fc8_upsample"
type: "Deconvolution"
bottom: "fc8"
top: "fc8_upsample"
param {
lr_mult: 0
decay_mult: 0
}
param {
lr_mult: 0
decay_mult: 0
}
convolution_param {
num_output: 1
kernel_size: 16
stride: 8
weight_filler {   # equivalent to the direct weight assignment shown above
type: "bilinear"
}
}
}

Initializing weight_filler as "bilinear" is equivalent to assigning the values directly as in the net-surgery approach above.

[0,0,:,:]:

[[ 0.00390625 0.01171875 0.01953125 0.02734375 0.03515625 0.04296875
0.05078125 0.05859375 0.05859375 0.05078125 0.04296875 0.03515625
0.02734375 0.01953125 0.01171875 0.00390625]
[ 0.01171875 0.03515625 0.05859375 0.08203125 0.10546875 0.12890625
0.15234375 0.17578125 0.17578125 0.15234375 0.12890625 0.10546875
0.08203125 0.05859375 0.03515625 0.01171875]
[ 0.01953125 0.05859375 0.09765625 0.13671875 0.17578125 0.21484375
0.25390625 0.29296875 0.29296875 0.25390625 0.21484375 0.17578125
0.13671875 0.09765625 0.05859375 0.01953125]
[ 0.02734375 0.08203125 0.13671875 0.19140625 0.24609375 0.30078125
0.35546875 0.41015625 0.41015625 0.35546875 0.30078125 0.24609375
0.19140625 0.13671875 0.08203125 0.02734375]
[ 0.03515625 0.10546875 0.17578125 0.24609375 0.31640625 0.38671875
0.45703125 0.52734375 0.52734375 0.45703125 0.38671875 0.31640625
0.24609375 0.17578125 0.10546875 0.03515625]
[ 0.04296875 0.12890625 0.21484375 0.30078125 0.38671875 0.47265625
0.55859375 0.64453125 0.64453125 0.55859375 0.47265625 0.38671875
0.30078125 0.21484375 0.12890625 0.04296875]
[ 0.05078125 0.15234375 0.25390625 0.35546875 0.45703125 0.55859375
0.66015625 0.76171875 0.76171875 0.66015625 0.55859375 0.45703125
0.35546875 0.25390625 0.15234375 0.05078125]
[ 0.05859375 0.17578125 0.29296875 0.41015625 0.52734375 0.64453125
0.76171875 0.87890625 0.87890625 0.76171875 0.64453125 0.52734375
0.41015625 0.29296875 0.17578125 0.05859375]
[ 0.05859375 0.17578125 0.29296875 0.41015625 0.52734375 0.64453125
0.76171875 0.87890625 0.87890625 0.76171875 0.64453125 0.52734375
0.41015625 0.29296875 0.17578125 0.05859375]
[ 0.05078125 0.15234375 0.25390625 0.35546875 0.45703125 0.55859375
0.66015625 0.76171875 0.76171875 0.66015625 0.55859375 0.45703125
0.35546875 0.25390625 0.15234375 0.05078125]
[ 0.04296875 0.12890625 0.21484375 0.30078125 0.38671875 0.47265625
0.55859375 0.64453125 0.64453125 0.55859375 0.47265625 0.38671875
0.30078125 0.21484375 0.12890625 0.04296875]
[ 0.03515625 0.10546875 0.17578125 0.24609375 0.31640625 0.38671875
0.45703125 0.52734375 0.52734375 0.45703125 0.38671875 0.31640625
0.24609375 0.17578125 0.10546875 0.03515625]
[ 0.02734375 0.08203125 0.13671875 0.19140625 0.24609375 0.30078125
0.35546875 0.41015625 0.41015625 0.35546875 0.30078125 0.24609375
0.19140625 0.13671875 0.08203125 0.02734375]
[ 0.01953125 0.05859375 0.09765625 0.13671875 0.17578125 0.21484375
0.25390625 0.29296875 0.29296875 0.25390625 0.21484375 0.17578125
0.13671875 0.09765625 0.05859375 0.01953125]
[ 0.01171875 0.03515625 0.05859375 0.08203125 0.10546875 0.12890625
0.15234375 0.17578125 0.17578125 0.15234375 0.12890625 0.10546875
0.08203125 0.05859375 0.03515625 0.01171875]
[ 0.00390625 0.01171875 0.01953125 0.02734375 0.03515625 0.04296875
0.05078125 0.05859375 0.05859375 0.05078125 0.04296875 0.03515625
0.02734375 0.01953125 0.01171875 0.00390625]]
[0,1,:,:]:

[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
[1,0,:,:]：

[[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]
[ 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0. 0.]]
[1,1,:,:]:

[[ 0.00390625 0.01171875 0.01953125 0.02734375 0.03515625 0.04296875
0.05078125 0.05859375 0.05859375 0.05078125 0.04296875 0.03515625
0.02734375 0.01953125 0.01171875 0.00390625]
[ 0.01171875 0.03515625 0.05859375 0.08203125 0.10546875 0.12890625
0.15234375 0.17578125 0.17578125 0.15234375 0.12890625 0.10546875
0.08203125 0.05859375 0.03515625 0.01171875]
[ 0.01953125 0.05859375 0.09765625 0.13671875 0.17578125 0.21484375
0.25390625 0.29296875 0.29296875 0.25390625 0.21484375 0.17578125
0.13671875 0.09765625 0.05859375 0.01953125]
[ 0.02734375 0.08203125 0.13671875 0.19140625 0.24609375 0.30078125
0.35546875 0.41015625 0.41015625 0.35546875 0.30078125 0.24609375
0.19140625 0.13671875 0.08203125 0.02734375]
[ 0.03515625 0.10546875 0.17578125 0.24609375 0.31640625 0.38671875
0.45703125 0.52734375 0.52734375 0.45703125 0.38671875 0.31640625
0.24609375 0.17578125 0.10546875 0.03515625]
[ 0.04296875 0.12890625 0.21484375 0.30078125 0.38671875 0.47265625
0.55859375 0.64453125 0.64453125 0.55859375 0.47265625 0.38671875
0.30078125 0.21484375 0.12890625 0.04296875]
[ 0.05078125 0.15234375 0.25390625 0.35546875 0.45703125 0.55859375
0.66015625 0.76171875 0.76171875 0.66015625 0.55859375 0.45703125
0.35546875 0.25390625 0.15234375 0.05078125]
[ 0.05859375 0.17578125 0.29296875 0.41015625 0.52734375 0.64453125
0.76171875 0.87890625 0.87890625 0.76171875 0.64453125 0.52734375
0.41015625 0.29296875 0.17578125 0.05859375]
[ 0.05859375 0.17578125 0.29296875 0.41015625 0.52734375 0.64453125
0.76171875 0.87890625 0.87890625 0.76171875 0.64453125 0.52734375
0.41015625 0.29296875 0.17578125 0.05859375]
[ 0.05078125 0.15234375 0.25390625 0.35546875 0.45703125 0.55859375
0.66015625 0.76171875 0.76171875 0.66015625 0.55859375 0.45703125
0.35546875 0.25390625 0.15234375 0.05078125]
[ 0.04296875 0.12890625 0.21484375 0.30078125 0.38671875 0.47265625
0.55859375 0.64453125 0.64453125 0.55859375 0.47265625 0.38671875
0.30078125 0.21484375 0.12890625 0.04296875]
[ 0.03515625 0.10546875 0.17578125 0.24609375 0.31640625 0.38671875
0.45703125 0.52734375 0.52734375 0.45703125 0.38671875 0.31640625
0.24609375 0.17578125 0.10546875 0.03515625]
[ 0.02734375 0.08203125 0.13671875 0.19140625 0.24609375 0.30078125
0.35546875 0.41015625 0.41015625 0.35546875 0.30078125 0.24609375
0.19140625 0.13671875 0.08203125 0.02734375]
[ 0.01953125 0.05859375 0.09765625 0.13671875 0.17578125 0.21484375
0.25390625 0.29296875 0.29296875 0.25390625 0.21484375 0.17578125
0.13671875 0.09765625 0.05859375 0.01953125]
[ 0.01171875 0.03515625 0.05859375 0.08203125 0.10546875 0.12890625
0.15234375 0.17578125 0.17578125 0.15234375 0.12890625 0.10546875
0.08203125 0.05859375 0.03515625 0.01171875]
[ 0.00390625 0.01171875 0.01953125 0.02734375 0.03515625 0.04296875
0.05078125 0.05859375 0.05859375 0.05078125 0.04296875 0.03515625
0.02734375 0.01953125 0.01171875 0.00390625]]

10-19 1884

05-24 3622

06-02 2625

03-09 1万+

03-15 516

01-05 7348

11-28 1万+

03-25 539

04-23 234

10-30

02-01 1390

03-01 3万+

10-22 3045

10-16 1万+

08-07 348

11-05 2532

06-11 2198

11-13

02-13 2866

12-08 5000

03-16 1875

03-14 768

04-08 2771

11-27 529

10-21 5万+

11-24 1万+

08-26 3371

01-04 1万+

05-15 2019

02-15 1万+

03-08 2万+

02-22 5242

06-12 2449

03-08 2万+

02-27 1万+

03-25 3万+

05-14 2万+

04-05 1万+

01-27 3万+

03-17 1万+

02-24 6382

03-22 2万+

01-13

09-06 1万+

11-26 86

03-24 3万+

03-02 1万+

03-22 4万+

04-07 6万+

01-20 1914

10-02 1万+

12-12 4716

09-26

12-23 82

02-06 19万+

03-19 3万+

02-27 8万+

03-01 13万+

08-16 4502

01-21 28万+

03-04 14万+

03-01 11万+

02-19 18万+

03-08 7万+

02-15 18万+

03-13 15万+

02-11 12万+

01-05 27万+

02-03 18万+

05-08 5万+

04-25 7万+

01-27 12万+

03-10 13万+

02-19 9万+

01-20 4万+

03-18 4万+

03-19 8万+

03-12 11万+

#### 如果你是老板，你会不会踢了这样的员工？

©️2019 CSDN 皮肤主题: 编程工作室 设计师: CSDN官方博客