首先,在命令行打開jupyter
sudo ipython notebook --allow-root
Then open a new Python notebook; you should see something like the following.
matplotlib --- used to plot the pictures
# Import required packages
import numpy as np
import matplotlib.pyplot as plt
import os,sys,caffe
from PIL import Image
# IPython/Jupyter magic: render matplotlib figures inline in the notebook
%matplotlib inline
# Set the path to the Caffe root directory and make it the working directory
caffe_root='/home/peipei/caffe/'
os.chdir(caffe_root)
# NOTE(review): sys.path is extended AFTER `import caffe` above, so this only
# works if caffe is already importable from the environment — confirm the order.
sys.path.insert(0,caffe_root+'python')
# Set default display parameters for all figures in this notebook
plt.rcParams['figure.figsize'] = (64, 64)
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
# Load the Caffe model: network architecture (deploy .prototxt) plus
# trained parameters (.caffemodel), in test (inference) phase
net = caffe.Net(caffe_root + 'examples/myfile2/cifar10_quick.prototxt',
caffe_root + 'examples/myfile2/cifar10_quick_iter_300.caffemodel',
caffe.TEST)
# List each parameterized layer name with the shape of its weight blob
[(k, v[0].data.shape) for k, v in net.params.items()]
# Shape of the network's input data blob
net.blobs['data'].data.shape
# Helper to display the parameters (filters) of a layer as one tiled image
def show_feature(data, padsize=1, padval=0):
    """Tile a stack of 2-D filters into a square grid and display it.

    data   : array shaped (n_filters, height, width), optionally with a
             trailing channel dimension.
    padsize: pixels of padding inserted between adjacent tiles.
    padval : fill value used for the padding (applied after normalization).
    """
    # Work on a copy: the original subtracted/divided the caller's array in
    # place, silently modifying e.g. net.params weights passed in.
    data = np.array(data, dtype=np.float64)
    # Normalize to [0, 1]; guard against a constant array (max == min),
    # which previously produced a divide-by-zero / NaN image.
    lo, hi = data.min(), data.max()
    if hi > lo:
        data = (data - lo) / (hi - lo)
    else:
        data = np.zeros_like(data)
    # Force the number of filters up to the next perfect square (n x n grid)
    n = int(np.ceil(np.sqrt(data.shape[0])))
    padding = ((0, n ** 2 - data.shape[0]), (0, padsize), (0, padsize)) + ((0, 0),) * (data.ndim - 3)
    data = np.pad(data, padding, mode='constant', constant_values=(padval, padval))
    # Tile the filters into one image: interleave grid rows/columns with the
    # per-filter rows/columns, then collapse to a single 2-D (or 3-D) array
    data = data.reshape((n, n) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))
    data = data.reshape((n * data.shape[1], n * data.shape[3]) + data.shape[4:])
    plt.imshow(data, cmap='gray')
    plt.axis('off')
# First convolution layer weights (filter bank), to be shown as images
weight = net.params["conv1"][0].data
print weight.shape
#show_feature(weight.transpose(0, 2, 3, 1))
# Run inference on the CPU
caffe.set_mode_cpu()
# Load the test image (second argument False = load as grayscale) and display it
im = caffe.io.load_image('/home/peipei/caffe/examples/myfile2/test/FVC2004_fingerprints_544/4/ms_ds2_s0001_fo10.bmp', False)
print im.shape
#imr = im.resize((64,64)) #resize image required by input
print im.shape
#plt.imshow(im)
# Helper that converts a Caffe binary mean file into a NumPy .npy mean file
def convert_mean(binMean, npyMean):
    """Read a Caffe mean.binaryproto file and save it as a .npy array.

    binMean: path to the input mean.binaryproto file.
    npyMean: path where the output mean.npy file is written.
    """
    blob = caffe.proto.caffe_pb2.BlobProto()
    # Use a context manager so the file handle is closed even if parsing
    # fails (the original `open(...).read()` leaked the handle).
    with open(binMean, 'rb') as f:
        blob.ParseFromString(f.read())
    arr = np.array(caffe.io.blobproto_to_array(blob))
    # blobproto_to_array yields a leading batch axis; keep the first entry
    npy_mean = arr[0]
    np.save(npyMean, npy_mean)
# Generate the Python mean file mean.npy from the binary mean file
binMean=caffe_root+'examples/myfile2/mean.binaryproto'
npyMean=caffe_root+'examples/myfile2/mean.npy'
convert_mean(binMean,npyMean)
# Load the image into the data blob and subtract the mean
transformer = caffe.io.Transformer({'data': net.blobs['data'].data.shape}) # set the input blob shape, e.g. (1,1,64,64)
transformer.set_transpose('data', (2,0,1)) # reorder dimensions: HWC (64,64,1) -> CHW (1,64,64)
transformer.set_mean('data', np.load(npyMean).mean(1).mean(1)) # subtract the per-channel mean
transformer.set_raw_scale('data', 255) # rescale pixel values from [0,1] to [0,255]
#transformer.set_channel_swap('data', (2,1,0)) # swap channels RGB -> BGR (not needed for grayscale input)
net.blobs['data'].data[...] = transformer.preprocess('data',im) # apply the preprocessing above and load the image into the blob
inputData=net.blobs['data'].data
out = net.forward()
# Helper to display the output data of a layer as one tiled image
def show_data(data, padsize=1, padval=0):
    """Tile a stack of 2-D feature maps into a square grid and display it
    in a new figure.

    data   : array shaped (n_maps, height, width), optionally with a
             trailing channel dimension.
    padsize: pixels of padding inserted between adjacent tiles.
    padval : fill value used for the padding (applied after normalization).
    """
    # Work on a copy: the original normalized the caller's array in place,
    # silently modifying e.g. net.blobs[...].data passed in.
    data = np.array(data, dtype=np.float64)
    # Normalize to [0, 1]; guard against a constant array (max == min),
    # which previously produced a divide-by-zero / NaN image.
    lo, hi = data.min(), data.max()
    if hi > lo:
        data = (data - lo) / (hi - lo)
    else:
        data = np.zeros_like(data)
    # Force the number of maps up to the next perfect square (n x n grid)
    n = int(np.ceil(np.sqrt(data.shape[0])))
    padding = ((0, n ** 2 - data.shape[0]), (0, padsize), (0, padsize)) + ((0, 0),) * (data.ndim - 3)
    data = np.pad(data, padding, mode='constant', constant_values=(padval, padval))
    # Tile the maps into one image: interleave grid rows/columns with the
    # per-map rows/columns, then collapse to a single 2-D (or 3-D) array
    data = data.reshape((n, n) + data.shape[1:]).transpose((0, 2, 1, 3) + tuple(range(4, data.ndim + 1)))
    data = data.reshape((n * data.shape[1], n * data.shape[3]) + data.shape[4:])
    plt.figure()
    plt.imshow(data,cmap='gray')
    plt.axis('off')
plt.rcParams['figure.figsize'] = (64, 64)
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
# Show the output data and the weights (filters) of the first convolution layer
show_data(net.blobs['conv1'].data[0])
print net.blobs['conv1'].data.shape
show_data(net.params['conv1'][0].data.reshape(32,5,5))
print net.params['conv1'][0].data.shape
# Probability of each class from the final (softmax) layer
prob = net.blobs['prob'].data[0].flatten()
print prob
plt.plot(prob)
# NOTE(review): argsort()[2] selects the most probable class only because there
# are exactly 3 classes here; argsort()[-1] would generalize — confirm class count.
order=prob.argsort()[2]
print order
Finally, show the probabilities of all the labels:
[ 0.02902012  0.946181    0.02479883]
`print order` shows the index of the label with the highest probability.