转自:http://christopher5106.github.io/deep/learning/2015/09/04/Deep-learning-tutorial-on-Caffe-Technology.html
Define a model in Python
It is also possible to define the net model directly in Python and save it to a prototxt file. Here are the commands:
import caffe
from caffe import layers as L
from caffe import params as P
def lenet(lmdb, batch_size):
    """Build the LeNet architecture as a caffe NetSpec.

    Args:
        lmdb: path to the LMDB database supplying images and labels.
        batch_size: number of samples per forward pass.

    Returns:
        The generated NetParameter protobuf message; `str()` of it is
        valid prototxt text.
    """
    # our version of LeNet: a series of linear and simple nonlinear transformations
    n = caffe.NetSpec()
    # ntop=2: the Data layer produces two blobs, data and label.
    # scale=1./255 normalizes 8-bit pixel values into [0, 1].
    n.data, n.label = L.Data(batch_size=batch_size, backend=P.Data.LMDB, source=lmdb,
                             transform_param=dict(scale=1./255), ntop=2)
    n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))
    n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50, weight_filler=dict(type='xavier'))
    n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.ip1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))
    # in_place=True: ReLU overwrites ip1's blob instead of allocating a new one.
    n.relu1 = L.ReLU(n.ip1, in_place=True)
    n.ip2 = L.InnerProduct(n.relu1, num_output=10, weight_filler=dict(type='xavier'))
    n.loss = L.SoftmaxWithLoss(n.ip2, n.label)
    return n.to_proto()
# Generate the train and test net definitions from the same NetSpec builder,
# differing only in the LMDB source and batch size.
for proto_path, lmdb_path, batch in (
    ('examples/mnist/lenet_auto_train.prototxt', 'examples/mnist/mnist_train_lmdb', 64),
    ('examples/mnist/lenet_auto_test.prototxt', 'examples/mnist/mnist_test_lmdb', 100),
):
    with open(proto_path, 'w') as f:
        f.write(str(lenet(lmdb_path, batch)))
Create your custom python layer
Let’s create a layer to add a value.
Add a custom python layer to your conv.prototxt
file :
# Python layer: Caffe delegates this layer's setup/reshape/forward/backward
# to the MyLayer class found in module mypythonlayer (mypythonlayer.py).
layer {
name: 'MyPythonLayer'
type: 'Python'  # 'Python' type dispatches through python_param below
top: 'output'   # name of the blob this layer produces
bottom: 'conv'  # input blob: the output of the 'conv' layer
python_param {
module: 'mypythonlayer'  # Python module to import (must be importable)
layer: 'MyLayer'         # class inside that module implementing caffe.Layer
param_str: "'num': 21"   # free-form string; parsed with yaml in setup()
}
}
and create a mypythonlayer.py
file that has to be in the current directory or on the PYTHONPATH:
import caffe
import numpy as np
import yaml
class MyLayer(caffe.Layer):
    """Custom Python layer that adds a constant (param 'num') to its input."""

    def setup(self, bottom, top):
        # param_str carries the prototxt python_param string, e.g. "'num': 21".
        # safe_load: never run full yaml.load on strings coming from config files.
        self.num = yaml.safe_load(self.param_str)["num"]
        print("Parameter num : ", self.num)

    def reshape(self, bottom, top):
        # Output shape is set in forward() itself, so nothing to do here.
        pass

    def forward(self, bottom, top):
        # Output has the same shape as the input; add the constant elementwise.
        top[0].reshape(*bottom[0].shape)
        top[0].data[...] = bottom[0].data + self.num

    def backward(self, top, propagate_down, bottom):
        # This layer does not propagate gradients.
        pass
This layer will simply add the value `num` to its input. Let's test it:
# Instantiate the net from conv.prototxt in TEST phase (no training state).
net = caffe.Net('conv.prototxt',caffe.TEST)
# NOTE(review): `Image` is never imported in this snippet — it needs
# `from PIL import Image` (Pillow) to run; confirm against the original post.
im = np.array(Image.open('cat_gray.jpg'))
# Add batch and channel axes: (H, W) -> (1, 1, H, W), Caffe's NCHW layout.
im_input = im[np.newaxis, np.newaxis, :, :]
# Resize the input blob to the image's shape before copying the data in.
net.blobs['data'].reshape(*im_input.shape)
net.blobs['data'].data[...] = im_input
# Run a forward pass through all layers, including the custom Python layer.
net.forward()