Defining a Network with pycaffe

1. Import the libraries

import caffe
from caffe import layers as L
from caffe import params as P

2. Create a NetSpec to define the net

n = caffe.NetSpec()
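
Every attribute subsequently assigned to n becomes a layer, and n.to_proto() serializes everything assigned so far into prototxt text. A minimal sketch of this mechanism (the DummyData layer here is only a throwaway placeholder, not part of the network built below):

n_demo = caffe.NetSpec()
n_demo.data = L.DummyData(shape=dict(dim=[1, 1, 28, 28]))  # placeholder 1x1x28x28 input
print(str(n_demo.to_proto()))  # prints the prototxt for every layer assigned so far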

3. Define the Data layer

n.data, n.label = L.Data(batch_size=batch_size, backend=P.Data.LMDB, source=lmdb,
                         transform_param=dict(scale=1./255),  # rescale [0,255] pixels to [0,1]
                         ntop=2)  # ntop=2: this layer produces two tops, data and label
# The generated prototxt:
layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  transform_param {
    scale: 0.00392156862745
  }
  data_param {
    source: "mnist/mnist_train_lmdb"
    batch_size: 64
    backend: LMDB
  }
}

Define the Convolution layer

n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))
# The generated prototxt:
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 20
    kernel_size: 5
    weight_filler {
      type: "xavier"
    }
  }
}
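
pycaffe routes layer keyword arguments into the layer's parameter message (here convolution_param), so fields such as pad and stride from caffe.proto can be set the same way. A sketch for illustration only (conv_padded is not part of the LeNet below):

n.conv_padded = L.Convolution(n.data, kernel_size=3, num_output=16,
                              pad=1, stride=1,  # extra convolution_param fields
                              weight_filler=dict(type='xavier'))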

Define the Pooling layer

n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
# The generated prototxt:
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
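
The pool argument selects a method from the PoolingParameter enum (MAX, AVE, STOCHASTIC). Average pooling works the same way; a sketch:

n.pool_avg = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.AVE)  # average- instead of max-pooling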

Define the InnerProduct layer

n.ip1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))  # n.pool2 is defined in the full network below
# The generated prototxt:

layer {
  name: "ip1"
  type: "InnerProduct"
  bottom: "pool2"
  top: "ip1"
  inner_product_param {
    num_output: 500
    weight_filler {
      type: "xavier"
    }
  }
}          
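
inner_product_param also accepts a bias_filler controlling how the bias term is initialized. A sketch assuming a constant-zero initialization (ip_biased is illustrative only):

n.ip_biased = L.InnerProduct(n.pool2, num_output=500,
                             weight_filler=dict(type='xavier'),
                             bias_filler=dict(type='constant', value=0))  # biases start at 0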

Define the ReLU layer

n.relu1 = L.ReLU(n.ip1, in_place=True)
# The generated prototxt:

layer {
  name: "relu1"
  type: "ReLU"
  bottom: "ip1"
  top: "ip1"
}
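
Because in_place=True, the top reuses the bottom blob "ip1": the activation overwrites its input and saves memory. Without it, ReLU writes to a new blob named after the attribute; a sketch:

n.relu1_out = L.ReLU(n.ip1)  # no in_place: top becomes a separate blob "relu1_out"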

Define the SoftmaxWithLoss layer

n.loss = L.SoftmaxWithLoss(n.ip2, n.label)  # n.ip2 is defined in the full network below

# The generated prototxt:

layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "ip2"
  bottom: "label"
  top: "loss"
}
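
For a test-phase net it is common to pair the loss with an Accuracy layer; a sketch (not included in the files generated below):

n.acc = L.Accuracy(n.ip2, n.label)  # fraction of correct top-1 predictions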

Define the MNIST network (LeNet)

import caffe
from caffe import layers as L, params as P

def lenet(lmdb, batch_size):
    n = caffe.NetSpec()
    n.data, n.label = L.Data(batch_size=batch_size, backend=P.Data.LMDB, source=lmdb,
                             transform_param=dict(scale=1./255), ntop=2)
    n.conv1 = L.Convolution(n.data, kernel_size=5, num_output=20, weight_filler=dict(type='xavier'))
    n.pool1 = L.Pooling(n.conv1, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.conv2 = L.Convolution(n.pool1, kernel_size=5, num_output=50, weight_filler=dict(type='xavier'))
    n.pool2 = L.Pooling(n.conv2, kernel_size=2, stride=2, pool=P.Pooling.MAX)
    n.ip1 = L.InnerProduct(n.pool2, num_output=500, weight_filler=dict(type='xavier'))
    n.relu1 = L.ReLU(n.ip1, in_place=True)
    n.ip2 = L.InnerProduct(n.relu1, num_output=10, weight_filler=dict(type='xavier'))
    n.loss = L.SoftmaxWithLoss(n.ip2, n.label)
    return n.to_proto()

Save the network definitions:

with open('mnist/lenet_auto_train.prototxt', 'w') as f:
    f.write(str(lenet('mnist/mnist_train_lmdb', 64)))
with open('mnist/lenet_auto_test.prototxt', 'w') as f:
    f.write(str(lenet('mnist/mnist_test_lmdb', 100)))

This produces the following lenet_auto_train.prototxt (lenet_auto_test.prototxt is similar):

layer {
  name: "data"
  type: "Data"
  top: "data"
  top: "label"
  transform_param {
    scale: 0.00392156862745
  }
  data_param {
    source: "mnist/mnist_train_lmdb"
    batch_size: 64
    backend: LMDB
  }
}
layer {
  name: "conv1"
  type: "Convolution"
  bottom: "data"
  top: "conv1"
  convolution_param {
    num_output: 20
    kernel_size: 5
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "pool1"
  type: "Pooling"
  bottom: "conv1"
  top: "pool1"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "conv2"
  type: "Convolution"
  bottom: "pool1"
  top: "conv2"
  convolution_param {
    num_output: 50
    kernel_size: 5
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "pool2"
  type: "Pooling"
  bottom: "conv2"
  top: "pool2"
  pooling_param {
    pool: MAX
    kernel_size: 2
    stride: 2
  }
}
layer {
  name: "ip1"
  type: "InnerProduct"
  bottom: "pool2"
  top: "ip1"
  inner_product_param {
    num_output: 500
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "relu1"
  type: "ReLU"
  bottom: "ip1"
  top: "ip1"
}
layer {
  name: "ip2"
  type: "InnerProduct"
  bottom: "ip1"
  top: "ip2"
  inner_product_param {
    num_output: 10
    weight_filler {
      type: "xavier"
    }
  }
}
layer {
  name: "loss"
  type: "SoftmaxWithLoss"
  bottom: "ip2"
  bottom: "label"
  top: "loss"
}
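
With both prototxt files written, the usual next step is to drive training through a solver. The solver file name below is an assumption (it is not created in this post); such a file would reference lenet_auto_train.prototxt and define the learning-rate schedule:

import caffe

caffe.set_mode_cpu()
# hypothetical solver definition referencing the train/test nets generated above
solver = caffe.SGDSolver('mnist/lenet_auto_solver.prototxt')
solver.step(1)  # one iteration: forward, backward, and a parameter update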