
import os
import os.path

def GenConvolutionLayer(train_prototxt_file, layer_name, bottom, top, num_output, kernel_size, pad=0):
    train_prototxt_file.write("layer {\n")
    train_prototxt_file.write('  name: "%s"\n' % layer_name)
    train_prototxt_file.write('  type: "Convolution"\n')
    train_prototxt_file.write('  bottom: "%s"\n' % bottom)
    train_prototxt_file.write('  top: "%s"\n' % top)

    # weight learning-rate / decay multipliers
    train_prototxt_file.write("  param {\n")
    train_prototxt_file.write("    lr_mult: 1\n")
    train_prototxt_file.write("    decay_mult: 1\n")
    train_prototxt_file.write("  }\n")

    # bias learning-rate / decay multipliers
    train_prototxt_file.write("  param {\n")
    train_prototxt_file.write("    lr_mult: 2\n")
    train_prototxt_file.write("    decay_mult: 0\n")
    train_prototxt_file.write("  }\n")

    train_prototxt_file.write("  convolution_param {\n")
    train_prototxt_file.write("    num_output: %d\n" % num_output)
    if pad != 0:
        train_prototxt_file.write("    pad: %d\n" % pad)
    train_prototxt_file.write("    kernel_size: %d\n" % kernel_size)
    train_prototxt_file.write("    weight_filler {\n")
    train_prototxt_file.write('      type: "xavier"\n')
    train_prototxt_file.write("    }\n")
    train_prototxt_file.write("    bias_filler {\n")
    train_prototxt_file.write('      type: "constant"\n')
    train_prototxt_file.write("      value: 0.2\n")
    train_prototxt_file.write("    }\n")
    train_prototxt_file.write("  }\n")
    train_prototxt_file.write("}\n")
    train_prototxt_file.write("\n")
    
def GenReLuLayer(train_prototxt_file, layer_name, bottom, top):
    train_prototxt_file.write("layer {\n")
    train_prototxt_file.write('  name: "%s"\n' % layer_name)
    train_prototxt_file.write('  type: "ReLU"\n')
    train_prototxt_file.write('  bottom: "%s"\n' % bottom)
    train_prototxt_file.write('  top: "%s"\n' % top)
    train_prototxt_file.write("}\n")
    train_prototxt_file.write("\n")

def GenPoolingLayer(train_prototxt_file, layer_name, bottom, top, pool='MAX', kernel_size=3, stride=1, pad=1):
    train_prototxt_file.write("layer {\n")
    train_prototxt_file.write('  name: "%s"\n' % layer_name)
    train_prototxt_file.write('  type: "Pooling"\n')
    train_prototxt_file.write('  bottom: "%s"\n' % bottom)
    train_prototxt_file.write('  top: "%s"\n' % top)

    train_prototxt_file.write("  pooling_param {\n")
    train_prototxt_file.write("    pool: %s\n" % pool)  # MAX / AVE enum value, not quoted
    train_prototxt_file.write("    kernel_size: %d\n" % kernel_size)
    train_prototxt_file.write("    stride: %d\n" % stride)
    train_prototxt_file.write("    pad: %d\n" % pad)
    train_prototxt_file.write("  }\n")
    train_prototxt_file.write("}\n")
    train_prototxt_file.write("\n")

def GenConcatLayer(train_prototxt_file, layer_name, bottom1, bottom2, bottom3, bottom4, top):
    train_prototxt_file.write("layer {\n")
    train_prototxt_file.write('  name: "%s"\n' % layer_name)
    train_prototxt_file.write('  type: "Concat"\n')
    for b in (bottom1, bottom2, bottom3, bottom4):
        train_prototxt_file.write('  bottom: "%s"\n' % b)
    train_prototxt_file.write('  top: "%s"\n' % top)
    train_prototxt_file.write("}\n")
    train_prototxt_file.write("\n")
    
    
def GenOriginalInceptionModule(prototxt_path, layer_name, bottom, kernel_size1=1, kernel_size2=3, kernel_size3=5):
    train_prototxt_file = open(prototxt_path, "w")
    input_bottom = bottom  # the 1x1, 3x3, 5x5 and pool branches all read the same input blob
    num_output = 22        # placeholder channel count; GoogLeNet uses a different value per branch

    # ---- 1x1 branch ----
    # "<layer_name>/1x1" layer, such as "inception_3a/1x1"
    kernel_size = kernel_size1
    this_layer_name = layer_name + "/%dx%d" % (kernel_size, kernel_size)
    top_1x1 = this_layer_name
    GenConvolutionLayer(train_prototxt_file, this_layer_name, input_bottom, top_1x1, num_output, kernel_size)
    # "<layer_name>/relu_1x1" layer, such as "inception_3a/relu_1x1" (in place: bottom == top)
    this_layer_name = layer_name + "/relu_%dx%d" % (kernel_size, kernel_size)
    GenReLuLayer(train_prototxt_file, this_layer_name, top_1x1, top_1x1)

    # ---- 3x3 branch ----
    # "<layer_name>/3x3_reduce" layer, such as "inception_3a/3x3_reduce": a 1x1 convolution
    this_layer_name = layer_name + "/%dx%d_reduce" % (kernel_size2, kernel_size2)
    top = this_layer_name
    GenConvolutionLayer(train_prototxt_file, this_layer_name, input_bottom, top, num_output, kernel_size1)
    # "<layer_name>/relu_3x3_reduce" layer, such as "inception_3a/relu_3x3_reduce"
    this_layer_name = layer_name + "/relu_%dx%d_reduce" % (kernel_size2, kernel_size2)
    GenReLuLayer(train_prototxt_file, this_layer_name, top, top)
    # "<layer_name>/3x3" layer, such as "inception_3a/3x3"; pad 1 keeps the spatial size
    kernel_size = kernel_size2
    this_layer_name = layer_name + "/%dx%d" % (kernel_size, kernel_size)
    top_3x3 = this_layer_name
    GenConvolutionLayer(train_prototxt_file, this_layer_name, top, top_3x3, num_output, kernel_size, pad=1)
    # "<layer_name>/relu_3x3" layer, such as "inception_3a/relu_3x3"
    this_layer_name = layer_name + "/relu_%dx%d" % (kernel_size, kernel_size)
    GenReLuLayer(train_prototxt_file, this_layer_name, top_3x3, top_3x3)

    # ---- 5x5 branch ----
    # "<layer_name>/5x5_reduce" layer, such as "inception_3a/5x5_reduce": a 1x1 convolution
    this_layer_name = layer_name + "/%dx%d_reduce" % (kernel_size3, kernel_size3)
    top = this_layer_name
    GenConvolutionLayer(train_prototxt_file, this_layer_name, input_bottom, top, num_output, kernel_size1)
    # "<layer_name>/relu_5x5_reduce" layer, such as "inception_3a/relu_5x5_reduce"
    this_layer_name = layer_name + "/relu_%dx%d_reduce" % (kernel_size3, kernel_size3)
    GenReLuLayer(train_prototxt_file, this_layer_name, top, top)
    # "<layer_name>/5x5" layer, such as "inception_3a/5x5"; pad 2 keeps the spatial size
    kernel_size = kernel_size3
    this_layer_name = layer_name + "/%dx%d" % (kernel_size, kernel_size)
    top_5x5 = this_layer_name
    GenConvolutionLayer(train_prototxt_file, this_layer_name, top, top_5x5, num_output, kernel_size, pad=2)
    # "<layer_name>/relu_5x5" layer, such as "inception_3a/relu_5x5"
    this_layer_name = layer_name + "/relu_%dx%d" % (kernel_size, kernel_size)
    GenReLuLayer(train_prototxt_file, this_layer_name, top_5x5, top_5x5)

    # ---- pool branch ----
    # "<layer_name>/pool" layer, such as "inception_3a/pool":
    # 3x3 MAX pool, stride 1, pad 1 (the GenPoolingLayer defaults)
    this_layer_name = layer_name + "/pool"
    top = this_layer_name
    GenPoolingLayer(train_prototxt_file, this_layer_name, input_bottom, top)
    # "<layer_name>/pool_proj" layer, such as "inception_3a/pool_proj": the original
    # inception module projects the pooled features through a 1x1 convolution
    this_layer_name = layer_name + "/pool_proj"
    top_pool = this_layer_name
    GenConvolutionLayer(train_prototxt_file, this_layer_name, top, top_pool, num_output, kernel_size1)
    # "<layer_name>/relu_pool_proj" layer, such as "inception_3a/relu_pool_proj"
    this_layer_name = layer_name + "/relu_pool_proj"
    GenReLuLayer(train_prototxt_file, this_layer_name, top_pool, top_pool)

    # concatenate the four branch outputs, such as into "inception_3a/output"
    GenConcatLayer(train_prototxt_file, layer_name + "/output",
                   top_1x1, top_3x3, top_5x5, top_pool, layer_name + "/output")
    train_prototxt_file.close()
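# With layer_name = "inception_3a", the module above emits layers named, in order:
#   inception_3a/1x1, inception_3a/relu_1x1,
#   inception_3a/3x3_reduce, inception_3a/relu_3x3_reduce,
#   inception_3a/3x3, inception_3a/relu_3x3,
#   inception_3a/5x5_reduce, inception_3a/relu_5x5_reduce,
#   inception_3a/5x5, inception_3a/relu_5x5,
#   inception_3a/pool, inception_3a/pool_proj, inception_3a/relu_pool_proj,
#   inception_3a/output (the Concat of the four branch tops)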
     
   
if __name__ == "__main__":
    train_prototxt_file = os.path.join(os.getcwd(), "train.prototxt")
    layer_name = "inception_3a"
    bottom = "pool2/3x3_s2"
    GenOriginalInceptionModule(train_prototxt_file, layer_name, bottom)
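
    # Optional sanity check, a minimal sketch assuming pycaffe and protobuf
    # are installed; skip it if Caffe is not available:
    #
    # from caffe.proto import caffe_pb2
    # from google.protobuf import text_format
    # net = caffe_pb2.NetParameter()
    # with open(train_prototxt_file) as f:
    #     text_format.Merge(f.read(), net)
    # print([layer.name for layer in net.layer])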


	
