How to add hidden layers in Python — Python CNN: how do I get the hidden FC-layer encoding of the data?

def variables_lenet5(filter_size=filter_size_1, filter_size_2=filter_size_2,
                     filter_depth1=filter_size_1, filter_depth2=filter_size_2,
                     num_hidden1=hid_1, num_hidden2=hid_2,
                     image_width=image_width, image_height=image_height,
                     image_depth=3, num_labels=num_labels):
    # Convolutional weights/biases
    w1 = tf.Variable(tf.truncated_normal([filter_size, filter_size, image_depth, filter_depth1], stddev=0.1))
    b1 = tf.Variable(tf.zeros([filter_depth1]))
    w2 = tf.Variable(tf.truncated_normal([filter_size_2, filter_size_2, filter_depth1, filter_depth2], stddev=0.1))
    b2 = tf.Variable(tf.constant(1.0, shape=[filter_depth2]))
    # Fully connected weights/biases (288 = flattened size of the last pooling layer)
    w3 = tf.Variable(tf.truncated_normal([288, num_hidden1], stddev=0.1))
    b3 = tf.Variable(tf.constant(1.0, shape=[num_hidden1]))
    w4 = tf.Variable(tf.truncated_normal([num_hidden1, num_hidden2], stddev=0.1))
    b4 = tf.Variable(tf.constant(1.0, shape=[num_hidden2]))
    w5 = tf.Variable(tf.truncated_normal([num_hidden2, num_labels], stddev=0.1))
    b5 = tf.Variable(tf.constant(1.0, shape=[num_labels]))
    variables = {
        'w1': w1, 'w2': w2, 'w3': w3, 'w4': w4, 'w5': w5,
        'b1': b1, 'b2': b2, 'b3': b3, 'b4': b4, 'b5': b5,
    }
    return variables


def model_lenet5(data, variables):
    # Conv block 1
    layer1_conv = tf.nn.conv2d(data, variables['w1'], [1, 1, 1, 1], padding='SAME')
    layer1_actv = tf.sigmoid(layer1_conv + variables['b1'])
    layer1_pool = tf.nn.avg_pool(layer1_actv, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
    # Conv block 2
    layer2_conv = tf.nn.conv2d(layer1_pool, variables['w2'], [1, 2, 2, 1], padding='SAME')  # was VALID
    layer2_actv = tf.sigmoid(layer2_conv + variables['b2'])
    layer2_pool = tf.nn.max_pool(layer2_actv, [1, 2, 2, 1], [1, 2, 2, 1], padding='SAME')
    # Fully connected layers
    flat_layer = flatten_tf_array(layer2_pool)
    layer3_fccd = tf.matmul(flat_layer, variables['w3']) + variables['b3']
    layer3_actv = tf.nn.sigmoid(layer3_fccd)
    layer4_fccd = tf.matmul(layer3_actv, variables['w4']) + variables['b4']
    layer4_actv = tf.nn.sigmoid(layer4_fccd)
    logits = tf.matmul(layer4_actv, variables['w5']) + variables['b5']
    return logits
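
To get the hidden FC-layer encoding of the data, one option is to have the model return the activation of the last hidden fully connected layer (layer4_actv) alongside the logits, and then evaluate that tensor in a session. The sketch below is a minimal illustration, assuming TensorFlow 1.x, the variables_lenet5/model_lenet5 graph above, and a flatten_tf_array helper that reshapes the pooled feature map to [batch, -1]; the tf_dataset placeholder and some_image_batch array are hypothetical names introduced here only for the example.

import tensorflow as tf

def flatten_tf_array(x):
    # Assumed helper: flatten a 4-D feature map to [batch, features].
    shape = x.get_shape().as_list()
    return tf.reshape(x, [-1, shape[1] * shape[2] * shape[3]])

def model_lenet5_with_encoding(data, variables):
    # Same forward pass as model_lenet5, but also return layer4_actv,
    # which is the hidden FC-layer encoding of the input.
    logits = model_lenet5(data, variables)
    # If you prefer not to rebuild the graph, simply change model_lenet5
    # itself to "return logits, layer4_actv"; this wrapper is only a sketch.
    return logits

# Hypothetical usage (assumes image_height, image_width, num_labels, etc.
# are defined as in the question, and model_lenet5 returns logits, encoding):
tf_dataset = tf.placeholder(tf.float32, shape=[None, image_height, image_width, 3])
variables = variables_lenet5()
logits, fc_encoding = model_lenet5(tf_dataset, variables)  # after the one-line change above

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    codes = sess.run(fc_encoding, feed_dict={tf_dataset: some_image_batch})
    # codes has shape [batch, num_hidden2]: the hidden FC-layer encoding.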
