1D and 2D Convolution over Sequences with Keras

The network structure comes from https://github.com/nfmcclure/tensorflow_cookbook/tree/master/06_Neural_Networks/05_Implementing_Different_Layers

Conv1D

import numpy as np
import keras

# Fix the random seed so the results are reproducible
seed = 13
np.random.seed(seed)

# Create a 1-D vector and expand its dimensions to match the input shape Keras
# expects; data_1d ends up with shape (1, 25, 1), i.e. (batch, steps, channels)
data_1d = np.random.normal(size=25)
data_1d = np.expand_dims(data_1d, 0)
data_1d = np.expand_dims(data_1d, 2)

# Define the convolution layer
filters = 1      # number of convolution kernels
kernel_size = 5  # kernel size
convolution_1d_layer = keras.layers.Conv1D(filters, kernel_size, strides=1, padding="valid", input_shape=(25, 1), activation="relu", name="convolution_1d_layer")

# Define the max-pooling layer
max_pooling_layer = keras.layers.MaxPool1D(pool_size=5, strides=1, padding="valid", name="max_pooling_layer")

# Flatten layer: reshapes the output so it can feed the fully connected layer
reshape_layer = keras.layers.Flatten(name="reshape_layer")

# Define the fully connected layer
full_connect_layer = keras.layers.Dense(5, kernel_initializer=keras.initializers.RandomNormal(mean=0.0, stddev=0.1, seed=seed), bias_initializer="random_normal", use_bias=True, name="full_connect_layer")

# Build the model
model = keras.Sequential()
model.add(convolution_1d_layer)
model.add(max_pooling_layer)
model.add(reshape_layer)
model.add(full_connect_layer)

# Print the output of the full_connect_layer layer
output = keras.Model(inputs=model.input, outputs=model.get_layer('full_connect_layer').output).predict(data_1d)
print("======================Convolution result=========================")
print(output)

# Print the network structure (model.summary() itself returns None, hence the trailing "None" below)
print("======================Network structure=========================")
print(model.summary())


The final output is as follows:

======================Convolution result=========================
[[-0.0131043  -0.11734447  0.13395447 -0.75453871 -0.69782442]]
======================Network structure=========================
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
convolution_1d_layer (Conv1D (None, 21, 1)             6
_________________________________________________________________
max_pooling_layer (MaxPoolin (None, 17, 1)             0
_________________________________________________________________
reshape_layer (Flatten)      (None, 17)                0
_________________________________________________________________
full_connect_layer (Dense)   (None, 5)                 90
=================================================================
Total params: 96
Trainable params: 96
Non-trainable params: 0
_________________________________________________________________
None

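The shapes and parameter counts in this summary can be verified by hand. The short sketch below is not part of the original example (the helper name valid_length is made up here); it just recomputes the numbers from the standard formula for "valid" padding, output_length = (input_length - window) // stride + 1.

# Sketch: recompute the Conv1D model's output lengths and parameter counts by hand
def valid_length(input_length, window, stride=1):
    # number of window positions that fit entirely inside the input ("valid" padding)
    return (input_length - window) // stride + 1

steps, channels = 25, 1
filters, kernel_size, pool_size, dense_units = 1, 5, 5, 5

conv_len = valid_length(steps, kernel_size)    # 25 -> 21
pool_len = valid_length(conv_len, pool_size)   # 21 -> 17
flat_len = pool_len * filters                  # 17

conv_params = kernel_size * channels * filters + filters  # 5*1*1 + 1 = 6
dense_params = flat_len * dense_units + dense_units       # 17*5 + 5 = 90

print(conv_len, pool_len, flat_len)   # 21 17 17
print(conv_params + dense_params)     # 96, matching "Total params: 96"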

Conv2D

# This example reuses np, keras and seed from the Conv1D example above
data_size = [10, 10]
data_2d = np.random.normal(size=data_size)
data_2d = np.expand_dims(data_2d, 0)
data_2d = np.expand_dims(data_2d, 3)
print(data_2d.shape)  # (1, 10, 10, 1), i.e. (batch, height, width, channels)

# Define the convolution layer
conv_size = 2
conv_stride_size = 2
convolution_2d_layer = keras.layers.Conv2D(filters=1, kernel_size=(conv_size, conv_size), strides=(conv_stride_size, conv_stride_size), input_shape=(data_size[0], data_size[1], 1))

# Define the max-pooling layer
pooling_size = (2, 2)
max_pooling_2d_layer = keras.layers.MaxPool2D(pool_size=pooling_size, strides=1, padding="valid", name="max_pooling_2d_layer")

# Flatten layer: reshapes the output so it can feed the fully connected layer
reshape_layer = keras.layers.Flatten(name="reshape_layer")

# Define the fully connected layer
full_connect_layer = keras.layers.Dense(5, kernel_initializer=keras.initializers.RandomNormal(mean=0.0, stddev=0.1, seed=seed), bias_initializer="random_normal", use_bias=True, name="full_connect_layer")

# Build the model
model_2d = keras.Sequential()
model_2d.add(convolution_2d_layer)
model_2d.add(max_pooling_2d_layer)
model_2d.add(reshape_layer)
model_2d.add(full_connect_layer)

# Print the output of the full_connect_layer layer
output = keras.Model(inputs=model_2d.input, outputs=model_2d.get_layer('full_connect_layer').output).predict(data_2d)
print("======================Convolution result=========================")
print(output)

# Print the network structure
print("======================Network structure=========================")
print(model_2d.summary())


Output:

======================Convolution result=========================
[[ 0.30173036 -0.10435719 -0.03354734  0.24000235 -0.09962128]]
======================Network structure=========================
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
conv2d_1 (Conv2D)            (None, 5, 5, 1)           5
_________________________________________________________________
max_pooling_2d_layer (MaxPoo (None, 4, 4, 1)           0
_________________________________________________________________
reshape_layer (Flatten)      (None, 16)                0
_________________________________________________________________
full_connect_layer (Dense)   (None, 5)                 85
=================================================================
Total params: 90
Trainable params: 90
Non-trainable params: 0
_________________________________________________________________
None

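The same kind of hand check works for the 2-D case by applying the "valid" formula per spatial dimension. The sketch below is again an addition (the helper valid_size is made up), reproducing the numbers in the summary.

# Sketch: recompute the Conv2D model's output sizes and parameter counts by hand
def valid_size(input_size, window, stride):
    # "valid" padding: output_size = (input_size - window) // stride + 1
    return (input_size - window) // stride + 1

height = width = 10
in_channels, filters = 1, 1
conv_size, conv_stride = 2, 2
pool_size, pool_stride = 2, 1
dense_units = 5

conv_h = valid_size(height, conv_size, conv_stride)  # (10 - 2) // 2 + 1 = 5
pool_h = valid_size(conv_h, pool_size, pool_stride)  # (5 - 2) // 1 + 1 = 4
flat_len = pool_h * pool_h * filters                 # 4 * 4 * 1 = 16

conv_params = conv_size * conv_size * in_channels * filters + filters  # 2*2*1*1 + 1 = 5
dense_params = flat_len * dense_units + dense_units                    # 16*5 + 5 = 85

print(conv_h, pool_h, flat_len)    # 5 4 16
print(conv_params + dense_params)  # 90, matching "Total params: 90"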
