👨🎓 Personal homepage: 研学社的博客
💥💥💞💞 Welcome to this blog ❤️❤️💥💥
🏆 Blogger's strengths: 🌞🌞🌞 the posts aim to be rigorous and logically clear, for the reader's convenience.
⛳️ Motto: on a journey of a hundred li, ninety li is only the halfway point.
📋📋📋 The table of contents of this article is as follows: 🎁🎁🎁
Contents
💥 1 Overview
📚 2 Running Results
🎉 3 References
💥 1 Overview
This example presents the idea of combining a convolutional neural network (CNN) with a recurrent neural network (RNN) to forecast the number of chickenpox cases from the preceding months.
CNNs are excellent feature extractors, while RNNs have proven their ability to make sequence-to-sequence predictions. At each time step, the CNN extracts the main features of the sequence, and the RNN learns to predict the value at the next time step.
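Conceptually, the pipeline folds the input sequence so 2-D convolutional layers can extract features at each time step, then unfolds the result so a recurrent layer can model the temporal dependence. Below is a minimal MATLAB sketch of that fold/conv/unfold/LSTM pattern; the layer sizes, layer names, and the value of inputSize here are illustrative assumptions, not the much deeper network excerpted in the next section.

inputSize = 8;  % assumed number of past months fed in per sample
layers = [
    sequenceInputLayer([inputSize 1 1],"Name","in")
    sequenceFoldingLayer("Name","fold")              % apply 2-D layers to each time step independently
    convolution2dLayer([3 1],16,"Padding","same","Name","conv")
    reluLayer("Name","relu")
    sequenceUnfoldingLayer("Name","unfold")          % restore the time dimension
    flattenLayer("Name","flatten")
    lstmLayer(64,"OutputMode","last","Name","lstm")  % RNN predicts the next value
    fullyConnectedLayer(1,"Name","fc")
    regressionLayer("Name","out")];
lgraphMini = layerGraph(layers);
% the folding/unfolding pair must share the mini-batch size
lgraphMini = connectLayers(lgraphMini,"fold/miniBatchSize","unfold/miniBatchSize");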
📚 2 Running Results
Partial code:
lgraph = layerGraph;  % empty layer graph (initialization omitted in the original excerpt)

% Input branch: fold the sequence so 2-D convolutions run on each time step independently
% (inputSize is defined earlier in the full example)
tempLayers = [
    sequenceInputLayer([inputSize 1 1],"Name","sequence")
    sequenceFoldingLayer("Name","seqfold")];
lgraph = addLayers(lgraph,tempLayers);

% Block 1: two standard convolutions (Xception-style entry flow)
tempLayers = [
    convolution2dLayer([3 3],32,"Name","block1_conv1","BiasLearnRateFactor",0,"Padding","same","Stride",[2 1])
    batchNormalizationLayer("Name","block1_conv1_bn","Epsilon",0.001)
    reluLayer("Name","block1_conv1_act")
    convolution2dLayer([3 3],64,"Name","block1_conv2","BiasLearnRateFactor",0,"Padding","same")
    batchNormalizationLayer("Name","block1_conv2_bn","Epsilon",0.001)
    reluLayer("Name","block1_conv2_act")];
lgraph = addLayers(lgraph,tempLayers);

% Block 2: depthwise-separable convolutions (grouped channel-wise conv + 1x1 point-wise conv), then pooling
tempLayers = [
    groupedConvolution2dLayer([3 3],1,64,"Name","block2_sepconv1_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],128,"Name","block2_sepconv1_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block2_sepconv1_bn","Epsilon",0.001)
    reluLayer("Name","block2_sepconv2_act")
    groupedConvolution2dLayer([3 3],1,128,"Name","block2_sepconv2_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],128,"Name","block2_sepconv2_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block2_sepconv2_bn","Epsilon",0.001)
    maxPooling2dLayer([3 3],"Name","block2_pool","Padding","same","Stride",[2 2])];
lgraph = addLayers(lgraph,tempLayers);

% Residual shortcut 1: 1x1 convolution with stride 2 to match the pooled feature map
tempLayers = [
    convolution2dLayer([1 1],128,"Name","conv2d_1","BiasLearnRateFactor",0,"Padding","same","Stride",[2 2])
    batchNormalizationLayer("Name","batch_normalization_1","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);

% Merge main branch and shortcut
tempLayers = additionLayer(2,"Name","add_1");
lgraph = addLayers(lgraph,tempLayers);

% Block 3: second separable-convolution block, widening to 256 channels
tempLayers = [
    reluLayer("Name","block3_sepconv1_act")
    groupedConvolution2dLayer([3 3],1,128,"Name","block3_sepconv1_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],256,"Name","block3_sepconv1_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block3_sepconv1_bn","Epsilon",0.001)
    reluLayer("Name","block3_sepconv2_act")
    groupedConvolution2dLayer([3 3],1,256,"Name","block3_sepconv2_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],256,"Name","block3_sepconv2_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block3_sepconv2_bn","Epsilon",0.001)
    maxPooling2dLayer([3 3],"Name","block3_pool","Padding","same","Stride",[2 2])];
lgraph = addLayers(lgraph,tempLayers);

% Residual shortcut 2
tempLayers = [
    convolution2dLayer([1 1],256,"Name","conv2d_2","BiasLearnRateFactor",0,"Padding","same","Stride",[2 2])
    batchNormalizationLayer("Name","batch_normalization_2","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","add_2");
lgraph = addLayers(lgraph,tempLayers);

% Residual shortcut 3 (widens to 728 channels)
tempLayers = [
    convolution2dLayer([1 1],728,"Name","conv2d_3","BiasLearnRateFactor",0,"Padding","same","Stride",[2 2])
    batchNormalizationLayer("Name","batch_normalization_3","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);

% Block 4: last downsampling separable-convolution block
tempLayers = [
    reluLayer("Name","block4_sepconv1_act")
    groupedConvolution2dLayer([3 3],1,256,"Name","block4_sepconv1_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block4_sepconv1_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block4_sepconv1_bn","Epsilon",0.001)
    reluLayer("Name","block4_sepconv2_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block4_sepconv2_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block4_sepconv2_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block4_sepconv2_bn","Epsilon",0.001)
    maxPooling2dLayer([3 3],"Name","block4_pool","Padding","same","Stride",[2 2])];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","add_3");
lgraph = addLayers(lgraph,tempLayers);

% Blocks 5-7: middle-flow blocks of three separable convolutions each;
% channel count stays at 728, so these pair with identity shortcuts
tempLayers = [
    reluLayer("Name","block5_sepconv1_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block5_sepconv1_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block5_sepconv1_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block5_sepconv1_bn","Epsilon",0.001)
    reluLayer("Name","block5_sepconv2_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block5_sepconv2_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block5_sepconv2_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block5_sepconv2_bn","Epsilon",0.001)
    reluLayer("Name","block5_sepconv3_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block5_sepconv3_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block5_sepconv3_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block5_sepconv3_bn","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","add_4");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    reluLayer("Name","block6_sepconv1_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block6_sepconv1_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block6_sepconv1_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block6_sepconv1_bn","Epsilon",0.001)
    reluLayer("Name","block6_sepconv2_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block6_sepconv2_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block6_sepconv2_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block6_sepconv2_bn","Epsilon",0.001)
    reluLayer("Name","block6_sepconv3_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block6_sepconv3_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block6_sepconv3_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block6_sepconv3_bn","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","add_5");
lgraph = addLayers(lgraph,tempLayers);

tempLayers = [
    reluLayer("Name","block7_sepconv1_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block7_sepconv1_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block7_sepconv1_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block7_sepconv1_bn","Epsilon",0.001)
    reluLayer("Name","block7_sepconv2_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block7_sepconv2_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block7_sepconv2_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block7_sepconv2_bn","Epsilon",0.001)
    reluLayer("Name","block7_sepconv3_act")
    groupedConvolution2dLayer([3 3],1,728,"Name","block7_sepconv3_channel-wise","BiasLearnRateFactor",0,"Padding","same")
    convolution2dLayer([1 1],728,"Name","block7_sepconv3_point-wise","BiasLearnRateFactor",0)
    batchNormalizationLayer("Name","block7_sepconv3_bn","Epsilon",0.001)];
lgraph = addLayers(lgraph,tempLayers);

tempLayers = additionLayer(2,"Name","add_6");
lgraph = addLayers(lgraph,tempLayers);
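The excerpt above only adds the branches to lgraph; they still have to be wired together before training. The following sketch shows the kind of connectLayers calls involved for the first residual unit. The port names follow the layer names above, but this is an assumption about the wiring: the remaining additions (add_2 through add_6) would be connected analogously, and the exact connections in the original example may differ.

% Wire the folded input into block 1, then split into main and shortcut branches (sketch)
lgraph = connectLayers(lgraph,"seqfold/out","block1_conv1");
lgraph = connectLayers(lgraph,"block1_conv2_act","block2_sepconv1_channel-wise");  % main branch
lgraph = connectLayers(lgraph,"block1_conv2_act","conv2d_1");                      % shortcut branch
lgraph = connectLayers(lgraph,"block2_pool","add_1/in1");                          % main branch into the addition
lgraph = connectLayers(lgraph,"batch_normalization_1","add_1/in2");                % shortcut into the addition

Once all additions are connected and the sequence is unfolded into the recurrent head, the assembled graph can be trained with trainNetwork.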
🎉 3 References
Some of the theory cited here comes from online sources; if there is any infringement, please contact us for removal.
[1] H. Sanchez (2023). Time Series Forecasting Using Hybrid CNN-RNN.