时间序列预测——LSTM

本文详细介绍了利用LSTM模型进行时间序列预测的完整步骤,包括数据处理、模型构建、训练、预测和误差评估。数据集来源于mock_kaggle.csv,经过滞后扩充、数据打乱、标准化及3D转化,最终构建并训练了LSTM模型。
摘要由CSDN通过智能技术生成

  本文展示了使用LSTM模型进行时间序列预测的全过程,包含详细的注释。整个过程主要包括:数据导入、数据清洗、结构转化、建立LSTM模型、训练模型(包括动态调整学习率和earlystopping的设置)、预测、结果展示、误差评估等完整的时间序列预测流程。
  本文使用的数据集在本人上传的资源中,链接为mock_kaggle.csv

import pandas as pd
import numpy as np
import math
from matplotlib import pyplot as plt
from matplotlib.pylab import mpl
import tensorflow as tf
from sklearn.preprocessing import MinMaxScaler
from statsmodels.tsa.arima_model import ARIMA
from keras.models import Sequential
from keras.models import load_model
from keras.layers import Dense,Dropout
from keras.layers import LSTM
from sklearn import preprocessing
from keras import backend as K
from keras.callbacks import ModelCheckpoint
import openpyxl
from keras import losses
import keras
from sklearn.metrics import mean_squared_error # 均方误差
import time
mpl.rcParams['font.sans-serif'] = ['SimHei']   #显示中文
mpl.rcParams['axes.unicode_minus']=False       #显示负号

取数据

data=pd.read_csv('mock_kaggle.csv',encoding ='gbk',parse_dates=['datetime'])
Date=pd.to_datetime(data.datetime)
data['date'] = Date.map(lambda x: x.strftime('%Y-%m-%d'))
datanew=data.set_index(Date)
series = pd.Series(datanew['股票'].values, index=datanew['date'])
series
date
2014-01-01    4972
2014-01-02    4902
2014-01-03    4843
2014-01-04    4750
2014-01-05    4654
              ... 
2016-07-27    3179
2016-07-28    3071
2016-07-29    4095
2016-07-30    3825
2016-07-31    3642
Length: 937, dtype: int64

滞后扩充数据

dataframe1 = pd.DataFrame()
num_hour = 64
for i in range(num_hour,0,-1):
    dataframe1['t-'+str(i)] = series.shift(i)
dataframe1['t'] = series.values
dataframe3=dataframe1.dropna()
dataframe3.index=range(len(dataframe3))
dataframe3
t-64t-63t-62t-61t-60t-59t-58t-57t-56t-55...t-9t-8t-7t-6t-5t-4t-3t-2t-1t
04972.04902.04843.04750.04654.04509.04329.04104.04459.05043.0...6329.06237.06178.06129.06012.05957.05880.05755.05672.05597
14902.04843.04750.04654.04509.04329.04104.04459.05043.05239.0...6237.06178.06129.06012.05957.05880.05755.05672.05597.05549
24843.04750.04654.04509.04329.04104.04459.05043.05239.05118.0...6178.06129.06012.05957.05880.05755.05672.05597.05549.05504
34750.04654.04509.04329.04104.04459.05043.05239.05118.04984.0...6129.06012.05957.05880.05755.05672.05597.05549.05504.04659
44654.04509.04329.04104.04459.05043.05239.05118.04984.04904.0...6012.05957.05880.05755.05672.05597.05549.05504.04659.04537
..................................................................
8681817.05946.05779.05535.05273.05085.04970.04831.04558.04323.0...1420.01359.01075.01015.0917.01550.01420.01358.02893.03179
8695946.05779.05535.05273.05085.04970.04831.04558.04323.04151.0...1359.01075.01015.0917.01550.01420.01358.02893.03179.03071
8705779.05535.05273.05085.04970.04831.04558.04323.04151.03828.0...1075.01015.0917.01550.01420.01358.02893.03179.03071.04095
8715535.05273.05085.04970.04831.04558.04323.04151.03828.03492.0...1015.0917.01550.01420.01358.02893.03179.03071.04095.03825
8725273.05085.04970.04831.04558.04323.04151.03828.03492.03365.0...917.01550.01420.01358.02893.03179.03071.04095.03825.03642

873 rows × 65 columns

打乱数据

pd.DataFrame(np.random.shuffle(dataframe3.values)) 

划分数据并标准化

pot=len(dataframe3)-12
train=dataframe3[:pot]
test=dataframe3[pot:]
scaler = MinMaxScaler(feature_range=(0, 1)).fit(train)
#scaler = preprocessing.StandardScaler().fit(train)
train_norm=pd.DataFrame(scaler.fit_transform(train))
test_norm=pd.DataFrame(scaler.transform(test))
train_norm.shape
(861, 65)
train_norm=train_norm[9:] # 便于分banch
train_norm.shape
(852, 65)
test_norm.shape
(12, 65)
X_train=train_norm.iloc[:,1:]
X_test=test_norm.iloc[:,1:]
Y_train=train_norm.iloc[:,:1]
Y_test=test_norm.iloc[:,:1]

转化为3D数据 [samples, timesteps, features]

source_x_train=X_train
source_x_test=X_test
X_train=X_train.values.reshape([X_train.shape[0],1,X_train.shape[1]]) #从(696, 35)-->(696, 1,35)
X_test=X_test.values.reshape([X_test.shape[0],1,X_test.shape[1]])  #从(174, 35)-->(174, 1,35)
X_test.shape,X_train.shape
((12, 1, 64), (852, 1, 64))
def get_lr_metric(optimizer):
    def lr(y_true, y_pred):
        return optimizer.lr
    return lr

构建模型

# 特征数
input_size = X_train.shape[2]
# 时间步长:用多少个时间步的数据来预测下一个时刻的值
time_steps = X_train.shape[1]
# 隐藏层block的个数
cell_size = 128
batch_size=12

model = keras.Sequential()
#使用LSTM
model.add(keras.layers.LSTM(
        units = cell_size, # 输出维度
        batch_input_shape=(batch_size, time_steps, input_size),# 输入维度
        stateful=True #保持状态
))
model.add(Dense(128,activation='relu'))
model.add(Dense(64,activation='relu'))
model.add(Dense(32,activation='relu'))
model.add(Dense(16,activation='relu'))
# 输出层
model.add(keras.layers.Dense(1))

# 定义优化器
adam = keras.optimizers.Adam(lr=1e-3)
lr_metric = get_lr_metric(adam)
model.compile(optimizer=adam, loss='mae', metrics=['accuracy',lr_metric])
model.summary()
Model: "sequential_1"
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_1 (LSTM)                (12, 128)                 98816     
_________________________________________________________________
dense_1 (Dense)              (12, 128)                 16512     
_________________________________________________________________
dense_2 (Dense)              (12, 64)                  8256      
_________________________________________________________________
dense_3 (Dense)              (12, 32)                  2080      
_________________________________________________________________
dense_4 (Dense)              (12, 16)                  528       
_________________________________________________________________
dense_5 (Dense)              (12, 1)                   17        
=================================================================
Total params: 126,209
Trainable params: 126,209
Non-trainable params: 0
_________________________________________________________________

训练模型

time1=time.time()
nb_epoch=200
learning_rate=1e-2
min_lr=1e-4
last_loss=10000.0 #存储上一次的loss值
patience=1 #达到10后结束循环
threshold=min_lr*0.5

for i in range(nb_epoch):
    print('这是第%d次迭代:patience=%s' %(i,patience))
    #我们希望网络在观察序列中学习时建立状态,所以通过将“ shuffle”设置为“ False”来禁用样本shuffle。
    history=model.fit(X_train, Y_train,epochs=1,batch_size=batch_size,shuffle=False)        
     #每跑完一个epoch重置状态
    model.reset_states()
    
    #提前终止
    loss=history.history['loss'][0]
    if  abs(loss-last_loss)<threshold:
        patience+=1
        if patience>=10:
            break
    else:
        patience=0
    last_loss=loss
  
    #每50个epoch学习率*0.1
    if i>0 and learning_rate*0.1>=min_lr and i%50==0:
        learning_rate*=0.1
        adam = keras.optimizers.Adam(lr=learning_rate)
        lr_metric = get_lr_metric(adam)
        model.compile(optimizer=adam, loss='mse', metrics=['accuracy',lr_metric])
        
time2=time.time()
这是第0次迭代:patience=1
Epoch 1/1
852/852 [==============================] - 2s 2ms/step - loss: 0.1202 - accuracy: 0.0188 - lr: 1.0000e-03
这是第1次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 753us/step - loss: 0.1026 - accuracy: 0.0188 - lr: 1.0000e-03
这是第2次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 762us/step - loss: 0.0900 - accuracy: 0.0200 - lr: 1.0000e-03
这是第3次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 773us/step - loss: 0.0885 - accuracy: 0.0200 - lr: 1.0000e-03
这是第4次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 787us/step - loss: 0.0879 - accuracy: 0.0200 - lr: 1.0000e-03
这是第5次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 783us/step - loss: 0.0752 - accuracy: 0.0200 - lr: 1.0000e-03
这是第6次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 761us/step - loss: 0.0543 - accuracy: 0.0200 - lr: 1.0000e-03
这是第7次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 764us/step - loss: 0.0632 - accuracy: 0.0200 - lr: 1.0000e-03
这是第8次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 826us/step - loss: 0.0666 - accuracy: 0.0200 - lr: 1.0000e-03
这是第9次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 801us/step - loss: 0.0712 - accuracy: 0.0200 - lr: 1.0000e-03
这是第10次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 783us/step - loss: 0.0629 - accuracy: 0.0200 - lr: 1.0000e-03
这是第11次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 808us/step - loss: 0.0526 - accuracy: 0.0200 - lr: 1.0000e-03
这是第12次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 775us/step - loss: 0.0462 - accuracy: 0.0200 - lr: 1.0000e-03
这是第13次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 766us/step - loss: 0.0467 - accuracy: 0.0200 - lr: 1.0000e-03
这是第14次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 777us/step - loss: 0.0402 - accuracy: 0.0200 - lr: 1.0000e-03
这是第15次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 760us/step - loss: 0.0475 - accuracy: 0.0200 - lr: 1.0000e-03
这是第16次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 754us/step - loss: 0.0574 - accuracy: 0.0200 - lr: 1.0000e-03
这是第17次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 755us/step - loss: 0.0589 - accuracy: 0.0200 - lr: 1.0000e-03
这是第18次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 766us/step - loss: 0.0559 - accuracy: 0.0200 - lr: 1.0000e-03
这是第19次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 757us/step - loss: 0.0363 - accuracy: 0.0200 - lr: 1.0000e-03
这是第20次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 794us/step - loss: 0.0393 - accuracy: 0.0200 - lr: 1.0000e-03
这是第21次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 789us/step - loss: 0.0435 - accuracy: 0.0200 - lr: 1.0000e-03
这是第22次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 767us/step - loss: 0.0521 - accuracy: 0.0200 - lr: 1.0000e-03
这是第23次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 952us/step - loss: 0.0483 - accuracy: 0.0200 - lr: 1.0000e-03
这是第24次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0437 - accuracy: 0.0200 - lr: 1.0000e-03
这是第25次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 884us/step - loss: 0.0488 - accuracy: 0.0200 - lr: 1.0000e-03
这是第26次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 759us/step - loss: 0.0699 - accuracy: 0.0200 - lr: 1.0000e-03
这是第27次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 890us/step - loss: 0.0590 - accuracy: 0.0200 - lr: 1.0000e-03
这是第28次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 987us/step - loss: 0.0407 - accuracy: 0.0200 - lr: 1.0000e-03
这是第29次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0588 - accuracy: 0.0200 - lr: 1.0000e-03
这是第30次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0575 - accuracy: 0.0200 - lr: 1.0000e-03
这是第31次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 945us/step - loss: 0.0465 - accuracy: 0.0200 - lr: 1.0000e-03
这是第32次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0694 - accuracy: 0.0200 - lr: 1.0000e-03
这是第33次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 951us/step - loss: 0.0618 - accuracy: 0.0200 - lr: 1.0000e-03
这是第34次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 936us/step - loss: 0.0683 - accuracy: 0.0200 - lr: 1.0000e-030s - loss: 0.1315 - accuracy: 0.0060 
这是第35次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 934us/step - loss: 0.0646 - accuracy: 0.0188 - lr: 1.0000e-03
这是第36次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 960us/step - loss: 0.0519 - accuracy: 0.0200 - lr: 1.0000e-03
这是第37次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 990us/step - loss: 0.0735 - accuracy: 0.0200 - lr: 1.0000e-03
这是第38次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 897us/step - loss: 0.0579 - accuracy: 0.0200 - lr: 1.0000e-03
这是第39次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 894us/step - loss: 0.0575 - accuracy: 0.0200 - lr: 1.0000e-03
这是第40次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 802us/step - loss: 0.0452 - accuracy: 0.0200 - lr: 1.0000e-03
这是第41次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 800us/step - loss: 0.0557 - accuracy: 0.0200 - lr: 1.0000e-03
这是第42次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 842us/step - loss: 0.0503 - accuracy: 0.0200 - lr: 1.0000e-03
这是第43次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 947us/step - loss: 0.0562 - accuracy: 0.0200 - lr: 1.0000e-03
这是第44次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 888us/step - loss: 0.0384 - accuracy: 0.0200 - lr: 1.0000e-03
这是第45次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 853us/step - loss: 0.0402 - accuracy: 0.0200 - lr: 1.0000e-03
这是第46次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 880us/step - loss: 0.0498 - accuracy: 0.0200 - lr: 1.0000e-03
这是第47次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 808us/step - loss: 0.0322 - accuracy: 0.0200 - lr: 1.0000e-03
这是第48次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 789us/step - loss: 0.0337 - accuracy: 0.0200 - lr: 1.0000e-03
这是第49次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 809us/step - loss: 0.0359 - accuracy: 0.0200 - lr: 1.0000e-03
这是第50次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 775us/step - loss: 0.0506 - accuracy: 0.0200 - lr: 1.0000e-03
这是第51次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 2ms/step - loss: 0.0045 - accuracy: 0.0200 - lr: 1.0000e-03
这是第52次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 819us/step - loss: 0.0040 - accuracy: 0.0200 - lr: 1.0000e-03
这是第53次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 842us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第54次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 795us/step - loss: 0.0027 - accuracy: 0.0200 - lr: 1.0000e-03
这是第55次迭代:patience=1
Epoch 1/1
852/852 [==============================] - 1s 823us/step - loss: 0.0029 - accuracy: 0.0200 - lr: 1.0000e-03
这是第56次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 821us/step - loss: 0.0040 - accuracy: 0.0200 - lr: 1.0000e-03
这是第57次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 846us/step - loss: 0.0125 - accuracy: 0.0200 - lr: 1.0000e-03
这是第58次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 948us/step - loss: 0.0060 - accuracy: 0.0200 - lr: 1.0000e-03
这是第59次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 790us/step - loss: 0.0161 - accuracy: 0.0188 - lr: 1.0000e-03
这是第60次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 852us/step - loss: 0.0087 - accuracy: 0.0200 - lr: 1.0000e-03
这是第61次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 911us/step - loss: 0.0044 - accuracy: 0.0200 - lr: 1.0000e-03
这是第62次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 838us/step - loss: 0.0038 - accuracy: 0.0200 - lr: 1.0000e-03
这是第63次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 815us/step - loss: 0.0046 - accuracy: 0.0200 - lr: 1.0000e-03
这是第64次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 792us/step - loss: 0.0028 - accuracy: 0.0200 - lr: 1.0000e-03
这是第65次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 809us/step - loss: 0.0039 - accuracy: 0.0200 - lr: 1.0000e-03
这是第66次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 853us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第67次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 863us/step - loss: 0.0021 - accuracy: 0.0200 - lr: 1.0000e-03
这是第68次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 812us/step - loss: 0.0021 - accuracy: 0.0200 - lr: 1.0000e-03
这是第69次迭代:patience=1
Epoch 1/1
852/852 [==============================] - 1s 847us/step - loss: 0.0022 - accuracy: 0.0200 - lr: 1.0000e-03
这是第70次迭代:patience=2
Epoch 1/1
852/852 [==============================] - 1s 975us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第71次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0023 - accuracy: 0.0200 - lr: 1.0000e-03- ETA: 1s - loss: 0.0059 - accuracy: 0.0104 - lr: 0.0 - ETA: 0s - loss: 0.0045 - accuracy: 0.0044 - 
这是第72次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 796us/step - loss: 0.0052 - accuracy: 0.0200 - lr: 1.0000e-03
这是第73次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 807us/step - loss: 0.0036 - accuracy: 0.0200 - lr: 1.0000e-03
这是第74次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 852us/step - loss: 0.0043 - accuracy: 0.0200 - lr: 1.0000e-03
这是第75次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 942us/step - loss: 0.0025 - accuracy: 0.0200 - lr: 1.0000e-03
这是第76次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 808us/step - loss: 0.0042 - accuracy: 0.0200 - lr: 1.0000e-03
这是第77次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 796us/step - loss: 0.0032 - accuracy: 0.0200 - lr: 1.0000e-03
这是第78次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 782us/step - loss: 0.0025 - accuracy: 0.0200 - lr: 1.0000e-03
这是第79次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 783us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第80次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 789us/step - loss: 0.0037 - accuracy: 0.0200 - lr: 1.0000e-03
这是第81次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 797us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第82次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 805us/step - loss: 0.0024 - accuracy: 0.0200 - lr: 1.0000e-03
这是第83次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 781us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第84次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 828us/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第85次迭代:patience=1
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0020 - accuracy: 0.0200 - lr: 1.0000e-03
这是第86次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 871us/step - loss: 0.0022 - accuracy: 0.0200 - lr: 1.0000e-03
这是第87次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 958us/step - loss: 0.0028 - accuracy: 0.0200 - lr: 1.0000e-03
这是第88次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 933us/step - loss: 0.0024 - accuracy: 0.0200 - lr: 1.0000e-03
这是第89次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0026 - accuracy: 0.0200 - lr: 1.0000e-03
这是第90次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 891us/step - loss: 0.0021 - accuracy: 0.0200 - lr: 1.0000e-03
这是第91次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 928us/step - loss: 0.0029 - accuracy: 0.0200 - lr: 1.0000e-03
这是第92次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 807us/step - loss: 0.0034 - accuracy: 0.0200 - lr: 1.0000e-03
这是第93次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 791us/step - loss: 0.0019 - accuracy: 0.0200 - lr: 1.0000e-03
这是第94次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 908us/step - loss: 0.0023 - accuracy: 0.0200 - lr: 1.0000e-03
这是第95次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 794us/step - loss: 0.0027 - accuracy: 0.0200 - lr: 1.0000e-03
这是第96次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 797us/step - loss: 0.0023 - accuracy: 0.0200 - lr: 1.0000e-03 0s - loss: 0.0022 - accuracy: 0.0232 - lr: 1.0000e-
这是第97次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 797us/step - loss: 0.0020 - accuracy: 0.0200 - lr: 1.0000e-03
这是第98次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 807us/step - loss: 0.0022 - accuracy: 0.0200 - lr: 1.0000e-03
这是第99次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 783us/step - loss: 0.0047 - accuracy: 0.0200 - lr: 1.0000e-03
这是第100次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 810us/step - loss: 0.0060 - accuracy: 0.0200 - lr: 1.0000e-03
这是第101次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 2ms/step - loss: 0.0075 - accuracy: 0.0200 - lr: 1.0000e-04
这是第102次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 879us/step - loss: 0.0056 - accuracy: 0.0200 - lr: 1.0000e-04
这是第103次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 803us/step - loss: 0.0042 - accuracy: 0.0200 - lr: 1.0000e-04 0s - loss: 0.0047 - accuracy: 0.0106 - lr: 1.0000e
这是第104次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 838us/step - loss: 0.0031 - accuracy: 0.0200 - lr: 1.0000e-04
这是第105次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 991us/step - loss: 0.0025 - accuracy: 0.0200 - lr: 1.0000e-04
这是第106次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0022 - accuracy: 0.0200 - lr: 1.0000e-04
这是第107次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 892us/step - loss: 0.0020 - accuracy: 0.0200 - lr: 1.0000e-04  - ETA: 0s - loss: 0.0047 - accuracy: 0.0076 - lr
这是第108次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 863us/step - loss: 0.0019 - accuracy: 0.0200 - lr: 1.0000e-04
这是第109次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 888us/step - loss: 0.0018 - accuracy: 0.0200 - lr: 1.0000e-04
这是第110次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 941us/step - loss: 0.0018 - accuracy: 0.0200 - lr: 1.0000e-04
这是第111次迭代:patience=0
Epoch 1/1
852/852 [==============================] - 1s 835us/step - loss: 0.0017 - accuracy: 0.0200 - lr: 1.0000e-04
这是第112次迭代:patience=1
Epoch 1/1
852/852 [==============================] - 1s 949us/step - loss: 0.0017 - accuracy: 0.0200 - lr: 1.0000e-04
这是第113次迭代:patience=2
Epoch 1/1
852/852 [==============================] - 1s 906us/step - loss: 0.0016 - accuracy: 0.0200 - lr: 1.0000e-04
这是第114次迭代:patience=3
Epoch 1/1
852/852 [==============================] - 1s 855us/step - loss: 0.0016 - accuracy: 0.0200 - lr: 1.0000e-04
这是第115次迭代:patience=4
Epoch 1/1
852/852 [==============================] - 1s 892us/step - loss: 0.0016 - accuracy: 0.0200 - lr: 1.0000e-04
这是第116次迭代:patience=5
Epoch 1/1
852/852 [==============================] - 1s 1ms/step - loss: 0.0016 - accuracy: 0.0200 - lr: 1.0000e-04
这是第117次迭代:patience=6
Epoch 1/1
852/852 [==============================] - 1s 891us/step - loss: 0.0015 - accuracy: 0.0200 - lr: 1.0000e-04
这是第118次迭代:patience=7
Epoch 1/1
852/852 [==============================] - 1s 830us/step - loss: 0.0015 - accuracy: 0.0200 - lr: 1.0000e-04
这是第119次迭代:patience=8
Epoch 1/1
852/852 [==============================] - 1s 931us/step - loss: 0.0015 - accuracy: 0.0200 - lr: 1.0000e-04
这是第120次迭代:patience=9
Epoch 1/1
852/852 [==============================] - 1s 883us/step - loss: 0.0015 - accuracy: 0.0200 - lr: 1.0000e-04
print(time2-time1)
99.11077070236206

预测

yhat = model.predict(X_test)
yhat
array([[0.3163526 ],
       [0.3064773 ],
       [0.2985979 ],
       [0.31503552],
       [0.34486443],
       [0.44958484],
       [0.61627054],
       [0.7980742 ],
       [0.7699471 ],
       [0.74627775],
       [0.72622526],
       [0.70155066]], dtype=float32)
#还原为真实值
real_predict=scaler.inverse_transform(np.concatenate((source_x_test,yhat),axis=1))
real_y=scaler.inverse_transform(np.concatenate((source_x_test,Y_test),axis=1))
real_predict=real_predict[:,-1]
real_y=real_y[:,-1]

误差评估

round(mean_squared_error(Y_test,yhat),10)
0.0416780208
from sklearn.metrics import r2_score
round(r2_score(real_y,real_predict),4)
0.2604
plt.figure(figsize=(15,6))
bwith = 0.75 #边框宽度设置为2
ax = plt.gca()#获取边框
ax.spines['bottom'].set_linewidth(bwith)
ax.spines['left'].set_linewidth(bwith)
ax.spines['top'].set_linewidth(bwith)
ax.spines['right'].set_linewidth(bwith)
plt.plot(real_predict,label='real_predict')
plt.plot(real_y,label='real_y')
plt.plot(real_y*(1+0.15),label='15%上限',linestyle='--',color='green')
# plt.plot(real_y*(1+0.1),label='10%上限',linestyle='--')
# plt.plot(real_y*(1-0.1),label='10%下限',linestyle='--')
plt.plot(real_y*(1-0.15),label='15%下限',linestyle='--',color='green')
plt.fill_between(range(0,12),real_y*(1+0.15),real_y*(1-0.15),color='gray',alpha=0.2)
plt.legend()
plt.show()

在这里插入图片描述

per_real_loss=(real_y-real_predict)/real_y
print(round(sum(abs(per_real_loss))/len(per_real_loss),4)) #MAPE
0.4378
#计算指定置信水平下的预测准确率
#level为小数
def comput_acc(real,predict,level):
    num_error=0
    for i in range(len(real)):
        if abs(real[i]-predict[i])/real[i]>level:
            num_error+=1
    return 1-num_error/len(real)
comput_acc(real_y,real_predict,0.2),comput_acc(real_y,real_predict,0.15),comput_acc(real_y,real_predict,0.1)
(0.6666666666666667, 0.6666666666666667, 0.5833333333333333)
评论 11
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值