from tensorflow.keras.layers import (Input, Conv1D, MaxPooling1D, Dropout,
                                     Bidirectional, LSTM, Dense, Multiply,
                                     Flatten)
from tensorflow.keras.models import Model


# Build the CNN-BiLSTM-Attention model
def attention_model():
    inputs = Input(shape=(4, 1))  # 4 time steps, 1 feature per step
    # inputs = Input(shape=input_shape)  # alternative: take the shape as an argument
    # CNN module
    x = Conv1D(filters=64, kernel_size=1, activation='relu')(inputs)  # optionally padding='same'
    x = MaxPooling1D(pool_size=2)(x)  # pooling layer: 4 time steps -> 2
    x = Dropout(0.2)(x)
    # BiLSTM module
    lstm_out = Bidirectional(LSTM(64, return_sequences=True), name='bilstm')(x)  # output shape (2, 128)
    lstm_out = Dropout(0.2)(lstm_out)
    # Attention module: a sigmoid gate produces element-wise weights in [0, 1]
    attention = Dense(128, activation='sigmoid', name='attention_vec')(lstm_out)  # attention weights; 128 matches the BiLSTM output width (2 x 64)
    attention_mul = Multiply()([lstm_out, attention])  # re-weight the BiLSTM features
    # Flatten the weighted sequence into a one-dimensional vector
    attention_mul = Flatten()(attention_mul)
    output = Dense(32, activation='relu')(attention_mul)  # hidden layer (a softmax here would constrain the 32 units to sum to 1)
    output = Dropout(0.3)(output)
    output = Dense(1, activation='sigmoid')(output)
    model = Model(inputs=inputs, outputs=output)
    return model
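
# A minimal usage sketch, assumed rather than taken from the original script:
# it supposes binary labels and inputs shaped (n_samples, 4, 1); the optimizer,
# loss, epoch count, and the dummy arrays below are illustrative choices only.
if __name__ == '__main__':
    import numpy as np

    model = attention_model()
    model.compile(optimizer='adam', loss='binary_crossentropy',
                  metrics=['accuracy'])
    model.summary()

    # Random dummy data just to verify the graph trains end to end
    x_train = np.random.rand(32, 4, 1)
    y_train = np.random.randint(0, 2, size=(32, 1))
    model.fit(x_train, y_train, epochs=2, batch_size=8, verbose=1)

    # Because the gate layer is named 'attention_vec', its learned weights can
    # be read out through a sub-model, e.g. for plotting the attention map
    probe = Model(inputs=model.input,
                  outputs=model.get_layer('attention_vec').output)
    attn_weights = probe.predict(x_train)  # shape (32, 2, 128)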