包含所有数据和代码,支持一键运行(jupyter)
# Create the CLMHA model: a self-attention wrapper layer used after the LSTM.
class CustomMultiHeadAttention(layers.Layer):
    """Applies multi-head self-attention (query = key = value = inputs).

    Args:
        d_model: total attention width, split evenly across heads.
        num_heads: number of attention heads.
    """

    def __init__(self, d_model, num_heads):
        super().__init__()
        # BUG FIX: keras.layers.MultiHeadAttention's signature is
        # (num_heads, key_dim) -- the original passed (d_model, num_heads),
        # swapping the two (128 heads of size 8). key_dim is the size of
        # EACH head, so use d_model // num_heads to keep the total
        # projection width at d_model.
        # NOTE(review): assumes `MultiHeadAttention` is
        # keras.layers.MultiHeadAttention (the import is not visible in
        # this chunk) -- confirm against the notebook's import cell.
        self.mha = MultiHeadAttention(num_heads=num_heads,
                                      key_dim=d_model // num_heads)

    def call(self, inputs):
        # Self-attention: query, key and value are all the input tensor.
        # BUG FIX: Keras MHA returns a single tensor (not a tuple) unless
        # return_attention_scores=True, so the original
        # `output, _ = self.mha(v, k, q)` unpacking fails; its positional
        # order (query, value, key) was also scrambled. Keyword args make
        # the intent unambiguous.
        return self.mha(query=inputs, value=inputs, key=inputs)
# Attention hyperparameters
num_heads = 8
d_model = 128

# Build the CLMHA model (CNN -> LSTM -> Multi-Head Attention -> classifier)
CLMHA_model = keras.Sequential()
# Input is (2, 128, 1); 1x3 kernels convolve along the 128-sample axis only
CLMHA_model.add(layers.Conv2D(32, (1, 3), activation='relu',
                              input_shape=(2, 128, 1)))
CLMHA_model.add(layers.MaxPooling2D((1, 2)))
# Collapse the feature maps into a 32-step sequence for the recurrent stage
CLMHA_model.add(layers.Reshape((32, -1)))
CLMHA_model.add(layers.LSTM(64, return_sequences=True))
CLMHA_model.add(CustomMultiHeadAttention(d_model, num_heads))
CLMHA_model.add(layers.GlobalAveragePooling1D())
CLMHA_model.add(layers.Dropout(0.5))
# Dense head with L2 regularization to curb overfitting
CLMHA_model.add(layers.Dense(64, activation='relu',
                             kernel_regularizer=keras.regularizers.l2(0.001)))
CLMHA_model.add(layers.Dropout(0.5))
# 11 output classes, softmax probabilities
CLMHA_model.add(layers.Dense(11, activation='softmax'))
CNN结果:
LSTM结果
CNN-LSTM
CNN-LSTM-Multi-head-attention