A simple attention construction
from tensorflow.keras.layers import Input, Dense, Multiply
from tensorflow.keras.models import Model

def build_model(input_dim, class_num=2):  # binary classification by default
    inputs = Input(shape=(input_dim,))  # input layer
    # attention layer: a softmax over the input features yields per-feature weights
    attention_probs = Dense(input_dim, activation='softmax', name='attention_vec')(inputs)
    attention_mul = Multiply()([inputs, attention_probs])  # re-weight the input features
    attention_mul = Dense(64)(attention_mul)  # fully connected layer
    output = Dense(class_num, activation='softmax')(attention_mul)  # output layer
    model = Model(inputs=[inputs], outputs=output)
    return model
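
As a quick sanity check, the sketch below (assuming TensorFlow 2.x Keras, a hypothetical input_dim of 32, and randomly generated data for illustration) compiles and briefly trains the model, then taps the 'attention_vec' layer to inspect the learned attention weights; each row of weights sums to 1 because of the softmax.

import numpy as np
from tensorflow.keras.models import Model

# Hypothetical dimensions and random data, for illustration only.
input_dim, n_samples = 32, 256
x = np.random.rand(n_samples, input_dim).astype('float32')
y = np.random.randint(0, 2, size=n_samples)

model = build_model(input_dim, class_num=2)
model.compile(optimizer='adam',
              loss='sparse_categorical_crossentropy',
              metrics=['accuracy'])
model.fit(x, y, epochs=3, batch_size=32, verbose=0)

# Read out the attention weights by tapping the 'attention_vec' layer.
attention_model = Model(inputs=model.input,
                        outputs=model.get_layer('attention_vec').output)
attention_scores = attention_model.predict(x[:5])  # shape: (5, input_dim)
print(attention_scores.sum(axis=1))  # each row sums to 1 (softmax output)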