Keras attention code

Two ways to combine attention with an LSTM in Keras: run the LSTM first and attend over its hidden states (lstm+attention), or weight the raw inputs with attention before feeding them to the LSTM (attention+lstm).

import keras.backend as K
from keras.layers import (Dense, Flatten, Input, LSTM, Lambda, Multiply,
                          Permute, RepeatVector, Reshape)
from keras.models import Model

TIME_STEPS = 20                  # example values
INPUT_DIM = 2
SINGLE_ATTENTION_VECTOR = False
lstm_units = 32

# lstm+attention: run the LSTM, then attend over its hidden states
inputs = Input(shape=(TIME_STEPS, INPUT_DIM))
lstm_out = LSTM(lstm_units, return_sequences=True)(inputs)  # (batch, TIME_STEPS, lstm_units)
a = Permute((2, 1))(lstm_out)                               # (batch, lstm_units, TIME_STEPS)
a = Reshape((lstm_units, TIME_STEPS))(a)  # no-op; just documents which dimension is which
a = Dense(TIME_STEPS, activation='softmax')(a)              # one weight per time step
if SINGLE_ATTENTION_VECTOR:
    # share a single attention vector across all feature dimensions
    a = Lambda(lambda x: K.mean(x, axis=1), name='dim_reduction')(a)
    a = RepeatVector(lstm_units)(a)
a_probs = Permute((2, 1), name='attention_vec')(a)          # (batch, TIME_STEPS, lstm_units)
output_attention_mul = Multiply(name='attention_mul')([lstm_out, a_probs])
attention_mul = Flatten()(output_attention_mul)
output = Dense(1, activation='sigmoid')(attention_mul)
model = Model(inputs=[inputs], outputs=output)

#######################################

# attention+lstm: attend over the raw inputs, then run the LSTM
inputs = Input(shape=(TIME_STEPS, INPUT_DIM))
a = Permute((2, 1))(inputs)                                 # (batch, INPUT_DIM, TIME_STEPS)
a = Reshape((INPUT_DIM, TIME_STEPS))(a)   # no-op; just documents which dimension is which
a = Dense(TIME_STEPS, activation='softmax')(a)
if SINGLE_ATTENTION_VECTOR:
    a = Lambda(lambda x: K.mean(x, axis=1), name='dim_reduction')(a)
    a = RepeatVector(INPUT_DIM)(a)
a_probs = Permute((2, 1), name='attention_vec')(a)          # (batch, TIME_STEPS, INPUT_DIM)
output_attention_mul = Multiply(name='attention_mul')([inputs, a_probs])
attention_mul = LSTM(lstm_units, return_sequences=False)(output_attention_mul)
output = Dense(1, activation='sigmoid')(attention_mul)
model = Model(inputs=[inputs], outputs=output)

#########################################
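A minimal usage sketch, assuming the example constants above. The toy data, the labeling rule, and the names N, x, y, attn_model are illustrative, not from the original post; model refers to whichever of the two variants was built last. The attention-weight extraction works for either variant, since both name the softmax output 'attention_vec'.

import numpy as np

# Toy data: N sequences, labeled by whether the first feature's sum over
# time exceeds its expected value (a hypothetical rule, for illustration).
N = 1000
x = np.random.uniform(size=(N, TIME_STEPS, INPUT_DIM)).astype('float32')
y = (x[:, :, 0].sum(axis=1) > TIME_STEPS / 2).astype('float32')

model.compile(optimizer='adam', loss='binary_crossentropy',
              metrics=['accuracy'])
model.fit(x, y, epochs=5, batch_size=64, validation_split=0.1)

# Inspect the learned attention: build a sub-model that outputs the
# 'attention_vec' layer, then average over samples and feature dimensions.
attn_model = Model(inputs=model.input,
                   outputs=model.get_layer('attention_vec').output)
attn = attn_model.predict(x[:16])  # (16, TIME_STEPS, dim)
print(attn.mean(axis=(0, 2)))      # mean attention weight per time step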