attention_lstm code

The snippet below defines a soft-attention block over the time axis and wires it between stacked LSTM layers to build a small tf.keras regression model.

import tensorflow as tf

def attention_3d_block(inputs, TIME_STEPS, SINGLE_ATTENTION_VECTOR):
    # inputs.shape = (batch_size, time_steps, input_dim)
    input_dim = int(inputs.shape[2])
    a = tf.keras.layers.Permute((2, 1))(inputs)  # -> (batch_size, input_dim, time_steps)
    a = tf.keras.layers.Reshape((input_dim, TIME_STEPS))(a)  # no-op; kept only to document the shape
    # One softmax over the time axis per feature: the attention weights.
    a = tf.keras.layers.Dense(TIME_STEPS, activation='softmax')(a)
    if SINGLE_ATTENTION_VECTOR:
        # Average the per-feature weights into one shared attention vector,
        # then repeat it so every feature uses the same weights.
        a = tf.keras.layers.Lambda(lambda x: tf.keras.backend.mean(x, axis=1), name='dim_reduction')(a)
        a = tf.keras.layers.RepeatVector(input_dim)(a)
    a_probs = tf.keras.layers.Permute((2, 1), name='attention_vec')(a)  # back to (batch_size, time_steps, input_dim)
    output_attention_mul = tf.keras.layers.Multiply()([inputs, a_probs])  # element-wise weighting of the inputs
    return output_attention_mul
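
A quick shape check of the block in isolation (a minimal sketch; the TIME_STEPS=4, INPUT_DIM=8 sizes are arbitrary illustration values, not from the original post):

# Sanity check: the block preserves the (batch, time_steps, input_dim) shape.
x = tf.keras.Input(shape=(4, 8))  # TIME_STEPS=4, INPUT_DIM=8
y = attention_3d_block(x, TIME_STEPS=4, SINGLE_ATTENTION_VECTOR=1)
print(y.shape)  # (None, 4, 8)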



def attention_lstm(TIME_STEPS, INPUT_DIM, lstm_units=32):
    tf.keras.backend.clear_session()  # clear any previous graph so memory does not fill up
    inputs = tf.keras.Input(shape=(TIME_STEPS, INPUT_DIM))
    x = tf.keras.layers.LSTM(lstm_units, return_sequences=True, dropout=0.5)(inputs)
    x = tf.keras.layers.LSTM(lstm_units, return_sequences=True)(x)
    # Weight the hidden-state sequence with the attention block (shared vector: flag = 1).
    attention_mul = attention_3d_block(x, TIME_STEPS, 1)
    # A final LSTM collapses the weighted sequence into a single vector.
    lstm_out = tf.keras.layers.LSTM(lstm_units, recurrent_regularizer=tf.keras.regularizers.l2())(attention_mul)
    x = tf.keras.layers.Flatten()(lstm_out)  # no-op on an already-flat 2-D tensor
    output = tf.keras.layers.Dense(1)(x)  # single regression output
    model = tf.keras.Model(inputs=inputs, outputs=output)
    return model
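
A minimal end-to-end sketch of building and training the model (assumptions: synthetic NumPy data and an MSE regression loss, both chosen purely for illustration):

import numpy as np

model = attention_lstm(TIME_STEPS=10, INPUT_DIM=3)
model.compile(optimizer='adam', loss='mse')
model.summary()

# Synthetic data: 64 sequences, 10 time steps, 3 features each (illustrative only).
X = np.random.rand(64, 10, 3).astype('float32')
y = np.random.rand(64, 1).astype('float32')
model.fit(X, y, epochs=2, batch_size=16)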
