import tensorflow as tf
from keras.layers import Dense, Dropout, Activation
from keras.models import Sequential
from keras.utils.generic_utils import get_custom_objects
class Lrelu(Activation):
    """Keras Activation subclass that wraps a callable under the name 'lrelu'.

    Keras looks at ``__name__`` when serializing/deserializing custom
    activations, so the wrapper pins it to the registered key.
    """

    def __init__(self, activation, **kwargs):
        super().__init__(activation, **kwargs)
        # Must match the key used with get_custom_objects() so the
        # activation round-trips through model save/load by name.
        self.__name__ = 'lrelu'
def lrelu(x, leak=0.2):
    """Leaky ReLU activation: identity for x >= 0, ``leak * x`` for x < 0.

    Implemented branch-free as ``f1*x + f2*|x|`` with f1 = (1+leak)/2 and
    f2 = (1-leak)/2, which equals x when x >= 0 and leak*x when x < 0.

    # Arguments
        x: Tensor or scalar input.
        leak: Float, slope for negative inputs (default 0.2).
    # Returns
        Tensor or scalar with the activation applied element-wise.
    """
    f1 = 0.5 * (1 + leak)
    f2 = 0.5 * (1 - leak)
    return f1 * x + f2 * abs(x)
# Register the wrapped activation under the key 'lrelu' so layers can
# reference it by string, e.g. Activation('lrelu') below in _get_sae.
get_custom_objects().update({'lrelu': Lrelu(lrelu)})
def _get_sae(inputs, hidden, output):
    """SAE (Auto-Encoder).

    Build one auto-encoder block:
    input -> Dense(hidden) -> lrelu -> Dropout(0.2) -> Dense(output, tanh).

    # Arguments
        inputs: Integer, number of input units.
        hidden: Integer, number of hidden units.
        output: Integer, number of output units.
    # Returns
        model: Model, nn model.
    """
    stack = (
        Dense(hidden, input_dim=inputs, name='hidden'),
        Activation('lrelu'),      # custom leaky ReLU registered above
        Dropout(0.2),
        Dense(output, activation='tanh'),
    )
    model = Sequential()
    for layer in stack:
        model.add(layer)
    return model
# TensorFlow custom activation function
# First published 2022-05-07 10:21:15