"""
@Title: activation_function
@Time: 2024/3/1
@Author: Michael Jie
"""
import numpy as np
# Sigmoid
def sigmoid(x):
    """Element-wise logistic sigmoid: 1 / (1 + exp(-x)).

    Numerically stable for large |x|: the naive form overflows in
    exp(-x) when x is a large negative number (e.g. x = -1000), so we
    branch on the sign and only ever exponentiate non-positive values.

    Args:
        x: scalar or numpy array.
    Returns:
        Array (or scalar) of the same shape with values in (0, 1).
    """
    x = np.asarray(x, dtype=float)
    # exp() argument is always <= 0 in both branches, so it never overflows.
    pos = 1.0 / (1.0 + np.exp(-np.maximum(x, 0)))
    exp_x = np.exp(np.minimum(x, 0))
    neg = exp_x / (1.0 + exp_x)
    return np.where(x >= 0, pos, neg)
# Tanh
def tanh(x):
    """Element-wise hyperbolic tangent.

    Delegates to np.tanh instead of the hand-rolled
    (e^x - e^-x) / (e^x + e^-x), which overflows to nan for |x| > ~710.
    np.tanh is numerically stable and saturates cleanly to +/-1.

    Args:
        x: scalar or numpy array.
    Returns:
        Array (or scalar) of the same shape with values in (-1, 1).
    """
    return np.tanh(x)
# ReLU
def relu(x):
    """Element-wise rectified linear unit: max(0, x).

    Negative inputs are clamped to zero; non-negative inputs pass
    through unchanged.
    """
    return np.clip(x, 0, None)
# Leaky ReLU
def leaky_relu(x, alpha=0.01):
    """Element-wise leaky ReLU.

    Positive inputs pass through; negative inputs are scaled by the
    small slope ``alpha`` (assumed 0 < alpha < 1, as with the default).

    Args:
        x: scalar or numpy array.
        alpha: slope applied on the negative side (default 0.01).
    """
    leaked = x * alpha
    # For 0 < alpha < 1 this picks x when x >= 0 and alpha*x when x < 0.
    return np.maximum(leaked, x)
# ELU
def elu(x, alpha=0.01):
    """Element-wise exponential linear unit.

    ELU(x) = x                      if x > 0
           = alpha * (exp(x) - 1)   if x <= 0

    Bug fix: the previous ``np.maximum(alpha*(exp(x)-1), x)`` form is
    wrong for large positive x — with alpha=0.01, ``0.01*(e^x - 1)``
    exceeds x once x >= ~7, so e.g. elu(99) returned ~9.8e40 instead
    of 99 (and overflowed for very large x). np.where selects the
    correct branch, and exponentiating only the clamped negative part
    avoids overflow; expm1 is also more accurate near zero.

    Args:
        x: scalar or numpy array.
        alpha: scale of the negative saturation (default kept at 0.01
            for backward compatibility; the literature default is 1.0).
    """
    x = np.asarray(x, dtype=float)
    # expm1 argument is clamped to <= 0, so it can never overflow.
    return np.where(x > 0, x, alpha * np.expm1(np.minimum(x, 0)))
# Softmax
def softmax(x):
    """Softmax over all elements of x: exp(x_i) / sum(exp(x_j)).

    Numerically stable: subtracting max(x) before exponentiating leaves
    the result mathematically unchanged but prevents exp() overflow for
    large inputs (the naive form returns nan for x >= ~710).

    Args:
        x: scalar or numpy array (normalized over all elements).
    Returns:
        Non-negative array of the same shape summing to 1.
    """
    shifted = np.exp(x - np.max(x))
    return shifted / np.sum(shifted)
if __name__ == '__main__':
    # Demo: run each activation on the same sample vector, including
    # extreme values (+/-99) to exercise the saturating regions.
    sample = np.array([0, -1, 1, -99, 99])
    for activation in (sigmoid, tanh, relu, leaky_relu, elu, softmax):
        print(activation(sample))
# Python-sklearn-ActivationFunction
# (page footer from the original source: latest recommended article posted 2024-08-14 17:18:35)