day74

package machinelearning.ann;

/**
 * ******************************************
 *
 * @author Michelle Min MitchelleMin@163.com
 * @date 2021-08-07
 * ******************************************
 */
class Activator {
    /**
     * Arc tan.
     */
    public final char ARC_TAN = 'a';

    /**
     * Elu.
     */
    public final char ELU = 'e';

    /**
     * Gelu.
     */
    public final char GELU = 'g';

    /**
     * Hard logistic.
     */
    public final char HARD_LOGISTIC = 'h';

    /**
     * Identity.
     */
    public final char IDENTITY = 'i';

    /**
     * Leaky relu; when the negative slope is learned it is called parametric relu.
     */
    public final char LEAKY_RELU = 'l';

    /**
     * Relu.
     */
    public final char RELU = 'r';

    /**
     * Soft sign.
     */
    public final char SOFT_SIGN = 'o';

    /**
     * Sigmoid.
     */
    public final char SIGMOID = 's';

    /**
     * Tanh.
     */
    public final char TANH = 't';

    /**
     * Soft plus.
     */
    public final char SOFT_PLUS = 'u';

    /**
     * Swish.
     */
    public final char SWISH = 'w';

    /**
     * The activator.
     */
    private char activator;

    /**
     * Alpha for elu; also used as the negative slope of leaky relu.
     */
    double alpha;

    /**
     * Beta for leaky relu (currently unused; leaky relu uses alpha).
     */
    double beta;

    /**
     * Gamma (currently unused in this class).
     */
    double gamma;

    /**
     *********************
     * The first constructor.
     *
     * @param paraActivator
     *            The activator.
     *********************
     */
    public Activator(char paraActivator) {
        activator = paraActivator;
    }//of the first constructor

    /**
     *********************
     * Setter.
     *********************
     */
    public void setActivator(char paraActivator) {
        activator = paraActivator;
    }//of setActivator

    /**
     *********************
     * Getter.
     *********************
     */
    public char getActivator() {
        return activator;
    }//of getActivator

    /**
     *********************
     * Setter.
     *********************
     */
    void setAlpha(double paraAlpha) {
        alpha = paraAlpha;
    }//of setAlpha

    /**
     *********************
     * Setter.
     *********************
     */
    void setBeta(double paraBeta) {
        beta = paraBeta;
    }//of setBeta

    /**
     *********************
     * Setter.
     *********************
     */
    void setGamma(double paraGamma) {
        gamma = paraGamma;
    }//of setGamma

    /**
     *********************
     * Activate according to the activation function.
     *
     * @param paraValue
     *            The value to activate.
     * @return The activated value f(x).
     *********************
     */
    public double activate(double paraValue) {
        double resultValue = 0;
        switch (activator) {
            case ARC_TAN:
                resultValue = Math.atan(paraValue);
                break;
            case ELU:
                if (paraValue >= 0) {
                    resultValue = paraValue;
                } else {
                    resultValue = alpha * (Math.exp(paraValue) - 1);
                } // Of if
                break;
            // case GELU:
            // resultValue = ?;
            // break;
            // case HARD_LOGISTIC:
            // resultValue = ?;
            // break;
            case IDENTITY:
                resultValue = paraValue;
                break;
            case LEAKY_RELU:
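                // For negative inputs the slope is alpha.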
                if (paraValue >= 0) {
                    resultValue = paraValue;
                } else {
                    resultValue = alpha * paraValue;
                }//of if
                break;
            case SOFT_SIGN:
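                // softsign(x) = x / (1 + |x|); the sign branches handle |x|.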
                if (paraValue >= 0) {
                    resultValue = paraValue / (1 + paraValue);
                } else {
                    resultValue = paraValue / (1 - paraValue);
                }//of if
                break;
            case SOFT_PLUS:
                resultValue = Math.log(1 + Math.exp(paraValue));
                break;
            case RELU:
                if (paraValue >= 0) {
                    resultValue = paraValue;
                } else {
                    resultValue = 0;
                }//of if
                break;
            case SIGMOID:
                resultValue = 1 / (1 + Math.exp(-paraValue));
                break;
            case TANH:
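                // tanh(x) = 2 / (1 + e^(-2x)) - 1, i.e., 2 * sigmoid(2x) - 1.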
                resultValue = 2 / (1 + Math.exp(-2 * paraValue)) - 1;
                break;
            // case SWISH:
            // resultValue = ?;
            // break;
            default:
                System.out.println("Unsupported activator: " + activator);
                System.exit(0);
        }//of switch

        return resultValue;
    }//of activate

    /**
     *********************
     * Compute the derivative according to the activation function. Some
     * derivatives are expressed in terms of x while others use f(x).
     *
     * @param paraValue
     *            The original value x.
     * @param paraActivatedValue
     *            The activated value f(x).
     * @return The derivative at x.
     *********************
     */
    public double derive(double paraValue, double paraActivatedValue) {
        double resultValue = 0;
        switch (activator) {
            case ARC_TAN:
                resultValue = 1 / (paraValue * paraValue + 1);
                break;
            case ELU:
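                // For x < 0 the elu derivative is alpha * e^x, written here as f(x) + alpha.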
                if (paraValue >= 0) {
                    resultValue = 1;
                } else {
                    resultValue = alpha * (Math.exp(paraValue) - 1) + alpha;
                }//of if
                break;
            // case GELU:
            // resultValue = ?;
            // break;
            // case HARD_LOGISTIC:
            // resultValue = ?;
            // break;
            case IDENTITY:
                resultValue = 1;
                break;
            case LEAKY_RELU:
                if (paraValue >= 0) {
                    resultValue = 1;
                } else {
                    resultValue = alpha;
                }//of if
                break;
            case SOFT_SIGN:
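                // d/dx softsign(x) = 1 / (1 + |x|)^2.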
                if (paraValue >= 0) {
                    resultValue = 1 / (1 + paraValue) / (1 + paraValue);
                } else {
                    resultValue = 1 / (1 - paraValue) / (1 - paraValue);
                }//of if
                break;
            case SOFT_PLUS:
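                // d/dx ln(1 + e^x) = 1 / (1 + e^(-x)), i.e., sigmoid(x).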
                resultValue = 1 / (1 + Math.exp(-paraValue));
                break;
            case RELU: // Updated
                if (paraValue >= 0) {
                    resultValue = 1;
                } else {
                    resultValue = 0;
                }//of if
                break;
            case SIGMOID: // Updated
                resultValue = paraActivatedValue * (1 - paraActivatedValue);
                break;
            case TANH: // Updated
                resultValue = 1 - paraActivatedValue * paraActivatedValue;
                break;
            // case SWISH:
            // resultValue = ?;
            // break;
            default:
                System.out.println("Unsupported activator: " + activator);
                System.exit(0);
        }//of switch

        return resultValue;
    }//of derive

    /**
     *********************
     * Overrides the method declared in Object.
     *********************
     */
    @Override
    public String toString() {
        String resultString = "Activator with function '" + activator + "'";
        resultString += "\r\n alpha = " + alpha + ", beta = " + beta + ", gamma = " + gamma;

        return resultString;
    }//of toString

    /**
     ********************
     * Test the class.
     ********************
     */
    public static void main(String[] args) {
        Activator tempActivator = new Activator('s');
        double tempValue = 0.6;
        double tempNewValue;
        tempNewValue = tempActivator.activate(tempValue);
        System.out.println("After activation: " + tempNewValue);

        tempNewValue = tempActivator.derive(tempValue, tempNewValue);
        System.out.println("After derive: " + tempNewValue);
    }//of main
}//of class Activator
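
Three of the activators above (GELU, hard logistic and swish) are still left as "?" placeholders in the switch statements. Below is a minimal, self-contained sketch of what those branches could compute. The tanh-based GELU approximation is the standard one; the hard-logistic form max(0, min(1, 0.2x + 0.5)) and the use of a beta scale for swish are assumptions here, since the Activator class above does not pin them down.

package machinelearning.ann;

/**
 * ******************************************
 * A sketch of the three activators left as "?" in Activator. The
 * hard-logistic form and the swish scale are assumptions, not taken from
 * the class above.
 * ******************************************
 */
class ActivatorSketch {

    /**
     *********************
     * Gelu, using the common tanh approximation
     * 0.5 * x * (1 + tanh(sqrt(2 / pi) * (x + 0.044715 * x^3))).
     *********************
     */
    static double gelu(double paraValue) {
        double tempInner = Math.sqrt(2 / Math.PI)
                * (paraValue + 0.044715 * Math.pow(paraValue, 3));
        return 0.5 * paraValue * (1 + Math.tanh(tempInner));
    }//of gelu

    /**
     *********************
     * Hard logistic (assumed form): the piecewise-linear approximation
     * max(0, min(1, 0.2 * x + 0.5)) of the sigmoid.
     *********************
     */
    static double hardLogistic(double paraValue) {
        return Math.max(0, Math.min(1, 0.2 * paraValue + 0.5));
    }//of hardLogistic

    /**
     *********************
     * Swish: x * sigmoid(beta * x). With beta = 1 this is also known as SiLU.
     *********************
     */
    static double swish(double paraValue, double paraBeta) {
        return paraValue / (1 + Math.exp(-paraBeta * paraValue));
    }//of swish

    /**
     *********************
     * Derivative of swish with respect to x:
     * sigmoid(beta * x) + beta * x * sigmoid(beta * x) * (1 - sigmoid(beta * x)).
     *********************
     */
    static double swishDerivative(double paraValue, double paraBeta) {
        double tempSigmoid = 1 / (1 + Math.exp(-paraBeta * paraValue));
        return tempSigmoid + paraBeta * paraValue * tempSigmoid * (1 - tempSigmoid);
    }//of swishDerivative

    /**
     ********************
     * Test the sketch.
     ********************
     */
    public static void main(String[] args) {
        double tempValue = 0.6;
        System.out.println("gelu(0.6) = " + gelu(tempValue));
        System.out.println("hardLogistic(0.6) = " + hardLogistic(tempValue));
        System.out.println("swish(0.6, 1) = " + swish(tempValue, 1));
        System.out.println("swish'(0.6, 1) = " + swishDerivative(tempValue, 1));
    }//of main
}//of class ActivatorSketch

If these bodies were folded back into the corresponding case branches of activate and derive, note that swish needs the raw x (and a beta), not just f(x).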
