Derivative of the Sigmoid Function

import numpy as np
from scipy.special import expit, logit

# expit is the sigmoid function
print(expit([-np.inf, -5.0, -1.0, 0., 1., 5.0, np.inf]))

# logit(p) = log(p / (1 - p)) is the inverse of expit:
print(logit(expit([-np.inf, -5.0, -1.0, 0., 1., 5.0, np.inf])))

1. The sigmoid function

H(x)=\frac{1}{1+e^{-x}}

import numpy as np
import matplotlib.pyplot as plt
from scipy.special import expit

A = np.linspace(-10, 10, 100)
H = expit(A)
plt.plot(A, H)
plt.grid()
plt.show()

Code:

import numpy as np
import matplotlib.pyplot as plt

def MarginFunction(x):
    # the sigmoid function H(x) = 1 / (1 + exp(-x))
    return 1 / (1 + np.exp(-x))

x = np.linspace(-10, 10, 100)
y = MarginFunction(x)
plt.scatter(x, y, c=y, marker='.')
plt.grid()
plt.show()

An important identity

The key identity of this post, derived step by step below, is

H'(x)=H(x)\,\bigl(1-H(x)\bigr)

===============================================================

2. The logistic loss (log_loss)

In machine learning, the sigmoid function is often used to construct a loss function, the logistic loss (log_loss).

L(z)=-\log\bigl(H(z)\bigr)=-\log\frac{1}{1+e^{-z}}=\log\bigl(1+e^{-z}\bigr)\geq 0

import numpy as np
import matplotlib.pyplot as plt
from scipy.special import expit

def log_loss(Z):
    # Numerically stable computation of the logistic loss log(1 + exp(-z)):
    # for z <= 0, use log(1 + exp(-z)) = -z + log(1 + exp(z)) to avoid overflow.
    idx = Z > 0
    out = np.zeros_like(Z)
    out[idx] = np.log(1 + np.exp(-Z[idx]))
    out[~idx] = -Z[~idx] + np.log(1 + np.exp(Z[~idx]))
    return out

A = np.linspace(-10, 10, 100)
loss = log_loss(A)
assert np.allclose(loss, -np.log(expit(A)))  # same values, but stable for large |z|

plt.scatter(A, loss)
plt.show()

Code:

import numpy as np
import matplotlib.pyplot as plt

def MarginFunction(x):
    return 1 / (1 + np.exp(-x))

x = np.linspace(-10, 10, 100)
y = MarginFunction(x)
y = -np.log(y)  # L(z) = -log(H(z)); natural log, matching the formula above
plt.scatter(x, y, c=y, marker='.')
plt.ylabel("L(z)")
plt.xlabel("z")
plt.grid()
plt.show()

Derivation of the sigmoid derivative

\begin{aligned}
H'(x) &= (-1)\,(1+e^{-x})^{-2}\cdot e^{-x}\cdot(-1) \\
      &= (1+e^{-x})^{-2}\, e^{-x} \\
      &= \frac{e^{-x}}{(1+e^{-x})^2} \\
      &= \frac{1+e^{-x}-1}{(1+e^{-x})^2} \\
      &= \frac{1+e^{-x}}{(1+e^{-x})^2}-\frac{1}{(1+e^{-x})^2} \\
      &= \frac{1}{1+e^{-x}}\left(1-\frac{1}{1+e^{-x}}\right) \\
      &= H(x)\,\bigl(1-H(x)\bigr)
\end{aligned}
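As a quick sanity check (my addition, not part of the original derivation), we can compare H(x)(1-H(x)) against a central finite difference of expit:

import numpy as np
from scipy.special import expit

x = np.linspace(-5, 5, 11)
analytic = expit(x) * (1 - expit(x))               # H'(x) = H(x)(1 - H(x))
h = 1e-6
numeric = (expit(x + h) - expit(x - h)) / (2 * h)  # central difference
print(np.max(np.abs(analytic - numeric)))         # agreement to roughly 1e-10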

Derivative of the log-sigmoid function

\begin{aligned}
\bigl(\log H(z)\bigr)' &= \frac{1}{H(z)}\,H'(z) \\
                       &= \frac{1}{H(z)}\,H(z)\,\bigl(1-H(z)\bigr) \\
                       &= 1-H(z)
\end{aligned}

Derivative of log_loss (i.e., of the negative log-sigmoid)

L'(z)=\bigl(-\log H(z)\bigr)'=-\bigl(1-H(z)\bigr)=H(z)-1

3. Logistic loss and zero-one loss

From the plot below, we can see that the logistic loss (with the natural logarithm) does not upper-bound the zero-one loss: just left of z = 0 the zero-one loss is 1, while the logistic loss is only about log 2 ≈ 0.69.

import numpy as np
import matplotlib.pyplot as plt

def MarginFunction(x):
    return 1 / (1 + np.exp(-x))

x = np.linspace(-3, 3, 100)
y = -np.log(MarginFunction(x))  # logistic loss L(z)

y2 = np.where(x < 0, 1.0, 0.0)  # zero-one loss: 1 iff the margin is negative

plt.scatter(x, y, c=y, marker='.')
plt.scatter(x, y2, c="r", marker=".")
plt.ylabel("L(z)")
plt.xlabel("z")
plt.grid()
plt.show()
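For contrast (an addition to the original post): if the logarithm is taken base 2, i.e. the loss is rescaled by 1/ln 2, the logistic loss does upper-bound the zero-one loss, since log2(1 + e^{-z}) equals 1 exactly at z = 0 and exceeds 1 for all z < 0:

import numpy as np

z = np.linspace(-3, 3, 100)
logistic_nat = np.log(1 + np.exp(-z))    # natural log: not an upper bound
logistic_2 = np.log2(1 + np.exp(-z))     # base 2: an upper bound
zero_one = (z < 0).astype(float)
print(np.all(logistic_nat >= zero_one))  # False
print(np.all(logistic_2 >= zero_one))    # True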

4. The squared error with the sigmoid function vs. the zero-one error

\text{loss}=\sum_{i=1}^{n}\bigl(y_i-\pi(\mathbf{x}_i)\bigr)^2

import numpy as np
import matplotlib.pyplot as plt

def pi(x):
    # sigmoid, used as the predicted probability pi(x)
    return 1 / (1 + np.exp(-x))

x = np.linspace(-10, 10, 1000)
p = pi(x)  # predicted probabilities

# Zero-one error, assuming the true label is 1: 1 when p < 0.5, else 0.
Y = np.where(p < 0.5, 1.0, 0.0)

# Squared error: against label 1 when p < 0.5, against label 0 when p >= 0.5.
Y2 = np.where(p < 0.5, (1 - p) ** 2, p ** 2)

plt.scatter(p, Y, c="b", marker=".")
plt.scatter(p, Y2, c="r", marker=".")
plt.grid()
plt.show()

 
