# 贝叶斯岭回归(Bayesian Ridge Regression)
import numpy as np
import matplotlib.pyplot as plt
from scipy import stats
from sklearn.linear_model import BayesianRidge, LinearRegression
# Generate simulated data with Gaussian weights:
#   y = X w + noise, where only 10 randomly chosen features are relevant.
np.random.seed(0)  # fix the RNG seed so the run is reproducible
n_samples, n_features = 100, 100
X = np.random.randn(n_samples, n_features)  # Gaussian design matrix

# lambda_ is the precision of the weight prior: relevant weights ~ N(0, 1/lambda_).
lambda_ = 4.
w = np.zeros(n_features)
relevant_features = np.random.randint(0, n_features, 10)
print(relevant_features)
for i in relevant_features:
    # Draw each relevant weight from a zero-mean Gaussian with std 1/sqrt(lambda_).
    w[i] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(lambda_))
print("w: ", w)

# alpha_ is the noise precision: noise ~ N(0, 1/alpha_).
# BUG FIX: the noise was previously drawn with scale 1/sqrt(lambda_),
# which left alpha_ unused; the noise precision must be alpha_.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)
y = np.dot(X, w) + noise
print("X: ", X)
print("y: ", y)
# Fit and compare BayesianRidge against ordinary LinearRegression
# on the simulated data; `.fit` returns the estimator itself, so the
# fitted models can be bound in a single expression each.
clf = BayesianRidge(compute_score=True).fit(X, y)  # Bayesian ridge estimator
ols = LinearRegression().fit(X, y)                 # OLS baseline
lw = 2  # line width used by the plots below
plt.figure(figsi
『sklearn学习』贝叶斯岭回归
最新推荐文章于 2024-07-15 14:24:14 发布
本文介绍了贝叶斯岭回归的概念,并通过Python的sklearn库展示了如何拟合高斯权重模拟数据。对比了BayesianRidge和LinearRegression在估计权重上的差异,并通过图表展示了权重的分布和模型得分随迭代的变化。
摘要由CSDN通过智能技术生成