Bayesian Regression Prediction

# This post uses Bayesian ridge regression (scikit-learn's BayesianRidge)
import numpy as np
import pandas as pd
# metrics provides the evaluation functions
from sklearn import metrics
from sklearn.metrics import r2_score
from math import sqrt
# Load the data
features = pd.read_csv('/Users/mac/Desktop/111.csv', encoding='gbk')
# Shape of the raw data
print('The shape of our features is:', features.shape)
# One-hot encode categorical features
features = pd.get_dummies(features)
# Labels: the target column to predict
labels = np.array(features['price'])
labels = labels.astype('int')
# Features: drop the target column
features = features.drop('price', axis=1)
# Keep the feature names as a list
feature_list = list(features.columns)
# Convert to np.array
features = np.array(features)
# Split into training and test sets
from sklearn.model_selection import train_test_split
train_features, test_features, train_labels, test_labels = train_test_split(
    features, labels, test_size=0.25, random_state=3)
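The r2_score, metrics and sqrt imports above suggest the model is meant to be scored on the held-out split. A minimal sketch of that step, assuming the split above and scikit-learn's BayesianRidge (the variable names reg and test_pred are illustrative, not from the original post):

from sklearn.linear_model import BayesianRidge

# Fit a Bayesian ridge regressor on the training split
reg = BayesianRidge()
reg.fit(train_features, train_labels)

# Score on the held-out test split
test_pred = reg.predict(test_features)
print('R2:', r2_score(test_labels, test_pred))
print('RMSE:', sqrt(metrics.mean_squared_error(test_labels, test_pred)))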



from scipy import stats
from sklearn.linear_model import BayesianRidge
"""Bayesian ridge regression on simulated data"""
np.random.seed(0)
# Simulate a design matrix X with n_samples rows and n_features columns
n_samples, n_features = 100, 100
X = np.random.randn(n_samples, n_features)
# Create weights with a precision lambda_ of 4.
lambda_ = 4.
w = np.zeros(n_features)
# Only keep 10 weights of interest
relevant_features = np.random.randint(0, n_features, 10)
for i in relevant_features:
    w[i] = stats.norm.rvs(loc=0, scale=1. / np.sqrt(lambda_))
# Create noise with a precision alpha of 50.
alpha_ = 50.
noise = stats.norm.rvs(loc=0, scale=1. / np.sqrt(alpha_), size=n_samples)
# Create the target
y = np.dot(X, w) + noise

# #############################################################################
# Fit the Bayesian Ridge Regression and an OLS model for comparison
from sklearn.linear_model import LinearRegression
clf = BayesianRidge(compute_score=True)
clf.fit(X, y)
ols = LinearRegression()
ols.fit(X, y)
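As a follow-up sketch (not part of the original post), one might compare the two fitted models against the true simulated weights w and look at the precisions estimated by the Bayesian model:

# Compare recovered coefficients with the true simulated weights
print('Estimated noise precision alpha_:', clf.alpha_)
print('Estimated weight precision lambda_:', clf.lambda_)
print('Coefficient MSE (Bayesian ridge):', np.mean((clf.coef_ - w) ** 2))
print('Coefficient MSE (OLS):', np.mean((ols.coef_ - w) ** 2))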