# -*- coding=utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import xgboost as xgb
# train data
def get_train_data(data_size=100):
    data_label = np.zeros((2*data_size, 1))
    # class 1: Gaussian cluster centred at (3, 4)
    x1 = np.reshape(np.random.normal(3, 1, data_size), (data_size, 1))
    y1 = np.reshape(np.random.normal(4, 1, data_size), (data_size, 1))
    data_train = np.concatenate((x1, y1), axis=1)
    data_label[0:data_size, :] = 0
    # class 2: Gaussian cluster centred at (1, 0.5)
    x2 = np.reshape(np.random.normal(1, 1, data_size), (data_size, 1))
    y2 = np.reshape(np.random.normal(0.5, 1, data_size), (data_size, 1))
    data_train = np.concatenate((data_train, np.concatenate((x2, y2), axis=1)), axis=0)
    data_label[data_size:2*data_size, :] = 1
    return data_train, data_label
# test data: points drawn uniformly from [start, end] on both axes
def get_test_data(start, end, data_size=100):
    data1 = (end - start) * np.random.random((data_size, 1)) + start
    # the original snippet breaks off here; the second coordinate and the
    # return value are reconstructed by analogy with data1
    data2 = (end - start) * np.random.random((data_size, 1)) + start
    data_test = np.concatenate((data1, data2), axis=1)
    return data_test
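# The post's training step is not shown in the snippet above; the block below
# is a minimal sketch of how the two helper functions could feed an XGBoost
# classifier. It assumes the scikit-learn wrapper xgb.XGBClassifier, and the
# hyperparameters and test range are illustrative, not from the original article.
if __name__ == '__main__':
    data_size = 100
    train_data, train_label = get_train_data(data_size)
    test_data = get_test_data(-2, 6, data_size)          # range chosen to cover both clusters
    model = xgb.XGBClassifier(n_estimators=50, max_depth=3)
    model.fit(train_data, train_label.ravel().astype(int))  # xgboost expects 1-D integer labels
    pred = model.predict(test_data)
    # plot the training clusters and the predicted class of each test point
    plt.scatter(train_data[:data_size, 0], train_data[:data_size, 1], c='b', marker='o', label='class 0 (train)')
    plt.scatter(train_data[data_size:, 0], train_data[data_size:, 1], c='r', marker='o', label='class 1 (train)')
    colors = ['b' if p == 0 else 'r' for p in pred]
    plt.scatter(test_data[:, 0], test_data[:, 1], c=colors, marker='x', label='test (predicted)')
    plt.legend()
    plt.show()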
2020-11-03 Generating two datasets that follow Gaussian distributions and making predictions with an XGBoost model
This article describes how to generate two datasets that follow Gaussian distributions and apply the XGBoost machine-learning model to make predictions on them.