sklearn in Practice (KNN + NB + KMeans + DBSCAN)

The KNN classifier in sklearn
algorithm = 'auto'
'ball_tree': ball tree
'kd_tree': k-d tree
'brute': brute-force search
metric = 'minkowski' (Minkowski distance)
n_neighbors = 5 (n is best chosen odd)
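A minimal sketch of how these parameters are passed to the estimator (the values below simply restate the notes above; p=2 is sklearn's default and makes the Minkowski metric equivalent to Euclidean distance):
# minimal sketch: KNeighborsClassifier with the parameters noted above
from sklearn.neighbors import KNeighborsClassifier

knn = KNeighborsClassifier(
    n_neighbors=5,        # number of neighbors; odd values help avoid ties
    algorithm='auto',     # or 'ball_tree', 'kd_tree', 'brute'
    metric='minkowski',   # Minkowski distance
    p=2,                  # p=2 reduces Minkowski to Euclidean distance
)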
# import the KNN classifier
from sklearn.neighbors import KNeighborsClassifier
import numpy as np
import matplotlib.pyplot as plt
# import the dataset generator
from sklearn.datasets import make_blobs
# randomly generate 200 samples in 2 classes
data = make_blobs(n_samples=200,centers=2,random_state=8)
X, y = data
# instantiate the KNN classifier
clf = KNeighborsClassifier()
# train the model
clf.fit(X,y)
KNeighborsClassifier()
# visualize the decision boundary of the trained model
x_min, x_max = X[:,0].min() - 1, X[:, 0].max() +1
y_min, y_max = X[:,1].min() - 1, X[:, 1].max() +1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = clf.predict(np.c_[xx.ravel(),yy.ravel()])
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X[:,0],X[:,1],c = y, cmap=plt.cm.spring, edgecolors='k')
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:KNN")
Text(0.5, 1.0, 'Classifier:KNN')

import pandas as pd
from sklearn.tree import DecisionTreeClassifier
import matplotlib as mpl
import matplotlib.pyplot as plt
# a decision tree on the iris data, for comparison (the Chinese column names below are sepal length/width, petal length/width, class)
iris_feature = u'花萼长度',u'花萼宽度',u'花瓣长度',u'花瓣宽度',u'类别'
path = 'iris.csv'
data = pd.read_csv(path, header=None)
data.columns = iris_feature
data['类别']=pd.Categorical(data['类别']).codes
x_train = data[['花萼长度','花瓣长度']]
y_train = data['类别']
model = DecisionTreeClassifier(criterion='entropy',min_samples_leaf=3)
model.fit(x_train,y_train)
DecisionTreeClassifier(criterion='entropy', min_samples_leaf=3)
N,M = 500, 500
x1_min, x2_min = x_train.min(axis=0)
x1_max, x2_max = x_train.max(axis=0)
t1 = np.linspace(x1_min, x1_max, N)
t2 = np.linspace(x2_min, x2_max, M)
x1, x2 = np.meshgrid(t1,t2)
x_show = np.stack((x1.flat,x2.flat),axis=1)
y_predict=model.predict(x_show)
cm_light = mpl.colors.ListedColormap(['#A0FFA0','#FFA0A0','#A0A0FF'])
cm_dark = mpl.colors.ListedColormap(['g','r','b'])
plt.pcolormesh(x1,x2,y_predict.reshape(x1.shape),cmap = cm_light)
plt.show()
C:\Users\Lenovo\anaconda3\lib\site-packages\sklearn\base.py:450: UserWarning: X does not have valid feature names, but DecisionTreeClassifier was fitted with feature names
  warnings.warn(

# visualize the decision boundary again, together with a new data point
x_min, x_max = X[:,0].min() - 1, X[:, 0].max() +1
y_min, y_max = X[:,1].min() - 1, X[:, 1].max() +1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = clf.predict(np.c_[xx.ravel(),yy.ravel()])
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X[:,0],X[:,1],c = y, cmap=plt.cm.spring, edgecolors='k')
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:KNN")

# plot the new data point
plt.scatter(6.75,4.82,marker='*',c='red',s=200)
<matplotlib.collections.PathCollection at 0x18da763beb0>

print('Predicted class of the new data point:',clf.predict([[6.75,4.82]]))
Predicted class of the new data point: [1]
KNN for multi-class classification
# generate a dataset with 500 samples and 5 classes
data2 = make_blobs(n_samples=500, centers=5,random_state=8)
X2,y2 = data2
plt.scatter(X2[:,0],X2[:,1],c=y2,cmap=plt.cm.spring,edgecolors='k')
plt.show()

# fit a KNN model
clf = KNeighborsClassifier()
clf.fit(X2,y2)
# visualize the fitted decision boundary
x_min, x_max = X2[:,0].min() - 1, X2[:, 0].max() +1
y_min, y_max = X2[:,1].min() - 1, X2[:, 1].max() +1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = clf.predict(np.c_[xx.ravel(),yy.ravel()])
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X2[:,0],X2[:,1],c = y2, cmap=plt.cm.spring, edgecolors='k')
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:KNN")
plt.show()

# model evaluation
print('Model accuracy: {:.2f}'.format(clf.score(X2,y2)))
Model accuracy: 0.96
KNN in practice: wine classification
from matplotlib.colors import ListedColormap
from sklearn import neighbors, datasets
from sklearn.model_selection import train_test_split
# load the data
wine = datasets.load_wine()
wine.keys()
dict_keys(['data', 'target', 'frame', 'target_names', 'DESCR', 'feature_names'])
# check the shape of the feature matrix
wine.data.shape
(178, 13)
# to make visualization easy, keep only the first two features
X = wine.data[:,:2]
y = wine.target
print(wine.DESCR)
.. _wine_dataset:

Wine recognition dataset
------------------------

**Data Set Characteristics:**

    :Number of Instances: 178 (50 in each of three classes)
    :Number of Attributes: 13 numeric, predictive attributes and the class
    :Attribute Information:
 		- Alcohol
 		- Malic acid
 		- Ash
		- Alcalinity of ash  
 		- Magnesium
		- Total phenols
 		- Flavanoids
 		- Nonflavanoid phenols
 		- Proanthocyanins
		- Color intensity
 		- Hue
 		- OD280/OD315 of diluted wines
 		- Proline

    - class:
            - class_0
            - class_1
            - class_2
		
    :Summary Statistics:
    
    ============================= ==== ===== ======= =====
                                   Min   Max   Mean     SD
    ============================= ==== ===== ======= =====
    Alcohol:                      11.0  14.8    13.0   0.8
    Malic Acid:                   0.74  5.80    2.34  1.12
    Ash:                          1.36  3.23    2.36  0.27
    Alcalinity of Ash:            10.6  30.0    19.5   3.3
    Magnesium:                    70.0 162.0    99.7  14.3
    Total Phenols:                0.98  3.88    2.29  0.63
    Flavanoids:                   0.34  5.08    2.03  1.00
    Nonflavanoid Phenols:         0.13  0.66    0.36  0.12
    Proanthocyanins:              0.41  3.58    1.59  0.57
    Colour Intensity:              1.3  13.0     5.1   2.3
    Hue:                          0.48  1.71    0.96  0.23
    OD280/OD315 of diluted wines: 1.27  4.00    2.61  0.71
    Proline:                       278  1680     746   315
    ============================= ==== ===== ======= =====

    :Missing Attribute Values: None
    :Class Distribution: class_0 (59), class_1 (71), class_2 (48)
    :Creator: R.A. Fisher
    :Donor: Michael Marshall (MARSHALL%PLU@io.arc.nasa.gov)
    :Date: July, 1988

This is a copy of UCI ML Wine recognition datasets.
https://archive.ics.uci.edu/ml/machine-learning-databases/wine/wine.data

The data is the results of a chemical analysis of wines grown in the same
region in Italy by three different cultivators. There are thirteen different
measurements taken for different constituents found in the three types of
wine.

Original Owners: 

Forina, M. et al, PARVUS - 
An Extendible Package for Data Exploration, Classification and Correlation. 
Institute of Pharmaceutical and Food Analysis and Technologies,
Via Brigata Salerno, 16147 Genoa, Italy.

Citation:

Lichman, M. (2013). UCI Machine Learning Repository
[https://archive.ics.uci.edu/ml]. Irvine, CA: University of California,
School of Information and Computer Science. 

.. topic:: References

  (1) S. Aeberhard, D. Coomans and O. de Vel, 
  Comparison of Classifiers in High Dimensional Settings, 
  Tech. Rep. no. 92-02, (1992), Dept. of Computer Science and Dept. of  
  Mathematics and Statistics, James Cook University of North Queensland. 
  (Also submitted to Technometrics). 

  The data was used with many others for comparing various 
  classifiers. The classes are separable, though only RDA 
  has achieved 100% correct classification. 
  (RDA : 100%, QDA 99.4%, LDA 98.9%, 1NN 96.1% (z-transformed data)) 
  (All results using the leave-one-out technique) 

  (2) S. Aeberhard, D. Coomans and O. de Vel, 
  "THE CLASSIFICATION PERFORMANCE OF RDA" 
  Tech. Rep. no. 92-01, (1992), Dept. of Computer Science and Dept. of 
  Mathematics and Statistics, James Cook University of North Queensland. 
  (Also submitted to Journal of Chemometrics).

# split into training and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)
print('X_train:{},X_test:{}'.format(X_train.shape,X_test.shape))
X_train:(133, 2),X_test:(45, 2)
# instantiate the KNN model
clf = neighbors.KNeighborsClassifier(n_neighbors=15,weights='distance')
# train the model
clf.fit(X_train, y_train)
KNeighborsClassifier(n_neighbors=15, weights='distance')
print('Test set score: {:.2f}'.format(clf.score(X_test,y_test)))
print('Training set score: {:.2f}'.format(clf.score(X_train,y_train)))
Test set score: 0.89
Training set score: 1.00
# define the colors for the decision regions and the scatter points
cmap_light = ListedColormap(['#FFAAAA','#AAFFAA','#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000','#00FF00','#0000FF'])

# build the plotting mesh from the two feature values
x_min, x_max = X_train[:,0].min() - 1, X_train[:, 0].max() +1
y_min, y_max = X_train[:,1].min() - 1, X_train[:, 1].max() +1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = clf.predict(np.c_[xx.ravel(),yy.ravel()])

# assign a different color to each class region
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx,yy,Z,cmap=cmap_light)
# plot the samples as scatter points
plt.scatter(X[:,0],X[:,1],c = y, cmap=cmap_bold, edgecolor='k')
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:(k =15, weight = 'distance')")
Text(0.5, 1.0, "Classifier:(k =15, weight = 'distance')")

# predict the class of a new wine sample
X_new = np.array([[13.2,2.77]])
prediction = clf.predict(X_new)
print("预测新红酒的分类为:{}".format(wine['target_names'][prediction]))
预测新红酒的分类为:['class_2']
# define the colors for the decision regions and the scatter points
cmap_light = ListedColormap(['#FFAAAA','#AAFFAA','#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000','#00FF00','#0000FF'])

# build the plotting mesh from the two feature values
x_min, x_max = X_train[:,0].min() - 1, X_train[:, 0].max() +1
y_min, y_max = X_train[:,1].min() - 1, X_train[:, 1].max() +1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = clf.predict(np.c_[xx.ravel(),yy.ravel()])

# assign a different color to each class region
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx,yy,Z,cmap=cmap_light)
# plot the samples as scatter points
plt.scatter(X[:,0],X[:,1],c = y, cmap=cmap_bold, edgecolor='k')
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:(k =15, weight = 'distance')")
plt.scatter(13.2,2.77,marker='*',c='red',s=200)
​
<matplotlib.collections.PathCollection at 0x18dabc82970>

# create two empty lists to hold the training-set and the test-set scores
training_score = []
test_score = []
neighbors_amount = range(1,21)  # sweep n_neighbors from 1 to 20
for n_neighbors in neighbors_amount:
    clf3 = neighbors.KNeighborsClassifier(n_neighbors=n_neighbors)
    clf3.fit(X_train, y_train)
    # record the score for each value of n_neighbors
    training_score.append(clf3.score(X_train,y_train))
    test_score.append(clf3.score(X_test,y_test))
plt.plot(neighbors_amount,training_score,label = "training score")
plt.plot(neighbors_amount,test_score,label = "test score")
plt.ylabel("score")
plt.xlabel("n_neighbors")
plt.legend()
plt.show()

Naive Bayes in sklearn
BernoulliNB: Bernoulli Naive Bayes (binary / Bernoulli-distributed features)
GaussianNB: Gaussian Naive Bayes (continuous features that roughly follow a normal distribution)
MultinomialNB: multinomial Naive Bayes (multinomially distributed, i.e. non-negative count-like features)
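As a quick sketch (with made-up toy data), all three variants expose the same fit/predict interface; the choice depends only on how the features are distributed:
# minimal sketch: the three Naive Bayes variants share the same fit/predict API
import numpy as np
from sklearn.naive_bayes import BernoulliNB, GaussianNB, MultinomialNB

X_demo = np.array([[0, 1, 3],
                   [1, 0, 2],
                   [0, 0, 5],
                   [1, 1, 1]])          # non-negative, so MultinomialNB is applicable
y_demo = np.array([0, 0, 1, 1])
for model in (BernoulliNB(), GaussianNB(), MultinomialNB()):
    model.fit(X_demo, y_demo)
    print(model.__class__.__name__, model.predict([[0, 1, 4]]))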
# import the dataset generator
from sklearn.datasets import make_blobs
# randomly generate 500 samples in 5 classes
X, y = make_blobs(n_samples=500,centers=5,random_state=8)
from sklearn.naive_bayes import BernoulliNB
from sklearn.naive_bayes import GaussianNB
from sklearn.naive_bayes import MultinomialNB
%matplotlib inline
plt.scatter(X[:,0],X[:,1],c = y, cmap=plt.cm.spring,edgecolors='k')
<matplotlib.collections.PathCollection at 0x18db04d6f70>

# split into training and test sets
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test = train_test_split(X,y,random_state=8)
# build a Bernoulli (binomial) Naive Bayes classifier
nb = BernoulliNB()
nb.fit(X_train,y_train)
print('Model score: {:.3f}'.format(nb.score(X_test,y_test)))
Model score: 0.544
# visualize the decision boundary
plt.figure(dpi=300)
x_min,x_max = X[:,0].min()-0.5,X[:,0].max()+0.5
y_min,y_max = X[:,1].min()-0.5,X[:,1].max()+0.5
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = nb.predict(np.c_[xx.ravel(),yy.ravel()])
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X_train[:,0],X_train[:,1],c = y_train, cmap=plt.cm.cool, edgecolors='k')
plt.scatter(X_test[:,0],X_test[:,1],c = y_test, cmap=plt.cm.cool, marker='*',
           edgecolors='k')
​
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:BernoulliNB")
plt.show()

# build a Gaussian Naive Bayes classifier
gnb = GaussianNB()
gnb.fit(X_train,y_train)
print('Model score: {:.3f}'.format(gnb.score(X_test,y_test)))
Model score: 0.968
# visualize the decision boundary
plt.figure(dpi=300)
x_min,x_max = X[:,0].min()-0.5,X[:,0].max()+0.5
y_min,y_max = X[:,1].min()-0.5,X[:,1].max()+0.5
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = gnb.predict(np.c_[xx.ravel(),yy.ravel()])
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X_train[:,0],X_train[:,1],c = y_train, cmap=plt.cm.cool, edgecolors='k')
plt.scatter(X_test[:,0],X_test[:,1],c = y_test, cmap=plt.cm.cool, marker='*',
           edgecolors='k')
​
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:GaussianNB")
plt.show()

# build a multinomial Naive Bayes classifier
mnb = MultinomialNB()
mnb.fit(X_train,y_train)
print('Model score: {:.3f}'.format(mnb.score(X_test,y_test)))
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
Input In [68], in <cell line: 3>()
      1 # build a multinomial Naive Bayes classifier
      2 mnb = MultinomialNB()
----> 3 mnb.fit(X_train,y_train)
      4 print('Model score: {:.3f}'.format(mnb.score(X_test,y_test)))

File ~\anaconda3\lib\site-packages\sklearn\naive_bayes.py:690, in _BaseDiscreteNB.fit(self, X, y, sample_weight)
    688 n_classes = Y.shape[1]
    689 self._init_counters(n_classes, n_features)
--> 690 self._count(X, Y)
    691 alpha = self._check_alpha()
    692 self._update_feature_log_prob(alpha)

File ~\anaconda3\lib\site-packages\sklearn\naive_bayes.py:863, in MultinomialNB._count(self, X, Y)
    861 def _count(self, X, Y):
    862     """Count and smooth feature occurrences."""
--> 863     check_non_negative(X, "MultinomialNB (input X)")
    864     self.feature_count_ += safe_sparse_dot(Y.T, X)
    865     self.class_count_ += Y.sum(axis=0)

File ~\anaconda3\lib\site-packages\sklearn\utils\validation.py:1249, in check_non_negative(X, whom)
   1246     X_min = X.min()
   1248 if X_min < 0:
-> 1249     raise ValueError("Negative values in data passed to %s" % whom)

ValueError: Negative values in data passed to MultinomialNB (input X)

X
array([[-4.43344765e+00, -9.14511574e+00],
       [-5.06998128e+00, -9.75464122e+00],
       [ 6.54464509e+00,  8.99873511e-01],
       ...,
       [ 5.94356564e+00,  6.09246595e-01],
       [-2.51818591e+00, -1.62273821e+00]])
(500x2 array output truncated; note that X contains negative values, which is exactly what MultinomialNB rejects)
# import the data preprocessing tool
from sklearn.preprocessing import MinMaxScaler
scaler = MinMaxScaler()
scaler.fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_test_scaled = scaler.transform(X_test)
# build a multinomial Naive Bayes classifier on the scaled (non-negative) data
mnb = MultinomialNB()
mnb.fit(X_train_scaled,y_train)
print('Model score: {:.3f}'.format(mnb.score(X_test_scaled,y_test)))
Model score: 0.320
# visualize the decision boundary
plt.figure(dpi=300)
Z = mnb.predict(scaler.transform(np.c_[xx.ravel(),yy.ravel()]))
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X_train[:,0],X_train[:,1],c = y_train, cmap=plt.cm.cool, edgecolors='k')
plt.scatter(X_test[:,0],X_test[:,1],c = y_test, cmap=plt.cm.cool, marker='*',
           edgecolors='k')
​
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("Classifier:MultinomialNB")
plt.show()

# assign X and y as numpy arrays (s=0, m=1, l=2; yes=1, no=0)
X = np.array([[0,0,0],
            [0,2,1],
            [2,1,1],
            [1,1,1],
            [2,1,1],
            [1,2,0],
            [1,0,0],
            [2,1,0],
            [1,0,1],
            [0,0,1]])
y = np.array([0,1,1,1,1,1,0,1,1,0])
# build a multinomial Naive Bayes classifier
clf = MultinomialNB()
clf.fit(X,y)
# predict the class of the new account
new_account = [[1,0,0]]
pre = clf.predict(new_account)
if pre == [1]:
    print("This account is likely a fake account!")
else:
    print("This account appears to be a genuine account!")
print(pre)
This account is likely a fake account!
[1]
Wisconsin breast cancer classification
# load the dataset
from sklearn.datasets import load_breast_cancer
cancer = load_breast_cancer()
cancer.keys()
print('Tumor classes:',cancer['target_names'])
print('\nTumor features:\n',cancer['feature_names'])
Tumor classes: ['malignant' 'benign']

Tumor features:
 ['mean radius' 'mean texture' 'mean perimeter' 'mean area'
 'mean smoothness' 'mean compactness' 'mean concavity'
 'mean concave points' 'mean symmetry' 'mean fractal dimension'
 'radius error' 'texture error' 'perimeter error' 'area error'
 'smoothness error' 'compactness error' 'concavity error'
 'concave points error' 'symmetry error' 'fractal dimension error'
 'worst radius' 'worst texture' 'worst perimeter' 'worst area'
 'worst smoothness' 'worst compactness' 'worst concavity'
 'worst concave points' 'worst symmetry' 'worst fractal dimension']
X, y = cancer.data, cancer.target
print(X.shape,y.shape)
(569, 30) (569,)
# split into training and test sets
X_train,X_test,y_train,y_test = train_test_split(X,y,test_size=0.25,random_state=8)
print(X_train.shape,X_test.shape)
(426, 30) (143, 30)
print(cancer)
{'data': array([[1.799e+01, 1.038e+01, 1.228e+02, ..., 2.654e-01, 4.601e-01,
        1.189e-01],
       ...,
       [7.760e+00, 2.454e+01, 4.792e+01, ..., 0.000e+00, 2.871e-01,
        7.039e-02]]),
 'target': array([0, 0, 0, ..., 0, 0, 1]),
 'frame': None,
 'target_names': array(['malignant', 'benign'], dtype='<U9'),
 'DESCR': '.. _breast_cancer_dataset:\n\nBreast cancer wisconsin (diagnostic) dataset\n...',
 'feature_names': array(['mean radius', 'mean texture', ..., 'worst fractal dimension'], dtype='<U23'),
 'filename': 'breast_cancer.csv', 'data_module': 'sklearn.datasets.data'}
(full Bunch output truncated)
# build a Gaussian Naive Bayes classifier
gnb = GaussianNB()
gnb.fit(X_train,y_train)
print('Test set score: {:.3f}'.format(gnb.score(X_test,y_test)))
print('Training set score: {:.3f}'.format(gnb.score(X_train,y_train)))
Test set score: 0.937
Training set score: 0.948
# pick one sample at random to test
print('Predicted class: {}'.format(gnb.predict([X[312]])))
print('True class:',y[312])
Predicted class: [1]
True class: 1
# output the predicted class probabilities
gnb.predict_proba([X[312]])
array([[4.1780099e-14, 1.0000000e+00]])
The KMeans clustering algorithm
class sklearn.cluster.KMeans(
    n_clusters = 8    split the data into 8 clusters
    init              how the initial cluster centers are chosen ('k-means++' or 'random')
)
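A minimal sketch of instantiating KMeans with these parameters (the values are illustrative; n_init and random_state are added here only for reproducibility):
# minimal sketch: KMeans with the parameters noted above
from sklearn.cluster import KMeans

km = KMeans(
    n_clusters=8,        # number of clusters to form
    init='k-means++',    # smarter centroid seeding; 'random' is the alternative
    n_init=10,           # number of restarts with different seeds; the best inertia wins
    random_state=8,      # fixed seed for reproducibility
)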
# import the KMeans module
from sklearn.cluster import KMeans
# randomly generate a dataset of 150 samples in 3 clusters
from sklearn.datasets import make_blobs
X,y= make_blobs(n_samples=150,centers=3,random_state=8)
# visualize the data
%matplotlib inline
plt.figure(dpi = 300)
plt.scatter(X[:,0],X[:,1],c = y,cmap = plt.cm.spring,edgecolors='k')
<matplotlib.collections.PathCollection at 0x18db0e87790>

# instantiate the model
kmeans = KMeans(n_clusters=3)
# train the model
kmeans.fit(X)
KMeans(n_clusters=3)
# visualize the clustering result
# build the plotting mesh from the two feature values
x_min,x_max = X[:,0].min()-1,X[:,0].max()+1
y_min,y_max = X[:,1].min()-1,X[:,1].max()+1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = kmeans.predict(np.c_[xx.ravel(),yy.ravel()])
# assign a different color to each cluster region
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X[:,0],X[:,1],c = y, cmap=plt.cm.cool, edgecolors='k',s = 20)     
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("K-Means Cluster")
plt.show()

Attributes of the KMeans class
inertia_: the sum of squared distances from each sample to its nearest cluster center
# view the coordinates of the cluster centers
kmeans.cluster_centers_
array([[ 7.51338019,  9.44881625],
       [-5.43790266, -9.83963795],
       [ 7.21711781,  0.68887741]])
# visualize the clustering result together with the cluster centers
# build the plotting mesh from the two feature values
x_min,x_max = X[:,0].min()-1,X[:,0].max()+1
y_min,y_max = X[:,1].min()-1,X[:,1].max()+1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = kmeans.predict(np.c_[xx.ravel(),yy.ravel()])
# assign a different color to each cluster region
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=plt.cm.Pastel1)
plt.scatter(X[:,0],X[:,1],c = y, cmap=plt.cm.cool, edgecolors='k',s = 20)
plt.scatter(kmeans.cluster_centers_[:,0],kmeans.cluster_centers_[:,1],
           s=200,marker='*',c='red',label = 'centroids')
plt.legend()
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("K-Means Cluster")
plt.show()

# view the cluster label of each data point
print(kmeans.labels_)
# view the number of iterations that were run
print(kmeans.n_iter_)
# view the sum of squared distances of the samples to their nearest cluster center
print(kmeans.inertia_)  # within-cluster sum of squared errors
[0 2 1 0 1 1 2 1 2 1 1 2 0 0 2 2 2 2 0 2 0 0 2 0 1 2 0 1 1 1 0 1 2 2 0 2 0
 1 1 2 2 1 1 1 2 2 0 0 0 1 2 2 1 1 0 2 1 2 1 0 0 2 2 0 0 2 0 2 2 0 0 0 0 1
 0 2 2 1 0 0 0 1 1 1 2 2 1 0 1 2 2 1 1 2 0 1 0 1 2 2 0 0 0 2 1 1 2 1 0 2 1
 0 0 1 2 0 1 2 2 1 1 1 1 0 0 0 1 2 2 2 0 1 0 1 2 0 0 0 0 0 0 1 2 1 1 2 2 1
 1 2]
2
329.18213511898057
Choosing k with the "elbow method"
distortion = []
for i in range(1,20):
    km = KMeans(n_clusters=i,
               init='k-means++',
               random_state=8)
    km.fit(X)
    distortion.append(km.inertia_)
plt.plot(range(1,20),distortion,marker='o')
plt.xticks(range(1,20))
plt.grid(linestyle='--')
plt.xlabel('Number of clusters')
plt.ylabel("Distortion")
C:\Users\Lenovo\anaconda3\lib\site-packages\sklearn\cluster\_kmeans.py:1036: UserWarning: KMeans is known to have a memory leak on Windows with MKL, when there are less chunks than available threads. You can avoid it by setting the environment variable OMP_NUM_THREADS=1.
  warnings.warn(
Text(0, 0.5, 'Distortion')

The silhouette coefficient: combines cohesion and separation
# compute and plot the silhouette coefficients
km = KMeans(n_clusters=3)
y_km = km.fit_predict(X)
from matplotlib import cm
from sklearn.metrics import silhouette_samples
cluster_labels = np.unique(y_km)
n_clusters = cluster_labels.shape[0]
silhouette_vals = silhouette_samples(X,y_km,metric='euclidean')
y_ax_lower, y_ax_upper = 0,0
yticks=[]
for i,c in enumerate(cluster_labels):
    c_silhouette_vals = silhouette_vals[y_km == c]
    c_silhouette_vals.sort()
    y_ax_upper += len(c_silhouette_vals)
    color = cm.jet(i/n_clusters)
    plt.barh(range(y_ax_lower,y_ax_upper),
            c_silhouette_vals,
            height=1.0,
            edgecolor = 'none',
            color = color)
    yticks.append((y_ax_lower+y_ax_upper)/2)
    y_ax_lower += len(c_silhouette_vals)
silhouette_avg = np.mean(silhouette_vals)
plt.axvline(silhouette_avg,
           color = 'red',
           linestyle='--')
plt.ylabel("Cluster")
plt.xlabel("Silhouette coefficient")
Text(0.5, 0, 'Silhouette coefficient')

# compute and plot the silhouette coefficients (k=2)
km = KMeans(n_clusters=2)
y_km = km.fit_predict(X)
from matplotlib import cm
from sklearn.metrics import silhouette_samples
cluster_labels = np.unique(y_km)
n_clusters = cluster_labels.shape[0]
silhouette_vals = silhouette_samples(X,y_km,metric='euclidean')
y_ax_lower, y_ax_upper = 0,0
yticks=[]
for i,c in enumerate(cluster_labels):
    c_silhouette_vals = silhouette_vals[y_km == c]
    c_silhouette_vals.sort()
    y_ax_upper += len(c_silhouette_vals)
    color = cm.jet(i/n_clusters)
    plt.barh(range(y_ax_lower,y_ax_upper),
            c_silhouette_vals,
            height=1.0,
            edgecolor = 'none',
            color = color)
    yticks.append((y_ax_lower+y_ax_upper)/2)
    y_ax_lower += len(c_silhouette_vals)
silhouette_avg = np.mean(silhouette_vals)
plt.axvline(silhouette_avg,
           color = 'red',
           linestyle='--')
plt.ylabel("Cluster")
plt.xlabel("Silhouette coefficient")
Text(0.5, 0, 'Silhouette coefficient')
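To compare several candidate k values with a single number, the per-sample values can be averaged; a minimal sketch using sklearn's silhouette_score (which is just the mean of the silhouette_samples plotted above) on the same X:
# minimal sketch: average silhouette score for a range of k values
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score

for k in range(2, 7):
    labels = KMeans(n_clusters=k, random_state=8).fit_predict(X)
    print(k, round(silhouette_score(X, labels, metric='euclidean'), 3))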

DBSCAN in practice
min_samples: the minimum number of samples in a point's neighborhood for it to be counted as a core point
p: the exponent of the Minkowski distance
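A minimal, self-contained sketch of how these parameters are set on the estimator (eps, the neighborhood radius, is the other key parameter used in the cells below; the moons data here mirrors the cell that follows):
# minimal sketch: DBSCAN parameters and the meaning of its labels
import numpy as np
from sklearn.cluster import DBSCAN
from sklearn.datasets import make_moons

X_demo, _ = make_moons(n_samples=200, noise=0.05, random_state=8)
db_demo = DBSCAN(
    eps=0.2,            # neighborhood radius
    min_samples=5,      # minimum points within eps for a core point
    metric='minkowski',
    p=2,                # p=2 makes the Minkowski metric Euclidean
)
labels_demo = db_demo.fit_predict(X_demo)
print(np.unique(labels_demo))   # a label of -1 (if present) marks noise points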
# generate a synthetic dataset
from sklearn.datasets import make_moons
X,y = make_moons(n_samples=200,
                noise=0.05,
                random_state=8)
# visualize the data
plt.scatter(X[:,0],X[:,1],c=y,cmap=plt.cm.spring,edgecolors='k')
<matplotlib.collections.PathCollection at 0x18dbc0318b0>

X1,y1 = make_moons(n_samples=200,
                noise=0,
                random_state=8)
# visualize the data (noise-free version)
plt.figure(dpi=300)
plt.scatter(X1[:,0],X1[:,1],c=y1,cmap=plt.cm.spring,edgecolors='k')
<matplotlib.collections.PathCollection at 0x18dbc0b1c40>

# cluster with the K-Means algorithm
# instantiate the model
kmeans = KMeans(n_clusters=2,init='random')
# fit the model and predict
y_km = kmeans.fit_predict(X)
# visualize the result
# define the colors for the decision regions and the scatter points
cmap_light = ListedColormap(['#FFAAAA','#AAFFAA','#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000','#00FF00','#0000FF'])

# build the plotting mesh from the two feature values
x_min, x_max = X[:,0].min() - 1, X[:, 0].max() +1
y_min, y_max = X[:,1].min() - 1, X[:, 1].max() +1
xx,yy = np.meshgrid(np.arange(x_min,x_max,.02),
                    np.arange(y_min,y_max,.02))
Z = kmeans.predict(np.c_[xx.ravel(),yy.ravel()])

# assign a different color to each cluster region
Z = Z.reshape(xx.shape)
plt.pcolormesh(xx,yy,Z,cmap=cmap_light)
# plot the samples as scatter points
plt.scatter(X[:,0],X[:,1],c = y, cmap=plt.cm.spring, edgecolor='k',s=20)
plt.scatter(kmeans.cluster_centers_[:,0],kmeans.cluster_centers_[:,1],
           s=200,marker='*',c='red',label = 'centroids')
plt.legend()
plt.grid()
plt.xlim(xx.min(),xx.max())
plt.ylim(yy.min(),yy.max())
plt.title("K-Means Cluster")
​
Text(0.5, 1.0, 'K-Means Cluster')

# visualize the result
plt.scatter(X[y_km==0,0],
           X[y_km==0,1],
           c='red',
           marker='o',
           s=40,
           label='cluster 1',
           edgecolors='k')
plt.scatter(X[y_km==1,0],
           X[y_km==1,1],
           c='green',
           marker='s',
           s=40,
           label='cluster 2',
           edgecolors='k')
plt.title('K-Means Clustering')
plt.legend()
<matplotlib.legend.Legend at 0x18dbc218a60>

# cluster with DBSCAN
# import the DBSCAN module
from sklearn.cluster import DBSCAN
# instantiate the model
db = DBSCAN(eps=0.2,min_samples=5)
# fit the model and predict
y_db = db.fit_predict(X)
# visualize the result
plt.scatter(X[y_db==0,0],
           X[y_db==0,1],
           c='red',
           marker='o',
           s=40,
           label='cluster 1',
           edgecolors='k')
plt.scatter(X[y_db==1,0],
           X[y_db==1,1],
           c='green',
           marker='s',
           s=40,
           label='cluster 2',
           edgecolors='k')
plt.title('DBSCAN Clustering')
plt.legend()
<matplotlib.legend.Legend at 0x18dea64aee0>

#DBSCAN
%matplotlib inline
dbscan_data=pd.read_csv(r'C:\Users\Lenovo\dbscan_data.csv')
dbscan_data.head()
plt.scatter(dbscan_data['x1'],dbscan_data['x2'])
<matplotlib.collections.PathCollection at 0x18dea7dee20>

db = DBSCAN(eps=0.5,min_samples=100)
db.fit(dbscan_data)
DBSCAN(min_samples=100)
labels6 = db.labels_
dbscan_data['cluster_db']=labels6
colors=np.array(['red','green','blue','yellow','teal','orange','cyan','black','goldenrod','tomato'])
plt.figure(figsize=(15,8))
plt.scatter(dbscan_data['x1'],dbscan_data['x2'],c=colors[dbscan_data['cluster_db']])
<matplotlib.collections.PathCollection at 0x18dea7e60a0>

db = DBSCAN(eps=0.3,min_samples=50).fit(dbscan_data)
labels6 = db.labels_
plt.figure(figsize=(15,8))
dbscan_data['cluster_db']=labels6
colors=np.array(['red','green','blue','yellow','teal','orange','cyan','black','goldenrod','tomato','#123456','#563211','green'])
plt.scatter(dbscan_data['x1'],dbscan_data['x2'],c=colors[dbscan_data['cluster_db']])
​
<matplotlib.collections.PathCollection at 0x18dee135a00>