Machine Learning with sklearn 0.19 Clustering Algorithms: K-Means

    # K-Means clustering algorithm

    import numpy as np
    import pandas as pd
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import matplotlib.colors
    import sklearn.datasets as ds
    from sklearn.cluster import KMeans  # import the KMeans estimator

    #解决中文显示问题
    mpl.rcParams[‘font.sans-serif’] = [u’SimHei’]
    mpl.rcParams[‘axes.unicode_minus’] = False

    # Generate synthetic data
    N = 1500
    centers = 4
    # make_blobs: generator for clustering test data
    data, y = ds.make_blobs(N, n_features=2, centers=centers, random_state=28)

    # data2 is drawn with the same settings here; the plots below treat it as the
    # "different variance" case, which would normally use a varying cluster_std
    data2, y2 = ds.make_blobs(N, n_features=2, centers=centers, random_state=28)
    # data3: clusters with deliberately unbalanced sample counts (200/100/10/50)
    data3 = np.vstack((data[y == 0][:200], data[y == 1][:100], data[y == 2][:10], data[y == 3][:50]))
    y3 = np.array([0] * 200 + [1] * 100 + [2] * 10 + [3] * 50)

    # Build and fit the model
    km = KMeans(n_clusters=centers, random_state=28)
    km.fit(data, y)  # the label argument is accepted but ignored by KMeans
    y_hat = km.predict(data)
    print("Sum of squared distances of all samples to their cluster centers (inertia_):", km.inertia_)
    print("Mean squared distance to the assigned cluster center:", (km.inertia_ / N))
    print("Cluster centers:", km.cluster_centers_)

    y_hat2 = km.fit_predict(data2)
    y_hat3 = km.fit_predict(data3)

    def expandBorder(a, b):
        d = (b - a) * 0.1
        return a - d, b + d

    # Plotting
    cm = mpl.colors.ListedColormap(list("rgbmyc"))
    plt.figure(figsize=(15, 9), facecolor="w")
    plt.subplot(241)
    plt.scatter(data[:, 0], data[:, 1], c=y, s=30, cmap=cm, edgecolors="none")

    x1_min, x2_min = np.min(data, axis=0)
    x1_max, x2_max = np.max(data, axis=0)
    x1_min, x1_max = expandBorder(x1_min, x1_max)
    x2_min, x2_max = expandBorder(x2_min, x2_max)
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title("Original data")
    plt.grid(True)

    plt.subplot(242)
    plt.scatter(data[:, 0], data[:, 1], c=y_hat, s=30, cmap=cm, edgecolors='none')
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'K-Means clustering result')
    plt.grid(True)

    # Apply a linear transformation to the data and re-cluster
    m = np.array(((1, 1), (0.5, 5)))
    data_r = data.dot(m)
    y_r_hat = km.fit_predict(data_r)
    plt.subplot(243)
    plt.scatter(data_r[:, 0], data_r[:, 1], c=y, s=30, cmap=cm, edgecolors='none')

    x1_min, x2_min = np.min(data_r, axis=0)
    x1_max, x2_max = np.max(data_r, axis=0)
    x1_min, x1_max = expandBorder(x1_min, x1_max)
    x2_min, x2_max = expandBorder(x2_min, x2_max)

    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'Original data after the linear transform')
    plt.grid(True)

    plt.subplot(244)
    plt.scatter(data_r[:, 0], data_r[:, 1], c=y_r_hat, s=30, cmap=cm, edgecolors='none')
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'K-Means prediction after the linear transform')
    plt.grid(True)

    plt.subplot(245)
    plt.scatter(data2[:, 0], data2[:, 1], c=y2, s=30, cmap=cm, edgecolors='none')
    x1_min, x2_min = np.min(data2, axis=0)
    x1_max, x2_max = np.max(data2, axis=0)
    x1_min, x1_max = expandBorder(x1_min, x1_max)
    x2_min, x2_max = expandBorder(x2_min, x2_max)
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'Original data with differing cluster variances')
    plt.grid(True)

    plt.subplot(246)
    plt.scatter(data2[:, 0], data2[:, 1], c=y_hat2, s=30, cmap=cm, edgecolors='none')
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'K-Means result on clusters with differing variances')
    plt.grid(True)

    plt.subplot(247)
    plt.scatter(data3[:, 0], data3[:, 1], c=y3, s=30, cmap=cm, edgecolors='none')
    x1_min, x2_min = np.min(data3, axis=0)
    x1_max, x2_max = np.max(data3, axis=0)
    x1_min, x1_max = expandBorder(x1_min, x1_max)
    x2_min, x2_max = expandBorder(x2_min, x2_max)
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'Original data with unbalanced cluster sizes')
    plt.grid(True)

    plt.subplot(248)
    plt.scatter(data3[:, 0], data3[:, 1], c=y_hat3, s=30, cmap=cm, edgecolors='none')
    plt.xlim((x1_min, x1_max))
    plt.ylim((x2_min, x2_max))
    plt.title(u'K-Means result on clusters with unbalanced sizes')
    plt.grid(True)

    plt.tight_layout(pad=2, rect=(0, 0, 1, 0.97))
    plt.suptitle(u'Effect of the data distribution on K-Means clustering', fontsize=18)
    plt.savefig("kmeans_clustering.png")
    plt.show()

    # Sample output:
    Sum of squared distances of all samples to their cluster centers (inertia_): 2592.9990199
    Mean squared distance to the assigned cluster center: 1.72866601327
    Cluster centers: [[ -7.44342199e+00  -2.00152176e+00]
     [  5.80338598e+00   2.75272962e-03]
     [ -6.36176159e+00   6.94997331e+00]
     [  4.34372837e+00   1.33977807e+00]]
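
    To make the printed inertia_ value concrete: it is the sum of squared distances from each sample to its assigned cluster center, so it can be recomputed from labels_ and cluster_centers_. Below is a minimal, self-contained sketch of that check (my own illustration, not part of the original post, using a smaller blob dataset):

    # Sanity check (illustrative sketch): recompute inertia_ by hand
    import numpy as np
    from sklearn.cluster import KMeans
    from sklearn.datasets import make_blobs

    X, _ = make_blobs(n_samples=200, n_features=2, centers=3, random_state=28)
    km_check = KMeans(n_clusters=3, random_state=28).fit(X)

    diff = X - km_check.cluster_centers_[km_check.labels_]  # offset of each sample from its center
    manual_inertia = np.sum(diff ** 2)                       # sum of squared distances
    print(manual_inertia, km_check.inertia_)                 # the two values agree up to float error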

    Key sklearn concepts used in the code:
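
    As a compact recap (a sketch of my own, not taken from the original post), the two sklearn calls that the script above relies on are annotated below:

    # Annotated recap (illustrative) of the sklearn 0.19 APIs used above
    from sklearn.cluster import KMeans
    from sklearn.datasets import make_blobs

    # make_blobs: generates isotropic Gaussian blobs for clustering demos;
    # returns the samples X and the ground-truth blob index y of each sample.
    X, y = make_blobs(n_samples=1500, n_features=2, centers=4, random_state=28)

    # KMeans: n_clusters is the k to fit; random_state fixes the centroid seeding.
    km = KMeans(n_clusters=4, random_state=28)
    km.fit(X)              # ground-truth labels are not needed for fitting
    y_hat = km.predict(X)  # cluster index assigned to each sample
    # Fitted attributes used in the script:
    #   km.cluster_centers_  -- (n_clusters, n_features) array of centroids
    #   km.labels_           -- cluster index of each training sample
    #   km.inertia_          -- sum of squared distances to the assigned centroids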



    #!/usr/bin/env python
    # -*- coding:utf-8 -*-
    # Author:ZhengzhengLiu
    

    # Comparison of K-Means and Mini Batch K-Means

    import time
    import numpy as np
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import matplotlib.colors
    from sklearn.cluster import KMeans,MiniBatchKMeans
    from sklearn.datasets import make_blobs
    from sklearn.metrics.pairwise import pairwise_distances_argmin

    # Font settings so non-ASCII labels and the minus sign display correctly
    mpl.rcParams['font.sans-serif'] = [u'SimHei']
    mpl.rcParams['axes.unicode_minus'] = False

    # Initialize three cluster centers
    centers = [[1, 1], [-1, -1], [1, -1]]
    clusters = len(centers)  # number of clusters = 3
    # Generate 300 two-dimensional samples around the three centers, standard deviation 0.7
    X, Y = make_blobs(n_samples=300, centers=centers, cluster_std=0.7, random_state=28)

    # Build the K-Means model
    k_means = KMeans(init="k-means++", n_clusters=clusters, random_state=28)
    t0 = time.time()
    k_means.fit(X)  # train the model
    km_batch = time.time() - t0  # time spent training K-Means
    print("K-Means training time: %.4fs" % km_batch)

    # Build the Mini Batch K-Means model
    batch_size = 100  # size of each mini-batch
    mbk = MiniBatchKMeans(init="k-means++", n_clusters=clusters, batch_size=batch_size, random_state=28)
    t0 = time.time()
    mbk.fit(X)
    mbk_batch = time.time() - t0
    print("Mini Batch K-Means training time: %.4fs" % mbk_batch)

    # Predictions
    km_y_hat = k_means.predict(X)
    mbk_y_hat = mbk.predict(X)

    # Cluster centers, and the matching between the two sets of centers
    k_means_cluster_center = k_means.cluster_centers_
    mbk_cluster_center = mbk.cluster_centers_
    print("K-Means cluster centers:\n center=", k_means_cluster_center)
    print("Mini Batch K-Means cluster centers:\n center=", mbk_cluster_center)
    # order[i] = index of the Mini Batch K-Means center nearest to K-Means center i
    order = pairwise_distances_argmin(k_means_cluster_center, mbk_cluster_center)

    # Plotting
    plt.figure(figsize=(12, 6), facecolor="w")
    plt.subplots_adjust(left=0.05, right=0.95, bottom=0.05, top=0.9)
    cm = mpl.colors.ListedColormap(['#FFC2CC', '#C2FFCC', '#CCC2FF'])
    cm2 = mpl.colors.ListedColormap(['#FF0000', '#00FF00', '#0000FF'])

    # Subplot 1: original data
    plt.subplot(221)
    plt.scatter(X[:, 0], X[:, 1], c=Y, s=6, cmap=cm, edgecolors="none")
    plt.title(u"Original data distribution")
    plt.xticks(())
    plt.yticks(())
    plt.grid(True)

    # Subplot 2: K-Means clustering result
    plt.subplot(222)
    plt.scatter(X[:, 0], X[:, 1], c=km_y_hat, s=6, cmap=cm, edgecolors='none')
    plt.scatter(k_means_cluster_center[:, 0], k_means_cluster_center[:, 1], c=range(clusters), s=60, cmap=cm2, edgecolors='none')
    plt.title(u'K-Means clustering result')
    plt.xticks(())
    plt.yticks(())
    plt.text(-3.8, 3, 'train time: %.2fms' % (km_batch * 1000))
    plt.grid(True)

    # Subplot 3: Mini Batch K-Means clustering result
    plt.subplot(223)
    plt.scatter(X[:, 0], X[:, 1], c=mbk_y_hat, s=6, cmap=cm, edgecolors='none')
    plt.scatter(mbk_cluster_center[:, 0], mbk_cluster_center[:, 1], c=range(clusters), s=60, cmap=cm2, edgecolors='none')
    plt.title(u'Mini Batch K-Means clustering result')
    plt.xticks(())
    plt.yticks(())
    plt.text(-3.8, 3, 'train time: %.2fms' % (mbk_batch * 1000))
    plt.grid(True)
    plt.savefig("kmeans_vs_mini_batch_kmeans.png")
    plt.show()

    # Sample output:
    K-Means training time: 0.2260s
    Mini Batch K-Means training time: 0.0230s
    K-Means cluster centers:
     center= [[ 0.96091862  1.13741775]
     [ 1.1979318  -1.02783007]
     [-0.98673669 -1.09398768]]
    Mini Batch K-Means cluster centers:
     center= [[ 1.34304199 -1.01641075]
     [ 0.83760683  1.01229021]
     [-0.92702179 -1.08205992]]
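
    The script computes order with pairwise_distances_argmin but does not use it further; its usual role in this comparison is to match each K-Means center to its nearest Mini Batch K-Means center so the two label sets can be compared point by point. A minimal sketch of that idea (my own addition, using the same data-generation settings as the script):

    # Illustrative sketch: align the two sets of centers, then count label agreement
    import numpy as np
    from sklearn.cluster import KMeans, MiniBatchKMeans
    from sklearn.datasets import make_blobs
    from sklearn.metrics.pairwise import pairwise_distances_argmin

    X, _ = make_blobs(n_samples=300, centers=[[1, 1], [-1, -1], [1, -1]],
                      cluster_std=0.7, random_state=28)
    km = KMeans(n_clusters=3, random_state=28).fit(X)
    mbk = MiniBatchKMeans(n_clusters=3, batch_size=100, random_state=28).fit(X)

    # order[i] = index of the MiniBatchKMeans center closest to K-Means center i
    order = pairwise_distances_argmin(km.cluster_centers_, mbk.cluster_centers_)
    # Re-assign samples against the reordered centers so both labelings share one index space
    mbk_labels_aligned = pairwise_distances_argmin(X, mbk.cluster_centers_[order])
    agreement = np.mean(km.labels_ == mbk_labels_aligned)
    print("fraction of samples assigned to the same matched cluster:", agreement)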



    5. Evaluation Metrics for Clustering Algorithms
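
    The metrics exercised in the script below compare a predicted clustering against the ground-truth labels while ignoring how the clusters happen to be numbered. As a tiny hand-made illustration of the four scoring functions used in the script (my own example, not from the original post):

    # Toy example: label-based clustering metrics ignore the cluster numbering,
    # so a permuted but otherwise matching assignment still scores highly.
    from sklearn import metrics

    y_true = [0, 0, 0, 1, 1, 1, 2, 2, 2]
    # Same grouping with cluster ids permuted (0->2, 1->0, 2->1) and one mistake
    y_pred = [2, 2, 2, 0, 0, 1, 1, 1, 1]

    for f in (metrics.adjusted_rand_score,
              metrics.v_measure_score,
              metrics.adjusted_mutual_info_score,
              metrics.mutual_info_score):
        print(f.__name__, f(y_true, y_pred))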

    #!/usr/bin/env python
    # -*- coding:utf-8 -*-
    # Author:ZhengzhengLiu
    

    # Clustering evaluation

    import time
    import numpy as np
    import matplotlib as mpl
    import matplotlib.pyplot as plt
    import matplotlib.colors
    from sklearn.cluster import KMeans,MiniBatchKMeans
    from sklearn import metrics
    from sklearn.metrics.pairwise import pairwise_distances_argmin
    from sklearn.datasets import make_blobs

    # Font settings so non-ASCII labels and the minus sign display correctly
    mpl.rcParams['font.sans-serif'] = [u'SimHei']
    mpl.rcParams['axes.unicode_minus'] = False

    # Initialize three cluster centers
    centers = [[1, 1], [-1, -1], [1, -1]]
    clusters = len(centers)  # number of clusters = 3
    # Generate 300 two-dimensional samples around the three centers, standard deviation 0.7
    X, Y = make_blobs(n_samples=300, centers=centers, cluster_std=0.7, random_state=28)

    # Build the K-Means model
    k_means = KMeans(init="k-means++", n_clusters=clusters, random_state=28)
    t0 = time.time()
    k_means.fit(X)  # train the model
    km_batch = time.time() - t0  # time spent training K-Means
    print("K-Means training time: %.4fs" % km_batch)

    # Build the Mini Batch K-Means model
    batch_size = 100  # size of each mini-batch
    mbk = MiniBatchKMeans(init="k-means++", n_clusters=clusters, batch_size=batch_size, random_state=28)
    t0 = time.time()
    mbk.fit(X)
    mbk_batch = time.time() - t0
    print("Mini Batch K-Means training time: %.4fs" % mbk_batch)

    # Cluster assignments from the two fitted models
    km_y_hat = k_means.labels_
    mbkm_y_hat = mbk.labels_

    k_means_cluster_centers = k_means.cluster_centers_
    mbk_means_cluster_centers = mbk.cluster_centers_
    print("K-Means cluster centers:\ncenter=", k_means_cluster_centers)
    print("Mini Batch K-Means cluster centers:\ncenter=", mbk_means_cluster_centers)
    # order[i] = index of the Mini Batch K-Means center nearest to K-Means center i
    order = pairwise_distances_argmin(k_means_cluster_centers,
                                      mbk_means_cluster_centers)

    # Evaluation
    # 1. Scoring functions to apply
    score_funcs = [
        metrics.adjusted_rand_score,         # ARI (adjusted Rand index)
        metrics.v_measure_score,             # V-measure (harmonic mean of homogeneity and completeness)
        metrics.adjusted_mutual_info_score,  # AMI (adjusted mutual information)
        metrics.mutual_info_score,           # MI (mutual information)
    ]

    # 2. Evaluate both clusterings with each scoring function
    for score_func in score_funcs:
        t0 = time.time()
        km_scores = score_func(Y, km_y_hat)
        print("K-Means: %s = %.5f; computed in %0.3fs" % (score_func.__name__, km_scores, time.time() - t0))

        t0 = time.time()
        mbkm_scores = score_func(Y, mbkm_y_hat)
        print("Mini Batch K-Means: %s = %.5f; computed in %0.3fs\n" % (score_func.__name__, mbkm_scores, time.time() - t0))

    # Sample output:
    K-Means training time: 0.6350s
    Mini Batch K-Means training time: 0.0900s
    K-Means cluster centers:
    center= [[ 0.96091862  1.13741775]
     [ 1.1979318  -1.02783007]
     [-0.98673669 -1.09398768]]
    Mini Batch K-Means cluster centers:
    center= [[ 1.34304199 -1.01641075]
     [ 0.83760683  1.01229021]
     [-0.92702179 -1.08205992]]
    K-Means: adjusted_rand_score = 0.72566; computed in 0.071s
    Mini Batch K-Means: adjusted_rand_score = 0.69544; computed in 0.001s

    K-Means: v_measure_score = 0.67529; computed in 0.004s
    Mini Batch K-Means: v_measure_score = 0.65055; computed in 0.004s

    K-Means: adjusted_mutual_info_score = 0.67263; computed in 0.006s
    Mini Batch K-Means: adjusted_mutual_info_score = 0.64731; computed in 0.005s

    K-Means: mutual_info_score = 0.74116; computed in 0.002s
    Mini Batch K-Means: mutual_info_score = 0.71351; computed in 0.001s
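
    The comments above describe v_measure_score as combining homogeneity and completeness; the two components can be inspected directly with metrics.homogeneity_completeness_v_measure, as in this small sketch (my own addition, on a toy labeling rather than the script's variables):

    # Illustrative sketch: v-measure is the harmonic mean of homogeneity
    # (each cluster contains only one class) and completeness
    # (each class ends up in a single cluster).
    from sklearn import metrics

    y_true = [0, 0, 0, 1, 1, 1, 2, 2, 2]
    y_pred = [0, 0, 0, 1, 1, 2, 2, 2, 2]  # one class-1 point leaks into cluster 2

    h, c, v = metrics.homogeneity_completeness_v_measure(y_true, y_pred)
    print("homogeneity=%.3f completeness=%.3f v_measure=%.3f" % (h, c, v))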

