[NLP] 9. D-S Fusion of Text-Matching Results: Measuring Correlation and Plotting the Figures

1. How D-S Theory Is Applied

  • With a sampling period of 20 s, compute the basic probability assignment (BPA) of how strongly the teacher's lecture content correlates with Baidu Baike and with the textbook. The correlation levels follow the Pearson-correlation criteria from the earlier post in this series (a minimal mapping sketch follows right after this list):

    r          Correlation Criteria
    0          No correlation
    0-0.5      Weak correlation
    0.5-0.8    Moderate correlation
    0.8-1      Strong correlation
    1          Perfect correlation

  • Compute a result once per minute and display it in the Qt-based human-machine interaction interface
  • After the video finishes playing, output the overall results of the time-domain fusion and the sensor fusion
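As a minimal sketch (my own illustration, not code from the original posts; how boundary values are binned is my assumption), the criteria in the table above can be turned into a label as follows:

def correlation_level(r):
    # map a correlation coefficient r in [0, 1] to the criteria table above
    if r == 1:
        return 'Perfect correlation'
    if r >= 0.8:
        return 'Strong correlation'
    if r >= 0.5:
        return 'Moderate correlation'
    if r > 0:
        return 'Weak correlation'
    return 'No correlation'

print(correlation_level(0.73))   # Moderate correlation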

2. Code Implementation

For the principles of D-S evidence theory, see references 1 to 6.
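For reference, Dempster's rule of combination for two BPAs $m_1$ and $m_2$ over the same frame of discernment $\Theta$ is

$$
m(A) = \frac{1}{1-K} \sum_{B \cap C = A} m_1(B)\, m_2(C), \qquad
K = \sum_{B \cap C = \varnothing} m_1(B)\, m_2(C),
$$

where $K$ is the conflict between the two bodies of evidence. The code below implements this rule for the vector layout used in this project: the second-to-last slot holds the mass assigned to the whole frame $\Theta$, and the last slot is unused.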

One of those blog posts provides MATLAB code for D-S evidence theory that runs well and can perform the basic probability fusion used in the paper 慕课授课中的学生听课行为自动分析系统 (an automatic analysis system for students' listening behaviour in MOOC teaching). With some modifications concerning the universal set, the code used here is as follows:

# <editor-fold desc="D-S fusion">
import numpy as np

# Fuse two BPA row vectors x and y with Dempster's rule.
# Convention: the second-to-last element is the mass assigned to the whole
# frame Θ (the uncertainty), and the last element is unused (kept at 0).
# x is modified in place and returned.
def DS_fusion(x, y):
    if x.ndim != 1: print('x should be a 1-D row vector')
    if y.ndim != 1: print('y should be a 1-D row vector')
    if x.shape[0] != y.shape[0]: print('x and y should have the same length')
    tmp = 0
    for i in range(x.shape[0]-1):
        # x[i] = x[i] * y[i] + x[i] * y[-2] + y[i] * x[-2]
        if i == x.shape[0]-2:
            # mass of Θ: only the product of the two Θ masses survives
            x[i] = x[i] * y[i]
        else:
            # singleton mass: agreement term plus the intersections with Θ
            x[i] = x[i] * y[i] + x[i] * y[-2] + y[i] * x[-2]
        tmp += x[i]
    # tmp equals 1 - K, the total non-conflicting mass; normalize by it
    for i in range(x.shape[0]-1):
        x[i] = x[i]/tmp
    x[-1] = 0
    return x


num_Sensor = 4
num_Object = 5
num_Period = 3
Info = np.zeros((num_Period,num_Sensor,num_Object))
Info[0,:,:] = ([[0.0,0.4444,0.4444,0.1112,0.00],
                [0.4444,0.0,0.4444,0.1112,0.00],
                [0.4444,0.0,0.4444,0.1112,0.00],
                [0.0,0.0,0.8888,0.1112,0.00]])

Info[1,:,:] = ([[0.0, 0.4528, 0.4528, 0.0944,0.00],
                [0.4528,0.0, 0.4528,0.0944,0.00],
                [0.4528,0.0, 0.4528,0.0944,0.00],
                [0, 0,  0.9056, 0.0944, 0.00]])

Info[2,:,:] = ([[0.0, 0.4528, 0.4528, 0.0944,0.00],
                [0.4528,0.0, 0.4528,0.0944,0.00],
                [0.4528,0.0, 0.4528,0.0944,0.00],
                [0, 0,  0.9056, 0.0944, 0.00]])

Info1 = np.zeros((num_Period, num_Object))

# Fuse the sensors within each period
for i in range(num_Period):
    Info1[i] = Info[i][0]
    for j in range(num_Sensor-1):
        Info1[i] = DS_fusion(Info1[i],Info[i][j+1])

np.set_printoptions(suppress=True)
# np.set_printoptions(precision=4)  # optionally limit the print precision
print(np.around(Info1, 4))

# Fusion across the periods (time-domain fusion)
Result = Info1[0]
for i in range(num_Period-1):
    Result = DS_fusion(Result, Info1[i+1])

print(Result)
# </editor-fold>
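As a quick sanity check (my own example, not from the original post), fuse two simple BPAs over a two-element frame {A, B} with the Θ mass in the second-to-last slot; the result matches Dempster's rule worked out by hand (1 - K = 0.61):

a = np.array([0.6, 0.3, 0.1, 0.0])   # masses for A, B, Θ, plus the unused last slot
b = np.array([0.5, 0.4, 0.1, 0.0])
# K = 0.6*0.4 + 0.3*0.5 = 0.39, so the fused masses are
# [0.41, 0.19, 0.01, 0] / 0.61 ≈ [0.6721, 0.3115, 0.0164, 0]
print(DS_fusion(a.copy(), b))        # DS_fusion modifies its first argument, so pass a copy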

The code for the example in reference 5:

import numpy as np

# Fuse two BPA row vectors x and y for a frame with one composite subset.
# Convention here: indices 0 and 1 are singletons contained in the composite
# subset at index 3 (x[-3]), index 2 is a separate singleton, index 4 (x[-2])
# is Θ, and the last element is unused. x is modified in place and returned.
def DS_fusion(x, y):
    if x.ndim != 1: print('x should be a 1-D row vector')
    if y.ndim != 1: print('y should be a 1-D row vector')
    if x.shape[0] != y.shape[0]: print('x and y should have the same length')
    tmp = 0
    for i in range(5):
        if i == 4:
            # Θ: only the product of the two Θ masses survives
            x[i] = x[i] * y[i]
        elif i == 3 or i == 2:
            # the composite subset and the separate singleton intersect only with Θ
            x[i] = x[i] * y[i] + x[i] * y[-2] + y[i] * x[-2]
        else:
            # singletons 0 and 1 also intersect with the composite subset at index -3
            x[i] = x[i] * y[i] + x[i] * y[-2] + y[i] * x[-2] + x[i] * y[-3] + y[i] * x[-3]

        tmp += x[i]
    # tmp equals 1 - K; normalize by it
    for i in range(x.shape[0]-1):
        x[i] = x[i]/tmp
    x[-1] = 0
    return x

x = np.array([0.4,0.3,0.1,0.1,0.1,0])
y = np.array([0.2,0.2,0.05,0.5,0.05,0])
print(DS_fusion(x,y))

Result:

[0.46575342 0.3630137  0.02054795 0.14383562 0.00684932 0.        ]
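A quick hand check of this output (my own arithmetic, following the frame layout described in the comments above):

# unnormalized masses of the five slots
# slot 0: 0.4*0.2 + 0.4*0.05 + 0.2*0.1 + 0.4*0.5 + 0.2*0.1 = 0.34
# slot 1: 0.3*0.2 + 0.3*0.05 + 0.2*0.1 + 0.3*0.5 + 0.2*0.1 = 0.265
# slot 2: 0.1*0.05 + 0.1*0.05 + 0.05*0.1 = 0.015
# slot 3: 0.1*0.5 + 0.1*0.05 + 0.5*0.1 = 0.105
# slot 4: 0.1*0.05 = 0.005
# 1 - K = 0.34 + 0.265 + 0.015 + 0.105 + 0.005 = 0.73
print(0.34/0.73, 0.265/0.73, 0.015/0.73)   # 0.46575... 0.36301... 0.02054...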

Note: when there is a lot of code, it can be grouped into foldable, named regions (the # <editor-fold> comments above); in PyCharm the Surround With shortcut is Ctrl+Alt+T. The effect is shown below:

[Screenshot: code collapsed into an editor-fold region]

3. Measuring the Correlation Between the Lecture Content and Baidu Baike / the Textbook

from gensim.models import KeyedVectors
word_vectors = KeyedVectors.load('vectors.kv')


# Average the word vectors of all in-vocabulary words in a sentence.
def get_sentence_vec(sentence):
    import jieba
    import numpy as np
    import re
    # keep only Chinese characters (and newlines) before word segmentation
    sentence = ''.join(re.findall('[\u4e00-\u9fa5\n]', sentence))
    sentence_list = ' '.join(jieba.cut(sentence)).split(' ')
    vecsum = np.zeros(word_vectors.vector_size)
    cnt = 0
    for word in sentence_list:
        try:
            vecsum = vecsum + word_vectors[word]
            cnt += 1
        except KeyError:
            # word not in the word2vec vocabulary
            continue
    if cnt == 0:
        # no word was found in the vocabulary: return the zero vector
        return vecsum
    return vecsum/cnt


begin_list = []
end_list = []
text_list = []
test_data = []
text_path = '... your path/大学物理典型问题解析—力学与热学 第5讲 牛顿运动定律及其应用-1/1牛顿运动定理简要回顾.txt'
file = open(text_path, 'r', encoding='utf-8')
for line in file.readlines():
    # each line of the subtitle file: "<begin time> <end time> <sentence text>"
    line = line.strip('\n').replace('  ', ' ').split(' ')
    sentence_vec = get_sentence_vec(line[2])
    if not sentence_vec.any():
        # all-zero vector: none of the words were in the vocabulary
        print(line[2] + ' Word Vector not exist!')
        continue
    begin_list.append(eval(line[0]))
    end_list.append(eval(line[1]))
    text_list.append(line[2])
    test_data.append(sentence_vec)
file.close()

syllabus_baike = []
syllabus_baike_data = []
baike_path = '... your path/baike.txt'
file_baike = open(baike_path, 'r', encoding='utf-8')
for line in file_baike.readlines():
    for sentence in line.strip('\n').strip(')').strip('”').strip('。').split('。'):
        sentence_vec = get_sentence_vec(sentence)
        if not sentence_vec.any():     # skip sentences whose vector is all zeros
            print('syllabus_baike:' + sentence + ' Word Vector not exist!')
            continue
        syllabus_baike.append(sentence)
        syllabus_baike_data.append(sentence_vec)
file_baike.close()      # len(syllabus_baike) = 211

syllabus_teaching_material = []
syllabus_teaching_material_data = []
teaching_material_path = '... your path/teaching_material.txt'
file_teaching_material = open(teaching_material_path, 'r', encoding='utf-8')
txt = file_teaching_material.read()
file_teaching_material.close()      # len(txt) = 67
for sentence in txt.replace('\n', '').split('.'):
    sentence_vec = get_sentence_vec(sentence)
    if not sentence_vec.any():     # skip sentences whose vector is all zeros
        print('syllabus_teaching_material:' + sentence + ' Word Vector not exist!')
        continue
    syllabus_teaching_material.append(sentence)
    syllabus_teaching_material_data.append(sentence_vec)


from scipy.spatial.distance import cosine
max_list = []
for line in test_data:
    # cosine similarity of this lecture sentence against every Baidu Baike sentence
    cos_list = []
    for line2 in syllabus_baike_data:
        cos_list.append(1-cosine(line, line2))
    # ... and against every textbook sentence
    cos_list2 = []
    for line2 in syllabus_teaching_material_data:
        cos_list2.append(1-cosine(line, line2))
    # keep the index and value of the best match from each source
    max_list.append([cos_list.index(max(cos_list)), max(cos_list), cos_list2.index(max(cos_list2)), max(cos_list2)])

for i in range(len(text_list)):
    print(text_list[i], max_list[i][1], syllabus_baike[max_list[i][0]], max_list[i][3], syllabus_teaching_material[max_list[i][2]])

Output:

syllabus_baike:因此: Word Vector not exist!
syllabus_teaching_material:411 Word Vector not exist!
syllabus_teaching_material:2 Word Vector not exist!
syllabus_teaching_material:2 Word Vector not exist!
嗯 0.27633987396100823 也可能记作,此时表示方向压强的改变 0.3003855278204397  Galilei)就表达了惯性的概念
大家好, 0.41210052237188377 牛顿认为:“雷恩和惠更斯的理论以绝对硬的物体为前提,而用理想弹性体可以得到更肯定的结果,并且用非理想弹性体,如压紧的木球、钢球和玻璃球做实验,消除误差后结果是一致的 0.40233271284694383 进一步追问,是否有比太阳参考系精度更高的惯性参考系呢?是,答案是肯定的,不过在我们的课程中很少用到
这周我们进行牛顿运动定律方面的相关练习, 0.7373663701987992 牛顿也重复做了此实验,他进一步讨论了空气阻力的影响及改进办法,并对结果进行了修正 0.7292025993955437 动量是物理学中一个非常重要的物理量,在后文中将对它进行详细的讨论
牛顿运动定律是整个力学的基础, 0.8983454824567716 牛顿运动定律是力学中重要的定律,是研究经典力学甚至物理学的基础,阐述了经典力学中基本的运动规律 0.8280248030974275 引入动量p,牛顿第二定律的数学表达式为F(1-63由于在牛顿力学中,质量m恒定,故牛顿第二定律的两种表达式式(1-59)与式(1-63)是一致的,不过在狭义相对论中,(1-63)依旧适用,而式(1-59)不再成立在直角坐标系中,牛顿第二定律的分量式为Fna切向P(1-64)Fm法向F:、F,、F分别是合外力在x、y、z轴上的投影,P、P,、P分别是物体的动量在x、y、z轴上的投影涉及质点的平面曲线运动时,可以将被研究的矢量,例如:加速度和力,沿着质点轨道的法向和切向进行分解,以方便地研究图1-18圆周运动的法向和切向其运动
这一部分中所用到的研究方法, 0.8236661526759003 第一、第二定律只研究单一物体(可以只有一个物体,也可以从众多物体中隔离出一个物体来作为研究对象),解决其不受力或受很多力作用后的运动问题;第三定律扩展了研究对象,至少研究是两个物体之间的相互作用,这种相互作用制约或影响了研究对象或研究对象以外的其它物体的运动 0.8042857131621559 自然界中的基本相互作用力是物体间的相互作用,其形式是多种多样的,有些力我们授课录像:自然界中的很难直观地感受到,比如说原子核内部的核力、加速器中粒子之基本相互作用
对于我们学习力学是非常重要的。 0.8249761062321814 牛顿运动定律是力学中重要的定律,是研究经典力学甚至物理学的基础,阐述了经典力学中基本的运动规律 0.7571570864816607 动量是物理学中一个非常重要的物理量,在后文中将对它进行详细的讨论
首先我们对牛顿运动定律做一个简单的回顾, 0.8387968957347439 其中,马赫在《发展中的力学》中,对牛顿运动定律做了比较全面的考察和分析整理;埃森布德在《关于经验的运动定律》中、奥斯顿在《牛顿力学的表述》中,也提出了相似的新表述 0.8091452788512982 从牛顿发表他的运动定律到现在的三百多年间,牛顿运动定律有力地推进了人类对自然的认识
牛顿第一定律, 0.9150573085331962 1、牛顿第一运动定律: 0.8462860437828448 牛顿第一定律也称为惯性定律牛顿第一定律看似简单,其实非常深刻
……
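A minimal usage sketch of the similarity measure itself (my own example sentences, assuming the same vectors.kv model is loaded as above):

from scipy.spatial.distance import cosine
# two short related sentences; a higher value means more related content
v1 = get_sentence_vec('牛顿第一定律也称为惯性定律')
v2 = get_sentence_vec('牛顿运动定律是力学的基础')
print(1 - cosine(v1, v2))   # cosine similarity of the averaged word vectors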

4. D-S Analysis of the Lecture-Content Correlation

Suppose the two count vectors obtained are as follows (in each, the annotated second entry 3 is the count for the 0.4-0.5 bin, i.e. the composite {Moderate, Weak}):

[23.300000000000015, 3, 27.50000000000002, 28, 43.59999999999997, 0, 0]
[24.10000000000002, 3, 26.900000000000027, 23, 51.39999999999997, 0, 0]

Normalize with the following code:

import numpy as np
x = np.array([24.10000000000002, 3, 26.900000000000027, 23, 51.39999999999997, 0, 0])
print((x/sum(x)).tolist())

The normalized vectors are then taken as the basic probability assignments:

[0.18580542264752803, 0.023923444976076555, 0.21929824561403524, 0.22328548644338117, 0.34768740031897905, 0.0, 0.0]
[0.18769470404984437, 0.02336448598130841, 0.20950155763239894, 0.1791277258566978, 0.40031152647975055, 0.0, 0.0]

The evaluation criteria are then revised:

Cosine similarity    Correlation Criteria
0-0.5                Weak correlation
0.5-0.7              Moderate correlation
0.7-0.8              {Strong, Moderate}
0.8-1                Strong correlation

The counting code is as follows:

# bins: [Weak, Moderate, {Strong, Moderate}, Strong, (unused), (unused)]
# each sample adds 1 to its own singleton bin and a soft count of 0.3 to the other
# two singleton bins; a similarity in 0.7-0.8 adds 1 to the composite bin only
correlation_baike = [0] * 6
correlation_teaching_material = [0] * 6
for i in range(len(test_data)):
    if max_list[i][1] >= 0.8:
        correlation_baike[0] += 0.3
        correlation_baike[1] += 0.3
        correlation_baike[3] += 1
    elif max_list[i][1] < 0.5:
        correlation_baike[0] += 1
        correlation_baike[1] += 0.3
        correlation_baike[3] += 0.3
    elif max_list[i][1] >= 0.5 and max_list[i][1] < 0.7:
        correlation_baike[0] += 0.3
        correlation_baike[1] += 1
        correlation_baike[3] += 0.3
    else:
        correlation_baike[2] += 1

    if max_list[i][3] >= 0.8:
        correlation_teaching_material[0] += 0.3
        correlation_teaching_material[1] += 0.3
        correlation_teaching_material[3] += 1
    elif max_list[i][3] < 0.5:
        correlation_teaching_material[0] += 1
        correlation_teaching_material[1] += 0.3
        correlation_teaching_material[3] += 0.3
    elif max_list[i][3] >= 0.5 and max_list[i][3] < 0.7:
        correlation_teaching_material[0] += 0.3
        correlation_teaching_material[1] += 1
        correlation_teaching_material[3] += 0.3
    else:
        correlation_teaching_material[2] += 1

print(correlation_baike)
print(correlation_teaching_material)

Counting results:

[26.30000000000002, 28.400000000000023, 28, 44.49999999999997, 0, 0]
[27.100000000000023, 27.80000000000003, 23, 52.29999999999997, 0, 0]
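These counts are normalized in the same way as before; for the Baidu Baike vector, for example:

import numpy as np
x = np.array([26.30000000000002, 28.400000000000023, 28, 44.49999999999997, 0, 0])
print((x/sum(x)).tolist())   # the counts sum to about 127.2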

Normalized (as basic probability assignments):

[0.2067610062893083, 0.22327044025157247, 0.220125786163522, 0.3498427672955972, 0.0, 0.0]
[0.208141321044547, 0.21351766513056855, 0.17665130568356371, 0.40168970814132077, 0.0, 0.0]

D-S fusion:

import numpy as np

# Fuse two BPA row vectors x and y over the frame
# [Weak, Moderate, {Strong, Moderate}, Strong, (unused), (unused)],
# where index 2 is the composite subset {Strong, Moderate}. x is modified in place.
def DS_fusion(x, y):
    if x.ndim != 1: print('x should be a 1-D row vector')
    if y.ndim != 1: print('y should be a 1-D row vector')
    if x.shape[0] != y.shape[0]: print('x and y should have the same length')
    tmp = 0
    for i in range(4):
        if i == 0 or i == 2:
            # Weak only agrees with Weak; the composite keeps only its own product
            x[i] = x[i] * y[i]
        else:
            # Moderate (i = 1) and Strong (i = 3) also intersect the composite at index 2;
            # note that for i == 3 the slot x[2] has already been overwritten above,
            # so this cross term uses the fused rather than the original composite mass
            x[i] = x[i] * y[i] + x[i] * y[2] + y[i] * x[2]
        tmp += x[i]
    for i in range(x.shape[0]-1):
        x[i] = x[i]/tmp
    x[-1] = 0
    return x

x = np.array([0.2067610062893083, 0.22327044025157247, 0.220125786163522, 0.3498427672955972, 0.0, 0.0])
y = np.array([0.208141321044547, 0.21351766513056855, 0.17665130568356371, 0.40168970814132077, 0.0, 0.0])
print(DS_fusion(x,y))

Result:

[0.09916398 0.3090302  0.08960139 0.50220443 0.         0.        ]

5. Comparison Without D-S Fusion

Averaging the cosine similarities

Average the similarity columns of max_list directly (indices 1 and 3, i.e. the best-match similarities against Baidu Baike and against the textbook):

print(np.mean(max_list, axis=0)[1], np.mean(max_list, axis=0)[3])

Result:

0.7189390893141048 0.7397255979568887

Counting occurrences directly

# bins: [Weak, Moderate, Strong]
correlation_baike = [0] * 3
correlation_teaching_material = [0] * 3
for i in range(len(test_data)):
    if max_list[i][1] >= 0.8:
        correlation_baike[2] += 1
    elif max_list[i][1] < 0.5:
        correlation_baike[0] += 1
    else:
        correlation_baike[1] += 1

    if max_list[i][3] >= 0.8:
        correlation_teaching_material[2] += 1
    elif max_list[i][3] < 0.5:
        correlation_teaching_material[0] += 1
    else:
        correlation_teaching_material[1] += 1

print(correlation_baike)
print(correlation_teaching_material)

Results:

[11, 42, 37]
[10, 34, 46]
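The normalization and averaging below can be reproduced directly (the counts for each source sum to 90):

import numpy as np
a = np.array([11, 42, 37]) / 90    # Baidu Baike counts as proportions
b = np.array([10, 34, 46]) / 90    # textbook counts as proportions
print(a.tolist())
print(b.tolist())
print(((a + b) / 2).tolist())      # element-wise average of the two sources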

Normalized:

[0.12222222222222222, 0.4666666666666667, 0.4111111111111111]
[0.1111111111111111, 0.37777777777777777, 0.5111111111111111]

Adding the two normalized vectors and averaging:

[0.11666666666666667, 0.4222222222222222, 0.4611111111111111]

Shortcomings of these two baselines:

  1. Averaging the cosine similarities gives only a single score and loses detail, for example whether most samples are moderately correlated or whether strong and weak correlations dominate
  2. Counting occurrences directly cannot express uncertainty, so the result may carry a larger error

6. Cosine-Similarity Curves over the Lecture and Correlation Pie Charts

Examples of the plots

Cosine-similarity curves

import matplotlib.pyplot as plt
x = begin_list                    # sentence start times from the subtitle file
y = [i[1] for i in max_list]      # best similarity against Baidu Baike
plt.figure(1)
plt.plot(x,y)
y = [i[3] for i in max_list]      # best similarity against the textbook
plt.figure(2)
plt.plot(x,y)
plt.show()

[Figures: cosine-similarity curves over time against Baidu Baike and against the textbook]
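As an optional variant (my own addition, not in the original post), both curves can be drawn in a single labelled figure for easier comparison:

import matplotlib.pyplot as plt
plt.figure()
plt.plot(begin_list, [i[1] for i in max_list], label='vs. Baidu Baike')
plt.plot(begin_list, [i[3] for i in max_list], label='vs. textbook')
plt.xlabel('sentence start time in the lecture')
plt.ylabel('cosine similarity')
plt.legend()
plt.show()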

Correlation pie chart

import matplotlib.pyplot as plt
labels = 'Weak correlation', 'Moderate correlation', '{Strong, Moderate} correlation', 'Strong correlation'
sizes = [0.09916398, 0.3090302, 0.08960139, 0.50220443]
explode = (0, 0, 0.2, 0)  # only "explode" the {Strong, Moderate} slice

patches,l_text,p_text = plt.pie(sizes, explode=explode, labels=labels, autopct='%1.1f%%',
        shadow=True, startangle=90)

for t in l_text:
    t.set_size(16)
for t in p_text:
    t.set_size(18)

plt.axis('equal')
plt.show()

[Figure: pie chart of the fused correlation BPA]

import numpy as np
import matplotlib.pyplot as plt
fig, ax = plt.subplots(subplot_kw=dict(aspect="equal"))

recipe = ['Weak correlation', 'Moderate correlation', '{Strong, Moderate}', 'Strong correlation']

data = [0.09916398, 0.3090302, 0.08960139, 0.50220443]
explode = (0, 0, 0.2, 0)

wedges, texts = ax.pie(data, wedgeprops=dict(width=0.5), startangle=-40)

bbox_props = dict(boxstyle="square,pad=0.3", fc="w", ec="k", lw=0.72)
kw = dict(arrowprops=dict(arrowstyle="-"),
          bbox=bbox_props, zorder=0, va="center")

for i, p in enumerate(wedges):
    ang = (p.theta2 - p.theta1)/2. + p.theta1
    y = np.sin(np.deg2rad(ang))
    x = np.cos(np.deg2rad(ang))
    horizontalalignment = {-1: "right", 1: "left"}[int(np.sign(x))]
    connectionstyle = "angle,angleA=0,angleB={}".format(ang)
    kw["arrowprops"].update({"connectionstyle": connectionstyle})
    ax.annotate(recipe[i],xy=(x, y), xytext=(1.35*np.sign(x), 1.4*y),
                horizontalalignment=horizontalalignment, **kw)

ax.set_title("Lecture-content correlation (D-S fused)")

plt.show()

[Figure: donut chart of the fused correlation BPA]

Summary

This post measured the correlation between the teacher's lecture content and the syllabus sources (Baidu Baike and the textbook), applied D-S fusion to obtain the final result, and plotted the similarity curves over the lecture together with pie charts of the final result.
