总体思路
- 导入想要产生词云的文章或者段落
- 对导入的文字进行 jieba 分词
- 统计分词之后的词频
- 生成并绘制词云
Demo
from wordcloud import WordCloud
import matplotlib.pyplot as plt
import jieba
# NOTE(review): 'word.txt' does not exist at this path on most machines —
# point path_txt at a real UTF-8 text file before running.
path_txt = "/home/alan/Desktop/word.txt"

# Read the whole source text; the `with` block guarantees the file handle
# is closed even if reading raises (the original leaked the handle).
with open(path_txt, "r", encoding="UTF-8") as f:
    text = f.read()

# jieba segments Chinese text into words; WordCloud expects tokens
# separated by whitespace, so join the segments with single spaces.
cut_text = " ".join(jieba.cut(text))

# font_path must point to a font with CJK glyphs, otherwise Chinese
# words render as empty boxes.
wordcloud = WordCloud(
    font_path="/home/alan/.local/share/fonts/STKAITI.TTF",
    background_color="white",
    width=1000,
    height=800,
).generate(cut_text)

# Render the word-cloud image; axes are meaningless here, so hide them.
plt.imshow(wordcloud, interpolation="bilinear")
plt.axis("off")
plt.show()