# 1) Build a word cloud with WordCloud directly after jieba word segmentation
import jieba
from jieba.analyse import extract_tags
from wordcloud import WordCloud
import matplotlib.pyplot as plt
# Read the novel "Ze Tian Ji" from the desktop
txtname = '择天记'
max_words = 50
path = 'C:/Users/Administrator/Desktop'
txtfile = path + '/' + txtname + '.txt'
# Use a context manager so the file handle is closed even on error
# (the original left the handle open after read())
with open(txtfile, 'r', encoding='utf-8') as f:
    content = f.read()  # full novel text
# Load a user dictionary so domain-specific names segment correctly
dictfile = path + '/' + 'zetianji.txt'
jieba.load_userdict(dictfile)  # accepts a path or a file-like object
# Segment the text; WordCloud expects a space-delimited token string
cut_text = " ".join(jieba.cut(content))
# Font with CJK glyph support (required, or Chinese renders as boxes)
font_file = r'C:/Windows/Fonts/STKAITI.TTF'
wc = WordCloud(font_path=font_file, #设置字体
background_color="black",#背景颜色
max_words= 50,# 词云显示的最大词数
max_font_size=100, #字体最大值mask = back_col