1、画曲线
# coding:utf-8
import numpy as np
import matplotlib.pyplot as plt

# Sample x values: the integers 3..9
X = np.arange(3, 10)
print(X)

# Draw three curves (linear, quadratic, cubic) with distinct line styles
for ys, fmt, tag in (
        (X * 4, "r--", "y=X*4"),
        (X * X + 3, "b:", "y=X*X+3"),
        (X * X * X - 10, "k", "y=X*X*X-10"),
):
    plt.plot(X, ys, fmt, label=tag)

# Show the legend and display the figure
plt.legend()
plt.show()
运行结果:
2、画sinX,cosX图像
import numpy as np
import matplotlib.pyplot as plt

# 256 evenly spaced samples over one full period [-pi, pi]
X = np.linspace(-np.pi, np.pi, 256, endpoint=True)
C = np.cos(X)
S = np.sin(X)

# FIX: the legend labels were swapped -- C is cos(x) and S is sin(x)
plt.plot(X, C, color="blue", linewidth=2.0, linestyle="-", label="$cos(x)$")
plt.plot(X, S, color="red", linewidth=2.0, linestyle="--", label="$sin(x)$")
plt.legend()
plt.show()
运行结果:
3、笛卡尔爱情公式
# coding:utf-8
import numpy as np
import matplotlib.pyplot as plt

# FIX: the original code here was a copy-paste of example 1 and did not
# implement the section's topic (Descartes' "love formula").  Draw the
# classic parametric heart curve instead:
#   x = 16*sin(t)^3
#   y = 13*cos(t) - 5*cos(2t) - 2*cos(3t) - cos(4t)
t = np.linspace(0, 2 * np.pi, 1000)
x = 16 * np.sin(t) ** 3
y = 13 * np.cos(t) - 5 * np.cos(2 * t) - 2 * np.cos(3 * t) - np.cos(4 * t)

plt.plot(x, y, "r-", label="heart")
plt.legend()
plt.axis("equal")  # equal aspect ratio so the heart is not stretched
plt.show()
运行结果:
4、3D模型
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 -- registers the '3d' projection

fig1 = plt.figure()
# FIX: constructing Axes3D(fig) directly no longer attaches the axes to
# the figure in modern matplotlib; create the axes through the figure.
ax = fig1.add_subplot(111, projection='3d')

# Grid of (x, y) points over [-2, 2) with step 0.25
X = np.arange(-2, 2, 0.25)
Y = np.arange(-2, 2, 0.25)
print(X)
print(Y)
X, Y = np.meshgrid(X, Y)

# Radial distance from the origin; surface height is 2*sin(R)
R = np.sqrt(X ** 2 + Y ** 2)
# NOTE(review): the original wrote sin(R) + sin(R), which is exactly
# 2*sin(R); possibly cos(R) + sin(R) was intended -- confirm with author.
Z = 2 * np.sin(R)

ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap='rainbow')
plt.show()
运行结果:
5、绘制散点图
import numpy as np
import matplotlib.pyplot as plt

# 200 random points; m drives the marker colors, n the marker sizes
x = np.random.randn(200)
y = np.random.randn(200)
m = np.random.randn(200) * 200
n = np.random.randn(200) * 200

# FIX: randn produces negative values and scatter rejects negative
# marker sizes -- use the magnitude of n instead.
plt.scatter(x, y, c=m, s=np.abs(n), marker="*")
plt.show()
运行结果:
6、柱状图
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt

# Heights of the four bars, one bar per index position
num_list = [1.5, 0.6, 7.8, 6]
positions = range(len(num_list))

# 'rgb' cycles the single-letter colors red/green/blue across the bars
plt.bar(positions, num_list, color='rgb')
plt.show()
运行结果:
7、饼状图
# -*- coding: utf-8 -*-
import matplotlib.pyplot as plt

# Pie chart example (explained on textbook page 56)
labels = 'frogs', 'hogs', 'Dogs', 'Logs'    # slice labels
sizes = [15, 30, 45, 10]                    # relative slice sizes
colors = ['yellow', 'red', 'gold', 'blue']  # one color per slice
explode = (0, 0.1, 0, 0)                    # pull the 2nd slice out by 0.1

plt.pie(
    sizes,
    explode=explode,
    labels=labels,
    colors=colors,
    autopct='%1.1f%%',  # percentage printed on each slice
    shadow=True,
    startangle=90,
)
plt.axis('equal')  # equal aspect ratio so the pie is a circle
plt.show()
运行结果:
8、词云《英语版背影》
# -*- coding: utf-8 -*-
# FIX: the coding cookie above was malformed ("# -*-codingutf-8 -*-").
from matplotlib import pyplot as plt
from wordcloud import WordCloud
import io
import jieba

# FIX: close the file deterministically and decode the Chinese text as
# UTF-8 explicitly instead of relying on the platform default encoding.
with io.open(r'E:\PythonMake\python_test3\beiying.txt', 'r', encoding='utf-8') as f:
    text = f.read()

cut = jieba.cut(text)    # segment the raw text into words
string = ' '.join(cut)   # WordCloud expects space-separated words
print(string)

# A Chinese-capable font is required, otherwise CJK glyphs render as boxes
font = r'C:\Windows\Fonts\FZSTK.TTF'
wc = WordCloud(
    font_path=font,
    background_color='white',
    width=1000,
    height=800,
).generate(string)

wc.to_file('by.png')  # save the rendered cloud
plt.imshow(wc)        # display it with matplotlib
plt.axis('off')       # hide the axes
plt.show()
运行结果:
9、词云中文形状背影
# Word cloud shaped by a background image
from wordcloud import WordCloud, ImageColorGenerator
import matplotlib.pyplot as plt
import numpy as np
import io
import jieba

# FIX: close the file deterministically and decode as UTF-8 explicitly.
with io.open(r'E:\PythonMake\python_test3\zzqby.txt', 'r', encoding='utf-8') as f:
    text = f.read()

# FIX: scipy.misc.imread was removed from SciPy; load the mask image with
# matplotlib instead and convert to the uint8 array WordCloud expects
# (PNGs load as floats in [0, 1]).
bg_pic = plt.imread(r'E:\PythonMake\python_test3\beijingpicture.png')
if bg_pic.dtype != np.uint8:
    bg_pic = (bg_pic * 255).astype(np.uint8)

wordlist_after_jieba = jieba.cut(text, cut_all=True)
wl_space_split = " ".join(wordlist_after_jieba)

# Generate the cloud using the mask and a Chinese-capable font
font = r'C:\Windows\Fonts\simfang.ttf'
wc = WordCloud(mask=bg_pic, background_color='white', font_path=font,
               scale=1.5).generate(wl_space_split)
image_colors = ImageColorGenerator(bg_pic)

# Display, then save the image
plt.imshow(wc)
plt.axis('off')
plt.show()
wc.to_file(r'0512.jpg')
运行结果:
10、数据拟合分析
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
from sklearn.linear_model import LinearRegression

# 1. Training data: pizza diameters (X) and their prices (P)
X = [[6], [8], [10], [12], [14]]
print(X)
P = [[7], [10], [13], [16], [18]]
print(P)

# 2. The analysis model: ordinary least-squares linear regression
clf = LinearRegression()
print(clf)

# 3. Fit on the training data, then predict prices for new diameters
clf.fit(X, P)
X1 = [[5], [7], [11], [16]]
y_pred = clf.predict(X1)
print(y_pred)
print(u'系数', clf.coef_)
print(u'截距', clf.intercept_)

# 4. Visualize: training points in red dots, fitted line in blue
plt.title("pizza linearregression")
plt.xlabel("zhijing")
plt.ylabel("price")
plt.plot(X, P, 'r.')
plt.plot(X1, y_pred, 'b-')
plt.show()
运行结果:
11、决策树分析鸢尾花数据集
# -*- coding: utf-8 -*-
# Classify the iris data set (KNN, with a decision tree as an alternative)
import numpy as np
import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier

# Load the data set: X holds the 4 features, Y the class labels (0/1/2)
hua = load_iris()
X = hua.data
Y = hua.target
# FIX: these were Python-2-only print statements ("print X"); every other
# example in this file uses the print() function.
print(X)
print(Y)

# 7-nearest-neighbour classifier (swap in DecisionTreeClassifier to
# compare decision-tree analysis)
clf = KNeighborsClassifier(n_neighbors=7)
#clf = DecisionTreeClassifier()
clf.fit(X, Y)
pre = clf.predict(X)
print(u"预测:", pre)

# Visualize the first two features, colored by predicted class
m = [k[0] for k in X]
n = [k[1] for k in X]
plt.scatter(m, n, c=pre, s=100, marker="s")
plt.show()
运行结果:
PS C:\Users\Youle> & D:/Python/Python27/python.exe c:/Users/Youle/Desktop/1/test01.py
[[5.1 3.5 1.4 0.2]
[4.9 3. 1.4 0.2]
[4.7 3.2 1.3 0.2]
[4.6 3.1 1.5 0.2]
[5. 3.6 1.4 0.2]
[5.4 3.9 1.7 0.4]
[4.6 3.4 1.4 0.3]
[5. 3.4 1.5 0.2]
[4.4 2.9 1.4 0.2]
[4.9 3.1 1.5 0.1]
[5.4 3.7 1.5 0.2]
[4.8 3.4 1.6 0.2]
[4.8 3. 1.4 0.1]
[4.3 3. 1.1 0.1]
[5.8 4. 1.2 0.2]
[5.7 4.4 1.5 0.4]
[5.4 3.9 1.3 0.4]
[5.1 3.5 1.4 0.3]
[5.7 3.8 1.7 0.3]
[5.1 3.8 1.5 0.3]
[5.4 3.4 1.7 0.2]
[5.1 3.7 1.5 0.4]
[4.6 3.6 1. 0.2]
[5.1 3.3 1.7 0.5]
[4.8 3.4 1.9 0.2]
[5. 3. 1.6 0.2]
[5. 3.4 1.6 0.4]
[5.2 3.5 1.5 0.2]
[5.2 3.4 1.4 0.2]
[4.7 3.2 1.6 0.2]
[4.8 3.1 1.6 0.2]
[5.4 3.4 1.5 0.4]
[5.2 4.1 1.5 0.1]
[5.5 4.2 1.4 0.2]
[4.9 3.1 1.5 0.2]
[5. 3.2 1.2 0.2]
[5.5 3.5 1.3 0.2]
[4.9 3.6 1.4 0.1]
[4.4 3. 1.3 0.2]
[5.1 3.4 1.5 0.2]
[5. 3.5 1.3 0.3]
[4.5 2.3 1.3 0.3]
[4.4 3.2 1.3 0.2]
[5. 3.5 1.6 0.6]
[5.1 3.8 1.9 0.4]
[4.8 3. 1.4 0.3]
[5.1 3.8 1.6 0.2]
[4.6 3.2 1.4 0.2]
[5.3 3.7 1.5 0.2]
[5. 3.3 1.4 0.2]
[7. 3.2 4.7 1.4]
[6.4 3.2 4.5 1.5]
[6.9 3.1 4.9 1.5]
[5.5 2.3 4. 1.3]
[6.5 2.8 4.6 1.5]
[5.7 2.8 4.5 1.3]
[6.3 3.3 4.7 1.6]
[4.9 2.4 3.3 1. ]
[6.6 2.9 4.6 1.3]
[5.2 2.7 3.9 1.4]
[5. 2. 3.5 1. ]
[5.9 3. 4.2 1.5]
[6. 2.2 4. 1. ]
[6.1 2.9 4.7 1.4]
[5.6 2.9 3.6 1.3]
[6.7 3.1 4.4 1.4]
[5.6 3. 4.5 1.5]
[5.8 2.7 4.1 1. ]
[6.2 2.2 4.5 1.5]
[5.6 2.5 3.9 1.1]
[5.9 3.2 4.8 1.8]
[6.1 2.8 4. 1.3]
[6.3 2.5 4.9 1.5]
[6.1 2.8 4.7 1.2]
[6.4 2.9 4.3 1.3]
[6.6 3. 4.4 1.4]
[6.8 2.8 4.8 1.4]
[6.7 3. 5. 1.7]
[6. 2.9 4.5 1.5]
[5.7 2.6 3.5 1. ]
[5.5 2.4 3.8 1.1]
[5.5 2.4 3.7 1. ]
[5.8 2.7 3.9 1.2]
[6. 2.7 5.1 1.6]
[5.4 3. 4.5 1.5]
[6. 3.4 4.5 1.6]
[6.7 3.1 4.7 1.5]
[6.3 2.3 4.4 1.3]
[5.6 3. 4.1 1.3]
[5.5 2.5 4. 1.3]
[5.5 2.6 4.4 1.2]
[6.1 3. 4.6 1.4]
[5.8 2.6 4. 1.2]
[5. 2.3 3.3 1. ]
[5.6 2.7 4.2 1.3]
[5.7 3. 4.2 1.2]
[5.7 2.9 4.2 1.3]
[6.2 2.9 4.3 1.3]
[5.1 2.5 3. 1.1]
[5.7 2.8 4.1 1.3]
[6.3 3.3 6. 2.5]
[5.8 2.7 5.1 1.9]
[7.1 3. 5.9 2.1]
[6.3 2.9 5.6 1.8]
[6.5 3. 5.8 2.2]
[7.6 3. 6.6 2.1]
[4.9 2.5 4.5 1.7]
[7.3 2.9 6.3 1.8]
[6.7 2.5 5.8 1.8]
[7.2 3.6 6.1 2.5]
[6.5 3.2 5.1 2. ]
[6.4 2.7 5.3 1.9]
[6.8 3. 5.5 2.1]
[5.7 2.5 5. 2. ]
[5.8 2.8 5.1 2.4]
[6.4 3.2 5.3 2.3]
[6.5 3. 5.5 1.8]
[7.7 3.8 6.7 2.2]
[7.7 2.6 6.9 2.3]
[6. 2.2 5. 1.5]
[6.9 3.2 5.7 2.3]
[5.6 2.8 4.9 2. ]
[7.7 2.8 6.7 2. ]
[6.3 2.7 4.9 1.8]
[6.7 3.3 5.7 2.1]
[7.2 3.2 6. 1.8]
[6.2 2.8 4.8 1.8]
[6.1 3. 4.9 1.8]
[6.4 2.8 5.6 2.1]
[7.2 3. 5.8 1.6]
[7.4 2.8 6.1 1.9]
[7.9 3.8 6.4 2. ]
[6.4 2.8 5.6 2.2]
[6.3 2.8 5.1 1.5]
[6.1 2.6 5.6 1.4]
[7.7 3. 6.1 2.3]
[6.3 3.4 5.6 2.4]
[6.4 3.1 5.5 1.8]
[6. 3. 4.8 1.8]
[6.9 3.1 5.4 2.1]
[6.7 3.1 5.6 2.4]
[6.9 3.1 5.1 2.3]
[5.8 2.7 5.1 1.9]
[6.8 3.2 5.9 2.3]
[6.7 3.3 5.7 2.5]
[6.7 3. 5.2 2.3]
[6.3 2.5 5. 1.9]
[6.5 3. 5.2 2. ]
[6.2 3.4 5.4 2.3]
[5.9 3. 5.1 1.8]]
[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1
1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 2 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2]
预测: [0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0
0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 1 2 1
1 1 1 1 1 1 1 1 1 2 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 1 2 2 2 2 2 2 1 2 2 2 2
2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2 2
2 2]