matplotlib是Python最著名的绘图库,它提供了一整套和MATLAB相似的命令API,可以画出美丽的线图、散点图、等高线图、条形图、柱状图、3D图等。
绘制3D图:
# -*- coding: UTF-8 -*-
"""Plot a bowl-shaped 3D surface: the sigmoid of the squared radius x^2 + y^2."""
import matplotlib.pyplot as plt
import numpy as np

fig = plt.figure(figsize=(8, 5))
# BUG FIX: `Axes3D(fig)` stopped auto-attaching the axes to the figure in
# matplotlib 3.4+ (deprecated, then removed); add_subplot(projection="3d")
# is the supported way to create a 3D axes.
ax1 = fig.add_subplot(projection="3d")

alpha = 0.8  # half-width of the plotted square domain
r = np.linspace(-alpha, alpha, 100)
X, Y = np.meshgrid(r, r)
# Sigmoid of the squared radius rises away from the origin -> bowl shape.
# Renamed from `l`, which is easily confused with the digit 1.
Z = 1.0 / (1 + np.exp(-(X ** 2 + Y ** 2)))

ax1.plot_wireframe(X, Y, Z)
ax1.plot_surface(X, Y, Z, cmap=plt.get_cmap("rainbow"))  # rainbow colormap
ax1.set_title("Bowl shape")
plt.show()
结果:
带梯度下降的3D图动画
# -*- coding: UTF-8 -*-
"""
Animated 3D illustration of gradient descent.
"""
import numpy as np
from matplotlib import cm
import matplotlib.pyplot as plt
import mpl_toolkits.mplot3d.axes3d as p3
import matplotlib.animation as animation
# Number of gradient-descent iterations shown in the animation.
nb_steps = 20
# Starting point of the descent on the cost surface.
x0 = np.array([0.8, 0.8])
# Step size for each gradient update.
learning_rate = 0.1
def cost_function(x):
    """Quadratic bowl cost f(x) = x0**2 + x1**2 (works elementwise on arrays)."""
    first, second = x[0], x[1]
    return first * first + second * second
def gradient_cost_function(x):
    """Analytic gradient of cost_function: [2*x0, 2*x1]."""
    doubled = [2 * component for component in (x[0], x[1])]
    return np.array(doubled)
def gen_line():
    """Run nb_steps gradient-descent iterations from x0 and record the path.

    Returns a (3, nb_steps + 1) array whose columns hold (x, y, cost) at
    each iteration; column 0 is the starting point x0.
    """
    point = x0.copy()
    path = np.empty((3, nb_steps + 1))
    path[:, 0] = np.concatenate((point, [cost_function(point)]))
    step = 1
    while step <= nb_steps:
        point -= learning_rate * gradient_cost_function(point)
        path[:, step] = np.concatenate((point, [cost_function(point)]))
        step += 1
    return path
def update_line(num, data, line):
    """Animation callback: reveal the first `num` points of the descent path."""
    xy_prefix = data[:2, :num]
    z_prefix = data[2, :num]
    line.set_data(xy_prefix)
    line.set_3d_properties(z_prefix)
    return line
fig = plt.figure()
fig.suptitle("Gradient Descent", fontsize=20)
# BUG FIX: `p3.Axes3D(fig)` stopped auto-attaching the axes to the figure in
# matplotlib 3.4+; add_subplot(projection="3d") is the supported API.
ax = fig.add_subplot(projection="3d")

# Cost surface sampled on a grid around the descent trajectory.
X = np.arange(-0.5, 1, 0.1)
Y = np.arange(-1, 1, 0.1)
X, Y = np.meshgrid(X, Y)
Z = cost_function((X, Y))
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,
                       linewidth=0, antialiased=False)

data = gen_line()
# BUG FIX: the original passed data[0] as x, y AND z, so the descent path was
# seeded on the wrong coordinates; rows 0/1/2 are x/y/cost respectively.
line = ax.plot(data[0, 0:1], data[1, 0:1], data[2, 0:1], 'rx-', linewidth=2)[0]

ax.view_init(30, -160)
ax.set_xlim3d([-1.0, 1.0])
ax.set_xlabel('X')
ax.set_ylim3d([-1.0, 1.0])
ax.set_ylabel('Y')
ax.set_zlim3d([0.0, 2.0])
ax.set_zlabel('Z')

line_ani = animation.FuncAnimation(fig, update_line, nb_steps + 1,
                                   fargs=(data, line), interval=200, blit=False)
# The 'imagemagick' writer needs ImageMagick installed; don't let a missing
# external tool kill the interactive display below.
try:
    line_ani.save('gradient_descent.gif', dpi=80, writer='imagemagick')
except (RuntimeError, FileNotFoundError, ValueError) as err:
    print("Could not save gradient_descent.gif (is ImageMagick installed?):", err)
plt.show()
结果: