深度学习入门-斋藤康毅
深度学习入门-斋藤康毅 学习笔记
farell
失业中。。。。。。
展开
-
chapter 6 与学习相关技巧
1.参数更新gradient_descent_3d_update_process.py# -*- coding: utf-8 -*-"""Created on Fri Jul 9 09:13:20 2021@author: Bugs"""#方法一,利用关键字from matplotlib import pyplot as pltfrom mpl_toolkits.mplot3d import Axes3Dratio = 0.01def gradient_descent(f,原创 2021-07-21 14:51:31 · 167 阅读 · 0 评论 -
chapter 5 误差反向传播法
layer_naive.pyclass MulLayer: def __init__(self): self.x = None self.y = None def forward(self, x, y): self.x = x self.y = y out = x * y return out def backward(self, dout): dx = dou原创 2021-07-19 17:19:50 · 235 阅读 · 2 评论 -
chapter 4 神经网路的学习
gradient_1d.py# coding: utf-8import numpy as npimport matplotlib.pylab as pltdef numerical_diff(f, x): h = 1e-4 #0.0001 return (f(x+h) - f(x-h))/(2*h)def function_1(x): return 0.01*x**2 + 0.1*xdef tangent_line(f, x): d = numeric原创 2021-07-19 15:11:00 · 314 阅读 · 0 评论 -
chapter 3 神经网络
step_function.py# coding: utf-8import numpy as npimport matplotlib.pylab as pltdef step_function(x): return np.array(x > 0, dtype=np.int)if __name__ == '__main__': X = np.arange(-5.0, 5.0, 0.1) Y = step_function(X) plt.plot(X, Y)原创 2021-07-16 16:54:35 · 161 阅读 · 2 评论 -
chapter2 感知机
and_gate.py# coding: utf-8import numpy as npdef AND(x1, x2): x = np.array([x1, x2]) w = np.array([0.5, 0.5]) b = -0.7 tmp = np.sum(w*x) + b if tmp <= 0: return 0 else: return 1if __name__ == '__main__':原创 2021-07-16 15:37:52 · 73 阅读 · 0 评论