一、框架代码
import numpy as np
import scipy.special
import matplotlib.pyplot as plt
%matplotlib inline
class neuralNetwork:
    """Skeleton of a three-layer neural network; the methods are filled
    in later in the article.

    Fix vs. the original skeleton: every method was declared without
    ``self`` (e.g. ``def __init__():``), so ``neuralNetwork(...)`` and any
    method call raised TypeError.  Signatures now match the final version.
    """

    def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
        # placeholder — real initialisation is implemented later
        pass

    def query(self, inputs_list):
        # placeholder — real forward pass is implemented later
        pass

    def train(self, inputs_list, targets_list):
        # placeholder — real training step is implemented later
        pass


if __name__ == '__main__':
    # placeholder — the original line had no body, which is a syntax error
    pass
二、初始化网络
def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
self.inputnodes = inputnodes
self.hiddennodes = hiddennodes
self.outputnodes = outputnodes
self.learningrate = learningrate
self.wih = np.random.normal(0.0, np.power(self.hiddennodes, -0.5), (self.hiddennodes, self.inputnodes))
self.who = np.random.normal(0.0,np.power(self.outputnodes, -0.5),(self.outputnodes, self.hiddennodes))
self.activation_function = lambda x:scipy.special.expit(x)
pass
三、查询
def query(self, inputs_list):
    """Forward-propagate inputs_list through the network and return the
    output-layer activations as a column vector."""
    # turn the input list into a column vector
    x = np.array(inputs_list, ndmin=2).T
    # input -> hidden
    hidden_out = self.activation_function(np.dot(self.wih, x))
    # hidden -> output
    return self.activation_function(np.dot(self.who, hidden_out))
四、训练
def train(self, inputs_list, targets_list):
    """Run one gradient-descent step on a single (inputs, targets) pair."""
    # column vectors
    x = np.array(inputs_list, ndmin=2).T
    t = np.array(targets_list, ndmin=2).T
    # forward pass
    hidden_out = self.activation_function(np.dot(self.wih, x))
    final_out = self.activation_function(np.dot(self.who, hidden_out))
    # output-layer error, then back-propagated hidden-layer error
    out_err = t - final_out
    hid_err = np.dot(self.who.T, out_err)  # uses who *before* its update
    # weight updates; y * (1 - y) is the sigmoid derivative
    self.who += self.learningrate * np.dot(out_err * final_out * (1.0 - final_out), hidden_out.T)
    self.wih += self.learningrate * np.dot(hid_err * hidden_out * (1.0 - hidden_out), x.T)
五、执行
if __name__ == '__main__':
    # --- network hyper-parameters ---
    inputnodes = 784       # 28x28 pixels per MNIST image
    hiddennodes = 100
    outputnodes = 10       # one output node per digit 0-9
    learningrate = 0.2

    # --- train the neuralNetwork ---
    myNN = neuralNetwork(inputnodes, hiddennodes, outputnodes, learningrate)
    with open('mnist_train_100.csv', 'r') as training_data_file:
        training_data_list = training_data_file.readlines()
    for record in training_data_list:
        all_values = record.split(',')
        # scale pixel values from 0..255 into 0.01..1.00
        # (np.asfarray was removed in NumPy 2.0)
        inputs = np.asarray(all_values[1:], dtype=float) / 255.0 * 0.99 + 0.01
        # target vector: 0.01 everywhere, 0.99 at the correct digit
        targets = np.zeros(outputnodes) + 0.01
        targets[int(all_values[0])] = 0.99
        myNN.train(inputs, targets)

    # --- test the neuralNetwork ---
    with open('mnist_test_10.csv', 'r') as test_data_file:
        test_data_list = test_data_file.readlines()
    scorecard = []
    # one shared figure for all test digits; the original created a new
    # 30x30 figure inside the loop, so every image ended up alone on its
    # own mostly-empty figure
    fig = plt.figure(figsize=(30, 30))
    ncols = max(len(test_data_list), 1)
    for i, record in enumerate(test_data_list, start=1):
        all_values = record.split(',')
        ax = fig.add_subplot(1, ncols, i)
        ax.imshow(
            np.asarray(all_values[1:], dtype=float).reshape((28, 28)),
            cmap='Greys',
            interpolation='None'
        )
        correct_label = int(all_values[0])
        inputs = np.asarray(all_values[1:], dtype=float) / 255.0 * 0.99 + 0.01
        outputs = myNN.query(inputs)
        label = np.argmax(outputs)
        print('correct_label =', correct_label, "myNN's answer =", label)
        scorecard.append(1 if label == correct_label else 0)
    plt.show()
    print(scorecard)
    scorecard_array = np.asarray(scorecard)
    print("myNN's performance=", scorecard_array.sum() / scorecard_array.size)
六、ALL
import numpy as np
import scipy.special
import matplotlib.pyplot as plt
class neuralNetwork:
def __init__(self, inputnodes, hiddennodes, outputnodes, learningrate):
self.inputnodes = inputnodes
self.hiddennodes = hiddennodes
self.outputnodes = outputnodes
self.learningrate = learningrate
self.wih = np.random.normal(0.0, np.power(self.hiddennodes, -0.5), (self.hiddennodes, self.inputnodes))
self.who = np.random.normal(0.0,np.power(self.outputnodes, -0.5),(self.outputnodes, self.hiddennodes))
self.activation_function = lambda x:scipy.special.expit(x)
pass
def query(self,inputs_list):
inputs = np.array(inputs_list, ndmin=2).T
hidden_inputs = np.dot(self.wih, inputs)
hidden_outputs = self.activation_function(hidden_inputs)
final_inputs = np.dot(self.who, hidden_outputs)
final_outputs = self.activation_function(final_inputs)
return final_outputs
pass
def train(self, inputs_list, targets_list):
inputs = np.array(inputs_list, ndmin=2).T
targets = np.array(targets_list, ndmin=2).T
hidden_inputs = np.dot(self.wih, inputs)
hidden_outputs = self.activation_function(hidden_inputs)
final_inputs = np.dot(self.who, hidden_outputs)
final_outputs = self.activation_function(final_inputs)
output_errors = targets - final_outputs
hidden_errors = np.dot(self.who.T, output_errors)
self.who += self.learningrate * np.dot(
(output_errors * final_outputs * (1.0 - final_outputs)),
np.transpose(hidden_outputs)
)
self.wih += self.learningrate * np.dot(
(hidden_errors * hidden_outputs * (1.0 - hidden_outputs)),
np.transpose(inputs)
)
pass
pass
if __name__ == '__main__':
    # --- network hyper-parameters ---
    inputnodes = 784       # 28x28 pixels per MNIST image
    hiddennodes = 100
    outputnodes = 10       # one output node per digit 0-9
    learningrate = 0.2

    # --- train the neuralNetwork ---
    myNN = neuralNetwork(inputnodes, hiddennodes, outputnodes, learningrate)
    with open('mnist_train_100.csv', 'r') as training_data_file:
        training_data_list = training_data_file.readlines()
    for record in training_data_list:
        all_values = record.split(',')
        # scale pixel values from 0..255 into 0.01..1.00
        # (np.asfarray was removed in NumPy 2.0)
        inputs = np.asarray(all_values[1:], dtype=float) / 255.0 * 0.99 + 0.01
        # target vector: 0.01 everywhere, 0.99 at the correct digit
        targets = np.zeros(outputnodes) + 0.01
        targets[int(all_values[0])] = 0.99
        myNN.train(inputs, targets)

    # --- test the neuralNetwork ---
    with open('mnist_test_10.csv', 'r') as test_data_file:
        test_data_list = test_data_file.readlines()
    scorecard = []
    # one shared figure for all test digits; the original created a new
    # 30x30 figure inside the loop, so every image ended up alone on its
    # own mostly-empty figure
    fig = plt.figure(figsize=(30, 30))
    ncols = max(len(test_data_list), 1)
    for i, record in enumerate(test_data_list, start=1):
        all_values = record.split(',')
        ax = fig.add_subplot(1, ncols, i)
        ax.imshow(
            np.asarray(all_values[1:], dtype=float).reshape((28, 28)),
            cmap='Greys',
            interpolation='None'
        )
        correct_label = int(all_values[0])
        inputs = np.asarray(all_values[1:], dtype=float) / 255.0 * 0.99 + 0.01
        outputs = myNN.query(inputs)
        label = np.argmax(outputs)
        print('correct_label =', correct_label, "myNN's answer =", label)
        scorecard.append(1 if label == correct_label else 0)
    plt.show()
    print(scorecard)
    scorecard_array = np.asarray(scorecard)
    print("myNN's performance=", scorecard_array.sum() / scorecard_array.size)
效果:
reference:
《Python神经网络编程》(Tariq Rashid, "Make Your Own Neural Network", Chinese edition, 2018)