import numpy as np
# Activation function
def activation_relu(x):
    """ReLU activation: element-wise max(0, x).

    Args:
        x: numpy array of any shape.

    Returns:
        Array of the same shape with negative entries clamped to 0.

    Note: the parameter was renamed from ``input`` to ``x`` to stop
    shadowing the ``input`` builtin; all callers in this file pass it
    positionally, so the rename is safe.
    """
    # np.maximum broadcasts the scalar 0 against x element-wise.
    return np.maximum(0, x)
# A single fully connected layer
class Layer:
    """One dense (fully connected) layer followed by a ReLU activation."""

    def __init__(self, n_inputs, n_neurons):
        # Weight matrix has shape (n_inputs, n_neurons); one bias per neuron.
        # Both are drawn from a standard normal distribution.
        self.weights = np.random.randn(n_inputs, n_neurons)
        self.biases = np.random.randn(n_neurons)

    def layer_forward(self, inputs):
        """Return activation_relu(inputs @ weights + biases).

        Args:
            inputs: (batch, n_inputs) array.

        Returns:
            (batch, n_neurons) array of non-negative activations.
        """
        return activation_relu(np.dot(inputs, self.weights) + self.biases)
# A feed-forward network built from fully connected layers
class Network:
    """A simple feed-forward network: a chain of ReLU-activated Layers."""

    def __init__(self, network_shape):
        """Build one Layer per consecutive pair of sizes.

        Args:
            network_shape: list of layer widths, e.g. [3, 4, 5, 3] builds
                layers 3->4, 4->5, 5->3.
        """
        self.shape = network_shape
        self.layers = []
        # zip over consecutive pairs replaces the index-based range(len(...)) loop.
        for n_in, n_out in zip(network_shape[:-1], network_shape[1:]):
            self.layers.append(Layer(n_in, n_out))

    # Forward-pass function
    def network_forward(self, inputs, *, verbose=True):
        """Run a forward pass and collect every intermediate activation.

        Args:
            inputs: (batch, network_shape[0]) array.
            verbose: when True (the historical default), print the full list
                of activations. Pass verbose=False to silence the debug
                print when reusing this class outside the demo.

        Returns:
            List [inputs, layer1_out, ..., final_out] — one entry per
            layer plus the original inputs.
        """
        outputs = [inputs]
        for layer in self.layers:
            # Each layer consumes the previous entry in the outputs chain.
            outputs.append(layer.layer_forward(outputs[-1]))
        if verbose:
            print(outputs)
        return outputs
#-------------------------------Test----------------------------------
def test():
    """Build a 3-4-5-3 network and run one forward pass on a 2-sample batch."""
    features = np.array([-0.9, 0.5, 0.7])
    # Two mirrored samples: the second row is the element-wise negation
    # of the first (same values as the original hand-written matrix).
    batch = np.stack([features, -features])
    network = Network([3, 4, 5, 3])
    network.network_forward(batch)
#-------------------------------Run-----------------------------------
# Guard the demo so importing this module does not trigger a forward pass.
if __name__ == "__main__":
    test()
# 04_面向对象的网络 (04: object-oriented network) — first published 2023-10-27 16:03:11