% MATLAB使用简单神经网络实现函数逼近 (function approximation with a simple neural network)
clc
clear
% Training set: 601 samples in [-3, 3], step 0.01.
Data = -3:0.01:3;
datasize = numel(Data);          % number of training samples
Value = sin(Data);               % target outputs y = sin(x) (vectorized, no loop needed)
hidesize = 10;                   % number of hidden-layer neurons
W1 = rand(hidesize,1);           % input-to-hidden weights (10)
B1 = rand(hidesize,1);           % hidden-layer biases (10)
W2 = rand(1,hidesize);           % hidden-to-output weights (10)
B2 = rand(1,1);                  % output-layer bias (1)
lr = 0.005;                      % learning rate
TotalEpoch = 5000;               % number of training epochs
lossList = zeros(1,TotalEpoch);  % total absolute error recorded per epoch
Y = zeros(1,datasize);           % network predictions from the most recent pass
% Online (per-sample) gradient descent on squared error.
for epoch = 1:TotalEpoch
    loss = 0;  % reset accumulated absolute error for this epoch
    for i = 1:datasize
        x = Data(i);                      % current input sample
        % Forward pass: linear -> sigmoid -> linear.
        hidein = x*W1 - B1;               % hidden pre-activation (hidesize x 1)
        hideout = 1./(1 + exp(-hidein));  % sigmoid activation, vectorized
        y = W2*hideout - B2;              % scalar network output
        Y(i) = y;                         % store prediction for plotting
        e = y - Value(i);                 % signed error vs. target sin(x)
        % Backward pass. Shared factor for the hidden layer:
        % sgrad(j) = W2(j) * sigma'(hidein(j)), with sigma' = s*(1-s).
        % Reuses hideout instead of recomputing exp() per neuron.
        sgrad = W2' .* hideout .* (1 - hideout);
        dB2 = -lr*e;                      % output-bias step (dy/dB2 = -1)
        dW2 = lr*e*hideout';              % output-weight step
        dB1 = -lr*e*sgrad;                % hidden-bias step (dhidein/dB1 = -1)
        dW1 = lr*e*x*sgrad;               % hidden-weight step
        % Apply the updates (descent direction already folded into signs above).
        W1 = W1 - dW1;
        B1 = B1 - dB1;
        W2 = W2 - dW2;
        B2 = B2 - dB2;
        loss = loss + abs(e);             % accumulate L1 error over the epoch
    end
    lossList(epoch) = loss;               % record this epoch's total error
    if mod(epoch,100) == 0
        % Progress report every 100 epochs.
        fprintf('epoch %d: loss = %f\n', epoch, loss);
    end
end
% Visualize the training results.
figure(1);                         % figure 1: error vs. epoch
plot(lossList,'r');                % total absolute error per epoch
title('总误差变化曲线');
axis([0,TotalEpoch,0,400]);        % fix axes to the full training range
figure(2);                         % figure 2: fit vs. ground truth
plot(Data,Value,'r',Data,Y,'k');   % truth (red) and prediction (black) in one call
legend('红线为正确值','黑线表示训练后输出值');
title('sinx在[-3,3]训练集曲线');