//
X = lyap(A,Q)                 % Lyapunov 方程
[P,l,g] = care(A, B, Q, R)    % 连续代数 Riccati 方程
[K,S,e] = lqry(sys,Q,R,N)     % LQRY (输出调节器), 目标函数是 f(y,u)
[K,S,e] = lqr(SYS,Q,R,N)      % LQR, 目标函数是 f(x,u)
reg = lqg(sys,QXU,QWV)        % LQG, 线性二次高斯调节器, 目标函数是 E(f(x,u))
5. 可观性和可控性
% Build a state-space model and check controllability/observability.
% NOTE(review): assumes A, B, C, D are defined earlier in the session — confirm.
sys =ss(A,B,C,D);
co =ctrb(sys);rank(co);   % controllability matrix; full rank => controllable
ob =obsv(sys);rank(ob);   % observability matrix; full rank => observable
6. 使用MATLAB神经网络工具箱
% Algorithm Studio
% Build a custom 3-layer feed-forward network (1 -> 3 -> 1) by hand.
numInputs = 1;
numLayers = 3;
biasConnect = [1; 1; 1];     % every layer gets a bias
inputConnect = [1; 0; 0];    % external input feeds layer 1 only
% FIX: the connection matrices were garbled — [000;100;010] parses as the
% 3x1 vector [0;100;10] and [001] as the scalar 1, not as logical matrices.
layerConnect = [0 0 0; 1 0 0; 0 1 0];   % feed-forward chain: layer 1 -> 2 -> 3
outputConnect = [0 0 1];                % network output taken from layer 3
net = network(numInputs, numLayers, biasConnect, inputConnect, layerConnect, outputConnect);
% net.inputs{1}.range = [-1 1];
net.inputs{1}.name = '误差';            % input: the tracking error signal
% net.layers{1}.dimensions = 3;
net.layers{1}.size = 1;
net.layers{1}.name = '输入层';          % "input layer"
% net.outputs{1}.feedbackMode = 'open';
% net.layers{2}.dimensions = 9;
net.layers{2}.size = 3;
net.layers{2}.name = '隐藏层';          % "hidden layer"
% net.outputs{2}.feedbackMode = 'open';
% net.layers{3}.dimensions = 1;
net.layers{3}.size = 1;
net.layers{3}.name = '输出层';          % "output layer"
% net.outputs{3}.feedbackMode = 'open';
net.inputWeights{1,1}.learnFcn = 'learngdm';
net.layerWeights{2,1}.learnFcn = 'learngdm';
net.layers{1}.transferFcn = 'tansig';
net.layers{2}.transferFcn = 'tansig';
net.layers{3}.transferFcn = 'tansig';
for i = 1:3
    net.layers{i}.initFcn = 'initnw';   % Nguyen-Widrow layer initialization
end
net.inputs{1}.processFcns = {'removeconstantrows','mapminmax'};
net.outputs{3}.processFcns = {'removeconstantrows','mapminmax'};
net.performFcn = 'mse';
net.divideFcn = 'dividerand';   % FIX: property name is divideFcn (was dividefcn)
net.adaptFcn = 'adaptwb';
net.trainFcn = 'trainlm';
net = init(net);
% view(net)
% Training data: the identity mapping over 1..1000.
x = 1:1000;
% x = x/57.3;
y = x;
net = train(net, x, y);

% Simulation / model validation on a few unseen points.
p = [4.5, 100.5, 555.5, 777.1, 999.4];
out = sim(net, p);
figure('Name', 'AS.CK:模型验证', 'NumberTitle', 'off');
plot(p, out, '*')
hold on
plot(x, y)   % FIX: legend was fused onto this line with no separator (syntax error)
legend('模型预测值', '采样实际值')
grid on
grid minor