function [x] = conjugate_gradient(A, b, x0, tol, max_iter)
% CONJUGATE_GRADIENT  Solve A*x = b by the conjugate gradient method.
%   x = conjugate_gradient(A, b, x0, tol, max_iter) iterates from the
%   initial guess x0 until the residual 2-norm drops below tol or
%   max_iter iterations have run. A is assumed symmetric positive
%   definite (required for CG convergence).
%
%   Inputs:
%     A        - n-by-n SPD coefficient matrix
%     b        - n-by-1 right-hand side
%     x0       - n-by-1 initial guess
%     tol      - residual-norm stopping tolerance
%     max_iter - maximum number of iterations
%   Output:
%     x        - approximate solution (always defined, even if max_iter < 1
%                or x0 already satisfies the tolerance)

x = x0;                       % ensures x is defined even if we never iterate
r = b - A * x;                % initial residual
rs_old = r' * r;              % cached ||r||^2, reused for alpha and beta
if sqrt(rs_old) < tol
    return;                   % x0 already good enough
end
p = r;                        % first search direction
for k = 1:max_iter
    Ap = A * p;               % hoisted: A*p is needed twice per iteration
    alpha = rs_old / (p' * Ap);
    x = x + alpha * p;
    r = r - alpha * Ap;       % cheap residual update (no extra A*x product)
    rs_new = r' * r;
    % Check convergence before building the next direction, so no work
    % is wasted on a beta/p update that will never be used.
    if sqrt(rs_new) < tol
        break;
    end
    p = r + (rs_new / rs_old) * p;
    rs_old = rs_new;
end
end
% --- Problem setup -----------------------------------------------------
% Example SPD system (replace A and b with your own data). The original
% placeholders `A = ...;` were not valid MATLAB: `...` is the line
% continuation token, so those lines mis-parsed.
A = [4 3 0; 3 4 -1; 0 -1 4];   % coefficient matrix
b = [24; 30; -24];             % right-hand side vector
% Initial guess and tolerance shared by all iterative solvers.
x0 = zeros(size(b));
tol = 1e-6;
n = length(b);                 % system size; was previously used undefined
max_iter_cg = n;               % exact CG converges in at most n steps
max_iter_gs = 1000;            % iteration cap for Gauss-Seidel
max_iter_sor = 1000;           % iteration cap for SOR
% Conjugate gradient method
x_cg = conjugate_gradient(A, b, x0, tol, max_iter_cg);
% Gauss-Seidel iteration, warm-started from the CG solution
x_gs = gauss_seidel(A, b, x_cg, tol, max_iter_gs);
% SOR (successive over-relaxation), warm-started from the CG solution
x_sor = sor(A, b, x_cg, tol, max_iter_sor);
% Simple Gauss-Seidel implementation used by the driver script above.
function x = gauss_seidel(A, b, x0, tol, max_iter)
% GAUSS_SEIDEL  Solve A*x = b with the Gauss-Seidel iteration.
%   Sweeps through the unknowns in order, always using the freshest
%   available values in x. Stops when two successive iterates differ
%   by less than tol in the 2-norm, or after max_iter sweeps.
m = length(b);
x = x0;
for sweep = 1:max_iter
    for row = 1:m
        % Split the row product into the already-updated (lower) and
        % not-yet-updated (upper) parts of the current sweep.
        lower = A(row, 1:row-1) * x(1:row-1);
        upper = A(row, row+1:end) * x(row+1:end);
        x(row) = (b(row) - lower - upper) / A(row, row);
    end
    if norm(x - x0) < tol
        break;
    end
    x0 = x;   % remember this sweep's result for the next convergence check
end
end
% Successive over-relaxation (SOR) iteration.
function x = sor(A, b, x0, tol, max_iter, omega)
% SOR  Solve A*x = b by successive over-relaxation.
%   Each unknown gets a Gauss-Seidel-style update which is then blended
%   with the previous iterate using the relaxation factor omega.
%   When omega is omitted it defaults to 1.2.
if nargin < 6
    omega = 1.2; % tunable relaxation factor
end
m = length(b);
x = x0;
for sweep = 1:max_iter
    for row = 1:m
        % Gauss-Seidel candidate: updated values below the diagonal,
        % previous-sweep values (x0) above it.
        gs = (b(row) - A(row, 1:row-1) * x(1:row-1) ...
              - A(row, row+1:end) * x0(row+1:end)) / A(row, row);
        x(row) = (1 - omega) * x0(row) + omega * gs;
    end
    if norm(x - x0) < tol
        break;
    end
    x0 = x;
end
end
% Jacobi iteration with an a-priori convergence check.
function [x, n] = jacobi(A, b, x0, eps)
% JACOBI  Solve A*x = b with the Jacobi iteration.
%   [x, n] = jacobi(A, b, x0, eps) splits A = D - L - U, forms the
%   iteration matrix BJ = D \ (L + U), and iterates x <- BJ*x + f until
%   successive iterates differ by less than eps in the infinity norm.
%   n is the number of iterations performed.
%
%   If the spectral radius of BJ is >= 1 the iteration cannot converge;
%   a message is printed and the initial guess is returned with n = 0
%   (the original code returned with x and n unassigned, which errored
%   at any call site requesting outputs).

% Build the iteration matrix from the splitting A = D - L - U.
D = diag(diag(A));
L = -tril(A, -1);
U = -triu(A, 1);
BJ = D \ (L + U);
f = D \ b;
% Convergence test: Jacobi converges iff the spectral radius of BJ < 1.
a = max(abs(eig(BJ)));
if a >= 1
    disp('Jacobi迭代不收敛');
    x = x0;   % keep outputs defined for callers expecting [x, n]
    n = 0;
    return    % do not iterate further
else
    n = 1;
    x = BJ * x0 + f;
    while norm(x - x0, inf) >= eps   % infinity norm
        x0 = x;
        x = BJ * x0 + f;
        n = n + 1;
    end
end
end  % missing in the original: every other function in this file is end-terminated
% Demo: solve a small symmetric system with the Jacobi iteration.
A = [4 3 0; 3 4 -1; 0 -1 4];
b = [24; 30; -24];
x0 = [0; 0; 0];
tol = 1.0e-6;   % renamed from `eps`, which shadowed the MATLAB builtin eps
[x, n] = jacobi(A, b, x0, tol);
% CG + Jacobi / Gauss-Seidel iteration
% First published: 2023-12-04 00:16:19