1. 对于n为特征的每个维度，分别找到阈值使得分类错误率最小
2. 选择错误率最小的第i维构造根节点：
a) 预测xi>Θ
b) 分别构造true/false子树分支，各自的分类结果作为叶子

1. 构造根节点
2. 选择错误率最小的叶子
3. 仅使用与所选叶子相关的训练数据构造新节点
4. 使用构造的节点代替所选的叶子
5. 重复2-4步，直到所有叶子的错误率为0，或者达到预设的最大循环次数。

% NOTE(review): stray constructor signature for the weak-learner tree node.
% Its body is not present in this file -- presumably it lives in tree_node_w.m
% (the script below calls tree_node_w(3)). TODO: confirm against the toolbox.
function tree_node = tree_node_w(max_splits)

% Step 1: read features and labels from the workspace variable `file_data`
% (rows = samples; columns 1..10 = features, last column = class label).
% NOTE(review): `file_data` must already exist in the workspace -- the original
% `clear` on this line would have erased it and made the next line fail with an
% undefined-variable error, so it was removed.
clc
Data = file_data(:,1:10)';
Labels = file_data(:,end)';
Labels = Labels*2 - 1;   % map 0/1 labels to -1/+1 (0*2-1 = -1, 1*2-1 = +1)
MaxIter = 200; % boosting iterations
% Step 2: split data into training (odd samples) and control (even samples) sets
TrainData   = Data(:,1:2:end);
TrainLabels = Labels(1:2:end);
ControlData   = Data(:,2:2:end);
ControlLabels = Labels(2:2:end);
% Step 3: construct the weak learner
weak_learner = tree_node_w(3); % pass the number of tree splits to the constructor
% Step 4: training with Gentle AdaBoost
[GLearners GWeights] = GentleAdaBoost(weak_learner, TrainData, TrainLabels, MaxIter);
% Step 5: training with Modest AdaBoost
[MLearners MWeights] = ModestAdaBoost(weak_learner, TrainData, TrainLabels, MaxIter);
% Step 6: training with Real AdaBoost
% (this comment was previously a copy-paste of Step 5 and mislabelled "Modest")
[RLearners RWeights] = RealAdaBoost(weak_learner, TrainData, TrainLabels, MaxIter);
% Step 7: evaluate each ensemble on the control set
ResultG = sign(Classify(GLearners, GWeights, ControlData));
ResultM = sign(Classify(MLearners, MWeights, ControlData));
ResultR = sign(Classify(RLearners, RWeights, ControlData));
% Step 8: control-set misclassification rates
% (no trailing ';' so the values are echoed to the console)
ErrorG  = sum(ControlLabels ~= ResultG) / length(ControlLabels)
ErrorM  = sum(ControlLabels ~= ResultM) / length(ControlLabels)
ErrorR  = sum(ControlLabels ~= ResultR) / length(ControlLabels)

% Step 9: iteratively run the training one boosting round at a time, tracking
% the control-set error after every round.
% Pre-allocate the error curves: the original code read
% GAB_control_error(lrn_num) before it was ever written, which errors out on
% the first iteration.
GAB_control_error = zeros(1, MaxIter);
RAB_control_error = zeros(1, MaxIter);
MAB_control_error = zeros(1, MaxIter);
% Modest AdaBoost resumes from the ensemble trained in Step 5 above, matching
% how the Gentle/Real calls resume from GLearners/RLearners.  The original
% passed NuWeights/NuLearners before they were ever assigned.
NuLearners = MLearners;
NuWeights  = MWeights;
for lrn_num = 1 : MaxIter
clc;
disp(strcat('Boosting step: ', num2str(lrn_num),'/', num2str(MaxIter)));
% one additional Gentle AdaBoost round, continuing from the current ensemble
[GLearners GWeights] = GentleAdaBoost(weak_learner, TrainData, TrainLabels, 1, GWeights, GLearners);
GControl = sign(Classify(GLearners, GWeights, ControlData));
GAB_control_error(lrn_num) = sum(GControl ~= ControlLabels) / length(ControlLabels);
% one additional Real AdaBoost round
[RLearners RWeights] = RealAdaBoost(weak_learner, TrainData, TrainLabels, 1, RWeights, RLearners);
RControl = sign(Classify(RLearners, RWeights, ControlData));
RAB_control_error(lrn_num) = sum(RControl ~= ControlLabels) / length(ControlLabels);
% one additional Modest AdaBoost round
[NuLearners NuWeights] = ModestAdaBoost(weak_learner, TrainData, TrainLabels, 1, NuWeights, NuLearners);
NuControl = sign(Classify(NuLearners, NuWeights, ControlData));
MAB_control_error(lrn_num) = sum(NuControl ~= ControlLabels) / length(ControlLabels);
end