Day 64: The abstract base class for AdaBoost weak classifiers.

SimpleClassifier

package machinelearning.adaBoosting;

import java.util.Random;

import weka.core.Instance;

/**
 * ******************************************
 * The super class of any classifier.
 *
 * @author Michelle Min MitchelleMin@163.com
 * @date 2021-07-27
 * ******************************************
 */
public abstract class SimpleClassifier {

    /**
     * The index of the attribute the classifier splits on (set by subclasses
     * during training).
     */
    int selectedAttribute;

    /**
     * The weighted training data this classifier is built on.
     */
    WeightedInstances weightedInstances;

    /**
     * The accuracy on the training set.
     */
    double trainingAccuracy;

    /**
     * The number of classes. For binary classification it is 2.
     */
    int numClasses;

    /**
     * The number of instances in the training set.
     */
    int numInstances;

    /**
     * The number of conditional (non-class) attributes.
     */
    int numConditions;

    /**
     * For random number generation, e.g. random attribute selection in
     * subclasses.
     */
    Random random = new Random();

    /**
     * *******************
     * The first constructor. Caches the data set and its basic statistics.
     *
     * @param paraWeightedInstances The given instances.
     *                              *******************
     */
    public SimpleClassifier(WeightedInstances paraWeightedInstances) {
        weightedInstances = paraWeightedInstances;

        // The class attribute is excluded from the conditional attributes.
        numConditions = weightedInstances.numAttributes() - 1;
        numInstances = weightedInstances.numInstances();
        numClasses = weightedInstances.classAttribute().numValues();
    }//of the first constructor

    /**
     * *******************
     * Train the classifier on weightedInstances.
     * *******************
     */
    public abstract void train();

    /**
     * *******************
     * Classify an instance.
     *
     * @param paraInstance The given instance.
     * @return Predicted label.
     * *******************
     */
    public abstract int classify(Instance paraInstance);

    /**
     * *******************
     * Which instances in the training set are correctly classified.
     *
     * @return The correctness array; element i is true iff instance i is
     *         classified correctly.
     * *******************
     */
    public boolean[] computeCorrectnessArray() {
        boolean[] resultCorrectnessArray = new boolean[weightedInstances.numInstances()];
        for (int i = 0; i < resultCorrectnessArray.length; i++) {
            Instance tempInstance = weightedInstances.instance(i);
            if ((int) (tempInstance.classValue()) == classify(tempInstance)) {
                resultCorrectnessArray[i] = true;
            }//of if
        }//of for i
        return resultCorrectnessArray;
    }//of computeCorrectnessArray()

    /**
     ********************
     * Compute the (unweighted) accuracy on the training set.
     *
     * @return The training accuracy in [0, 1].
     ********************
     */
    public double computeTrainingAccuracy(){
        double tempCorrect = 0;
        boolean[] tempCorrectnessArray = computeCorrectnessArray();
        for (int i = 0; i < tempCorrectnessArray.length; i++) {
            if (tempCorrectnessArray[i]) {
                tempCorrect++;
            }//of if
        }//of for i

        double resultAccuracy = tempCorrect / tempCorrectnessArray.length;

        return resultAccuracy;
    }//of computeTrainingAccuracy

    /**
     ********************
     * Compute the weighted error on the training set. It is at least 1e-6 to
     * avoid NaN in the subsequent AdaBoost alpha computation.
     *
     * @return The weighted error, lower-bounded by 1e-6.
     ********************
     */
    public double computeWeightedError(){
        double resultError = 0;
        boolean[] tempCorrectnessArray = computeCorrectnessArray();
        for (int i = 0; i < tempCorrectnessArray.length; i++) {
            if (!tempCorrectnessArray[i]) {
                resultError += weightedInstances.getWeight(i);
            }//of if
        }//of for i

        // Lower-bound the error so log((1 - e) / e) stays finite.
        if (resultError < 1e-6) {
            resultError = 1e-6;
        }//of if

        return resultError;
    }//of computeWeightedError
}//of class SimpleClassifier

StumpClassifier: a one-level decision-stump weak classifier.

package machinelearning.adaBoosting;

import weka.core.Instance;
import java.io.FileReader;
import java.util.*;

/**
 * ******************************************
 * The stump classifier.<br>
 *
 * @author Michelle Min MitchelleMin@163.com
 * @date 2021-07-27
 * ******************************************
 */
public class StumpClassifier extends SimpleClassifier{
    /**
     * The best cut (threshold) for the selected attribute on weightedInstances.
     */
    double bestCut;

    /**
     * The class label for instances whose attribute value is less than bestCut.
     */
    int leftLeafLabel;

    /**
     * The class label for instances whose attribute value is no less than bestCut.
     */
    int rightLeafLabel;

    /**
     ********************
     * The only constructor.
     *
     * @param paraWeightedInstances
     *            The given instances.
     ********************
     */
    public StumpClassifier(WeightedInstances paraWeightedInstances){
        super(paraWeightedInstances);
    }//of the only constructor

    /**
     ********************
     * Train the classifier: randomly select one attribute, then pick the cut
     * point and leaf labels that maximize the total weight of correctly
     * classified instances.
     ********************
     */
    public void train(){
        //Step 1. Randomly choose an attribute.
        selectedAttribute = random.nextInt(numConditions);

        //Step 2. Find all attribute values and sort.
        double[] tempValuesArray = new double[numInstances];
        for (int i = 0; i < tempValuesArray.length; i++) {
            tempValuesArray[i] = weightedInstances.instance(i).value(selectedAttribute);
        }//of for i
        Arrays.sort(tempValuesArray);

        //Step 3. Initialize, classify all instances as the same with the original cut.
        int tempNumLabels = numClasses;
        double[] tempLabelCountArray = new double[tempNumLabels];
        int tempCurrentLabel;

        // Step 3.1 Scan all labels to obtain their weighted counts.
        for (int i = 0; i < numInstances; i++) {
            //The label of the ith instance
            tempCurrentLabel = (int) weightedInstances.instance(i).classValue();
            tempLabelCountArray[tempCurrentLabel] += weightedInstances.getWeight(i);
        }//of for i

        // Step 3.2 Find the label with the maximal weighted count.
        double tempMaxCorrect = 0;
        int tempBestLabel = -1;
        for (int i = 0; i < tempLabelCountArray.length; i++) {
            if (tempMaxCorrect < tempLabelCountArray[i]) {
                tempMaxCorrect = tempLabelCountArray[i];
                tempBestLabel = i;
            }//of if
        }//of for i

        // Step 3.3 The cut is a little bit smaller than the minimal value,
        // so both leaves initially predict the majority label.
        bestCut = tempValuesArray[0] - 0.1;
        leftLeafLabel = tempBestLabel;
        rightLeafLabel = tempBestLabel;

        // Step 4. Check candidate cuts one by one.
        // Step 4.1 To handle multi-class data, keep weighted counts for left and right.
        double tempCut;
        double[][] tempLabelCountMatrix = new double[2][tempNumLabels];

        for (int i = 0; i < tempValuesArray.length - 1; i++) {
            // Step 4.2 Some attribute values are identical, ignore them.
            if (tempValuesArray[i] == tempValuesArray[i + 1]) {
                continue;
            }//of if
            tempCut = (tempValuesArray[i] + tempValuesArray[i + 1]) / 2;

            // Step 4.3 Scan all labels to obtain their counts wrt. the cut.
            // Initialize again since it is used many times.
            for (int j = 0; j < 2; j++) {
                for (int k = 0; k < tempNumLabels; k++) {
                    tempLabelCountMatrix[j][k] = 0;
                }//of for k
            }//of for j

            for (int j = 0; j < numInstances; j++) {
                // The label of the jth instance
                tempCurrentLabel = (int) weightedInstances.instance(j).classValue();
                if (weightedInstances.instance(j).value(selectedAttribute) < tempCut) {
                    tempLabelCountMatrix[0][tempCurrentLabel] += weightedInstances.getWeight(j);
                }else{
                    tempLabelCountMatrix[1][tempCurrentLabel] += weightedInstances.getWeight(j);
                }//of if
            }//of for j

            // Step 4.4 Best label for the left leaf.
            double tempLeftMaxCorrect = 0;
            int tempLeftBestLabel = 0;
            for (int j = 0; j < tempLabelCountMatrix[0].length; j++) {
                if (tempLeftMaxCorrect < tempLabelCountMatrix[0][j]) {
                    tempLeftMaxCorrect = tempLabelCountMatrix[0][j];
                    tempLeftBestLabel = j;
                }//of if
            }//of for j

            // Step 4.5 Best label for the right leaf.
            double tempRightMaxCorrect = 0;
            int tempRightBestLabel = 0;
            for (int j = 0; j < tempLabelCountMatrix[1].length; j++) {
                if (tempRightMaxCorrect < tempLabelCountMatrix[1][j]) {
                    tempRightMaxCorrect = tempLabelCountMatrix[1][j];
                    // Bug fix: was "tempLeftBestLabel = j", which clobbered the
                    // left leaf's label and never recorded the right one.
                    tempRightBestLabel = j;
                }//of if
            }//of for j

            // Step 4.6 Compare with the current best.
            // Bug fix: the right-hand term was "tempRightBestLabel" (a label
            // index), not the right leaf's weighted correct count.
            if (tempMaxCorrect < tempLeftMaxCorrect + tempRightMaxCorrect) {
                tempMaxCorrect = tempLeftMaxCorrect + tempRightMaxCorrect;
                bestCut = tempCut;
                leftLeafLabel = tempLeftBestLabel;
                rightLeafLabel = tempRightBestLabel;
            }//of if
        }//of for i

        System.out.println("Attribute = " + selectedAttribute + ", cut = " + bestCut + ", leftLeafLabel = "
                + leftLeafLabel + ", rightLeafLabel = " + rightLeafLabel);
    }//of train

    /**
     ********************
     * Classify an instance by comparing the selected attribute against the cut.
     *
     * @param paraInstance
     *            The given instance.
     * @return Predicted label.
     ********************
     */
    public int classify(Instance paraInstance){
        int resultLabel = -1;
        if (paraInstance.value(selectedAttribute) < bestCut) {
            resultLabel = leftLeafLabel;
        }else{
            resultLabel = rightLeafLabel;
        }//of if
        return resultLabel;
    }//of classify

    /**
     ********************
     * For display.
     ********************
     */
    public String toString(){
        // Message fixes: "I an a" -> "I am a"; missing space after "is:".
        String resultString = "I am a stump classifier. \r\n" + "I choose attribute #" + selectedAttribute
                + " with cut value " + bestCut + ".\r\n" + "The left and right leaf labels are " + leftLeafLabel
                + " and " + rightLeafLabel + ", respectively.\r\n" + "My weighted error is: " + computeWeightedError()
                + ".\r\n" + "My weighted accuracy is: " + computeTrainingAccuracy() + ".";

        return resultString;
    }//of toString

    /**
     ********************
     * For unit test: train a stump on the iris data and display it.
     *
     * @param args
     *            Not provided.
     ********************
     */
    public static void main(String[] args){
        WeightedInstances tempWeightedInstances = null;
        String tempFilename = "D:/mitchelles/data/iris.arff";
        try{
            FileReader tempFileReader = new FileReader(tempFilename);
            tempWeightedInstances = new WeightedInstances(tempFileReader);
            tempFileReader.close();
        }catch (Exception ee){
            System.out.println("Cannot read the file: " + tempFilename + "\r\n" + ee);
            System.exit(0);
        }//of try

        StumpClassifier tempClassifier = new StumpClassifier(tempWeightedInstances);
        tempClassifier.train();
        System.out.println(tempClassifier);

        System.out.println(Arrays.toString(tempClassifier.computeCorrectnessArray()));
    }//of main

}//of StumpClassifier

评论
添加红包

请填写红包祝福语或标题

红包个数最小为10个

红包金额最低5元

当前余额3.43前往充值 >
需支付:10.00
成就一亿技术人!
领取后你会自动成为博主和红包主的粉丝 规则
hope_wisdom
发出的红包
实付
使用余额支付
点击重新获取
扫码支付
钱包余额 0

抵扣说明:

1.余额是钱包充值的虚拟货币,按照1:1的比例进行支付金额的抵扣。
2.余额无法直接购买下载,可以购买VIP、付费专栏及课程。

余额充值