Example usage for weka.core Instance classValue

List of usage examples for weka.core Instance classValue

Introduction

On this page you can find example usages of the weka.core Instance.classValue() method.

Prototype

public double classValue();

Source Link

Document

Returns an instance's class value as a floating-point number.

Usage

From source file:machinelearningcw.EnhancedLinearPerceptron.java

/**
 * Batch (offline) perceptron training: weight deltas are accumulated over a
 * full pass through the data and applied once at the end of each iteration.
 *
 * @param ins training instances; the last attribute is treated as the class
 * @return the squared-error count summed over all iterations, divided by the
 *         number of iterations (an average per-iteration error)
 */
public double offlinePerceptron(Instances ins) {
    double totalSquaredError = 0; // accumulated across ALL iterations, never reset
    final int attributeCount = ins.numAttributes() - 1; // last attribute is the class

    for (int iteration = 0; iteration < numberofiterations; iteration++) {
        // Deltas are gathered here and only applied after the full pass.
        double[] deltas = new double[attributeCount];

        for (Instance instance : ins) {
            // Weighted sum of the non-class attributes.
            double activation = 0;
            for (int i = 0; i < attributeCount; i++) {
                activation += w[i] * instance.value(i);
            }

            // Threshold the activation into a 0/1 prediction.
            double predicted = (activation >= 0) ? 1 : 0;
            double residual = instance.classValue() - predicted;

            // Accumulate the batch weight changes for this pass.
            for (int j = 0; j < attributeCount; j++) {
                deltas[j] += (0.5 * learning_rate) * (residual * instance.value(j));
            }
            totalSquaredError += residual * residual;
        }

        // Apply the accumulated deltas in a single batch update.
        for (int j = 0; j < w.length; j++) {
            w[j] += deltas[j];
        }
    }

    // Average the accumulated error over the number of iterations.
    totalSquaredError = totalSquaredError / numberofiterations;
    return totalSquaredError;
}

From source file:machinelearningcw.EnhancedLinearPerceptron.java

/**
 * Online (sequential) perceptron training: weights are updated immediately
 * after each instance is processed.
 *
 * @param ins training instances; the last attribute is treated as the class
 * @return the squared-error count from the FINAL pass only (reset each pass)
 */
public double perceptron(Instances ins) {
    double squaredErrors = 0;
    final int attributeCount = ins.numAttributes() - 1; // last attribute is the class

    for (int iteration = 0; iteration < numberofiterations; iteration++) { // stopping condition
        squaredErrors = 0; // only the last pass's error is reported
        for (Instance instance : ins) {
            // Weighted sum of the non-class attributes.
            double activation = 0;
            for (int i = 0; i < attributeCount; i++) {
                activation += w[i] * instance.value(i);
            }

            double predicted = (activation >= 0) ? 1 : 0;
            double residual = instance.classValue() - predicted;

            // Immediate per-instance weight update.
            for (int j = 0; j < attributeCount; j++) {
                w[j] = w[j] + 0.5 * learning_rate * (residual * instance.value(j));
            }

            squaredErrors += residual * residual;
        }
    }
    return squaredErrors;
}

From source file:machinelearningcw.EnhancedLinearPerceptron.java

/**
 * Runs 10-fold cross-validation comparing batch (offline) and online
 * perceptron training on the same folds.
 *
 * @param ins the data set to cross-validate on (copied; caller's data is untouched)
 * @return true when the offline variant makes MORE total errors than the
 *         online variant (i.e. the online variant should be preferred)
 * @throws Exception if training or classification fails
 */
public boolean crossValidation(Instances ins) throws Exception {
    // Work on a copy so the caller's instance order is not disturbed.
    Instances data = new Instances(ins);
    Instances train; // the new training data
    Instances test; // the new testing data

    int seed = 0;
    Random rand = new Random(seed); // fixed seed keeps the comparison reproducible
    data.randomize(rand);

    int folds = 10;
    int offlineErrors = 0;
    int onlineErrors = 0;

    for (int i = 0; i < folds; i++) {
        train = data.trainCV(folds, i);
        test = data.testCV(folds, i);

        // Reset the weights before EACH training run so both variants start
        // from the same initial state. (Previously only the online run was
        // preceded by a reset, so the offline run inherited whatever weights
        // the last online run left behind.)
        Arrays.fill(w, 1);
        offlinePerceptron(train);
        for (Instance inst : test) {
            if (classifyInstance(inst) != inst.classValue()) {
                offlineErrors += 1;
            }
        }

        Arrays.fill(w, 1);
        perceptron(train);
        for (Instance inst : test) {
            if (classifyInstance(inst) != inst.classValue()) {
                onlineErrors += 1;
            }
        }
    }

    // Compare the total error counts directly. The previous code divided both
    // totals by `folds` with INTEGER division first, which discarded the
    // remainders and could make genuinely different totals compare equal
    // (e.g. 15/10 == 12/10 == 1). Comparing totals is equivalent to comparing
    // means and loses no information.
    return offlineErrors > onlineErrors;
}

From source file:machinelearningcw.MachineLearningCw.java

/**
 * Trains an EnhancedLinearPerceptron on the training set and prints its
 * squared-error count and accuracy on the test set.
 *
 * @param train the training instances
 * @param test the test instances (standardised in place when the classifier
 *             was built with standardisation enabled)
 * @throws Exception if building or classifying fails
 */
public static void EnchancedPerceptron(Instances train, Instances test) throws Exception {
    EnhancedLinearPerceptron ep = new EnhancedLinearPerceptron();
    ep.buildClassifier(train);
    if (ep.setStandardiseAttributes) {
        // The model was trained on standardised data, so the test set must be
        // transformed the same way before classification.
        ep.standardizeAtrrbutes(test);
    }

    double errors = 0;
    for (Instance i : test) {
        // Classify once and reuse the result (the original called
        // classifyInstance twice per instance for the same value).
        double difference = i.classValue() - ep.classifyInstance(i);
        errors += difference * difference;
    }
    System.out.println("original errors: " + errors);
    double per = (test.numInstances() - errors) / test.numInstances();
    System.out.println("Accuracy: " + per);
}

From source file:machinelearningcw.perceptronClassifier.java

/**
 * Online perceptron training: weights are updated immediately after each
 * instance is processed.
 *
 * @param ins training instances; the last attribute is treated as the class
 * @return the squared-error count from the FINAL pass only (reset each pass)
 */
public double perceptron(Instances ins) {
    double error_count = 0; // count the number of errors
    final int attributeCount = ins.numAttributes() - 1; // last attribute is the class

    for (int h = 0; h < numberofiterations; h++) {
        error_count = 0; // only the last pass's error is reported
        for (Instance instance : ins) {
            // Weighted sum of the non-class attributes.
            double y = 0;
            for (int i = 0; i < attributeCount; i++) {
                y += (w[i] * (instance.value(i)));
            }

            // Threshold the activation into a 0/1 prediction.
            double match = (y >= 0) ? 1 : 0;
            double difference = instance.classValue() - match;

            // Immediate per-instance weight update.
            for (int j = 0; j < attributeCount; j++) {
                w[j] = w[j] + (0.5 * learning_rate * difference * instance.value(j));
            }
            error_count += (difference * difference);
        }
    }
    // (A leftover empty loop that once printed the weights was removed, along
    // with the commented-out debug output.)
    return error_count;
}

From source file:machinelearningq2.BasicNaiveBayesV1.java

/**
 * Builds the naive-Bayes model by counting, for each (attribute value,
 * class value, attribute index) triple, how often it occurs in the
 * training data, along with the per-class totals.
 *
 * @param ins the training instances; the last attribute is used as the class
 * @throws Exception if the classifier cannot be built
 */
@Override
public void buildClassifier(Instances ins) throws Exception {
    ins.setClassIndex(ins.numAttributes() - 1);
    countData = ins.size();
    // One counter per class value.
    classValueCounts = new int[ins.numClasses()];
    if (laplace) {
        laplaceCorrection(ins);
    }
    // Count class occurrences and (value, class, attribute-index) triples.
    for (Instance line : ins) {
        double classValue = line.classValue();
        classValueCounts[(int) classValue]++;
        for (int i = 0; i < line.numAttributes() - 1; i++) {
            double attributeValue = line.value(i);
            DataFound d = new DataFound(attributeValue, classValue, i);
            int index = data.indexOf(d);
            if (index == -1) {
                // First time this triple is seen: start tracking it.
                data.add(d);
            } else {
                data.get(index).incrementCount();
            }
        }
    }
    // NOTE(review): removed leftover debug output that printed the ENTIRE
    // training set and the raw class counts on every build.
}

From source file:machinelearningq2.ExtendedNaiveBayes.java

/**
 * Builds either a discrete or a gaussian naive-Bayes model depending on the
 * configured mode ("d" selects the discrete classifier; anything else builds
 * the gaussian model by computing per-class attribute means and spreads).
 *
 * @param ins the training instances; the last attribute is used as the class
 * @throws Exception if the classifier cannot be built
 */
@Override
public void buildClassifier(Instances ins) throws Exception {
    if ("d".equals(gausianOrDiscretise)) {
        buildDiscreteClassifier(ins);
    } else {
        countData = ins.size();
        // assigns the class position of the instance 
        ins.setClassIndex(ins.numAttributes() - 1);
        classValueCounts = new int[ins.numClasses()];
        // [class][attribute] accumulators; means holds SUMS until the
        // averaging loop below converts them to actual means.
        attributeMeans = new double[ins.numClasses()][ins.numAttributes() - 1];
        attributeVariance = new double[ins.numClasses()][ins.numAttributes() - 1];

        // Pass 1: count classes, sum attribute values per class, and record
        // every (value, class, attribute-index) observation in `data`.
        for (Instance line : ins) {
            double classValue = line.classValue();
            classValueCounts[(int) classValue]++;
            for (int i = 0; i < line.numAttributes() - 1; i++) {
                double attributeValue = line.value(i);
                attributeMeans[(int) classValue][i] += attributeValue;
                DataFound d = new DataFound(attributeValue, classValue, i);

                int index = data.indexOf(d);
                // then it doesn't exist
                if (index == -1) {
                    data.add(d);
                } else {
                    data.get(index).incrementCount();
                }
            }
        }
        System.out.println("Attribute Totals: " + Arrays.deepToString(attributeMeans));
        // Convert the per-class sums into means (must happen before the
        // variance pass below, which reads the finished means).
        for (int j = 0; j < classValueCounts.length; j++) {
            for (int i = 0; i < ins.numAttributes() - 1; i++) {
                attributeMeans[j][i] = attributeMeans[j][i] / classValueCounts[j];
            }
        }

        // Pass 2: sum squared deviations from the per-class means.
        // NOTE(review): each DataFound appears once per distinct value, so
        // repeated values contribute a single squared difference here —
        // confirm DataFound's count is meant to be ignored for the variance.
        for (int i = 0; i < data.size(); i++) {
            double cv = data.get(i).getClassValue();
            double atIn = data.get(i).getAttributeIndex();
            double squareDifference = Math
                    .pow(data.get(i).getAttributeValue() - attributeMeans[(int) cv][(int) atIn], 2);
            attributeVariance[(int) cv][(int) atIn] += squareDifference;
        }
        // Sample variance (n - 1 denominator), then sqrt — so despite its
        // name, attributeVariance ends up holding the STANDARD DEVIATION.
        for (int j = 0; j < classValueCounts.length; j++) {
            for (int i = 0; i < ins.numAttributes() - 1; i++) {
                attributeVariance[j][i] = attributeVariance[j][i] / (classValueCounts[j] - 1);
                attributeVariance[j][i] = Math.sqrt(attributeVariance[j][i]);
            }
        }
        System.out.println("Attribute Means: " + Arrays.deepToString(attributeMeans));
        System.out.println("Variance: " + Arrays.deepToString(attributeVariance));
    }
}

From source file:machinelearningq2.ExtendedNaiveBayes.java

/**
 * Discretises the training data and then builds the classifier by counting
 * per-class totals and (attribute value, class value, attribute index)
 * occurrences.
 *
 * @param ins the training instances; the last attribute is treated as the class
 * @throws Exception if discretisation or counting fails
 */
public void buildDiscreteClassifier(Instances ins) throws Exception {
    ins = discretize(ins);
    ins.setClassIndex(ins.numAttributes() - 1);
    countData = ins.size();
    // One counter per class value.
    classValueCounts = new int[ins.numClasses()];

    // Tally every observation in the discretised data.
    for (Instance row : ins) {
        double classValue = row.classValue();
        classValueCounts[(int) classValue]++;

        for (int attribute = 0; attribute < row.numAttributes() - 1; attribute++) {
            DataFound observation = new DataFound(row.value(attribute), classValue, attribute);
            int existing = data.indexOf(observation);
            if (existing >= 0) {
                // Seen before: bump its count.
                data.get(existing).incrementCount();
            } else {
                // New combination: start tracking it.
                data.add(observation);
            }
        }
    }
}

From source file:machinelearningq2.ExtendedNaiveBayes.java

/**
 * The method classifyInstance which should call your previous
 * distributionForInstance method and simply return the prediction as the
 * class with the largest probability/*from  w w w.  j a  v a  2  s .c  o  m*/
 *
 * @param instnc
 * @return
 * @throws Exception
 */
@Override
public double classifyInstance(Instance instnc) throws Exception {
    testCount++;
    double[] bayesCalculations;
    double actualClassValue = instnc.classValue();
    if ("d".equals(gausianOrDiscretise)) {
        bayesCalculations = distributionForDiscrete(instnc);
    } else {
        bayesCalculations = distributionForInstance(instnc);
    }
    double largest = 0;
    double largestIndex = 0;

    for (int i = 0; i < bayesCalculations.length; i++) {
        if (bayesCalculations[i] > largest) {
            largest = bayesCalculations[i];
            largestIndex = i;
        }
    }
    if (largestIndex == actualClassValue) {
        correctCount++;
    }

    return largestIndex;
}

From source file:machinelearning_cw.BasicKNN.java

@Override
public double classifyInstance(Instance instance) throws Exception {
    /* Calculate euclidean distances */
    double[] distances = Helpers.findEuclideanDistances(trainingData, instance);

    /* /*w  w  w  .jav  a2s.c  o  m*/
     * Create a list of dictionaries where each dictionary contains
     * the keys "distance" and "id".
     * The distance key stores the euclidean distance for an instance and 
     * the id key stores the hashcode for that instance object.
     */
    ArrayList<HashMap<String, Object>> table = Helpers.buildDistanceTable(trainingData, distances);

    /* Find the k smallest distances */
    Object[] kClosestRows = new Object[k];
    Object[] kClosestInstances = new Object[k];
    double[] classValues = new double[k];

    for (int i = 1; i <= k; i++) {
        ArrayList<Integer> tieIndices = new ArrayList<Integer>();

        /* Find the positions in the table of the ith closest neighbour */
        int[] closestRowIndices = Helpers.findNthClosestNeighbour(table, i);

        if (closestRowIndices.length > 0) {
            /* Keep track of distance ties */
            for (int j = 0; j < closestRowIndices.length; j++) {
                tieIndices.add(closestRowIndices[j]);
            }

            /* Break ties (by choosing winner at random) */
            Random rand = new Random();
            int matchingNeighbourPosition = tieIndices.get(rand.nextInt(tieIndices.size()));
            HashMap<String, Object> matchingRow = table.get(matchingNeighbourPosition);
            kClosestRows[i - 1] = matchingRow;
        }
    }

    /* 
     * Find the closestInstances from their rows in the table and also
     * get their class values.
     */
    for (int i = 0; i < kClosestRows.length; i++) {
        /* Build up closestInstances array */
        for (int j = 0; j < trainingData.numInstances(); j++) {
            Instance inst = trainingData.get(j);
            HashMap<String, Object> row = (HashMap<String, Object>) kClosestRows[i];
            if (Integer.toHexString(inst.hashCode()).equals(row.get("id"))) {
                kClosestInstances[i] = inst;
            }
        }

        /* Keep track of the class values of the closest instanes */
        Instance inst = (Instance) kClosestInstances[i];
        classValues[i] = inst.classValue();
    }

    /* Return the most frequently occuring closest class */
    return Helpers.mode(Helpers.arrayToArrayList(classValues));
}