Example usage for weka.core Instance numAttributes

List of usage examples for weka.core Instance numAttributes

Introduction

On this page you can find example usage for weka.core Instance numAttributes.

Prototype

public int numAttributes();

Document

Returns the number of attributes.
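For quick reference, here is a minimal, self-contained sketch (not taken from the source files below) showing how numAttributes() is typically used. The data set, attribute names and class values are made up for illustration, and the snippet assumes a Weka version whose Instances constructor accepts an ArrayList of attributes (3.7 or later).

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class NumAttributesDemo {

    public static void main(String[] args) {
        // Build a tiny data set: two numeric attributes and a nominal class (illustrative names)
        ArrayList<Attribute> attributes = new ArrayList<>();
        attributes.add(new Attribute("length"));
        attributes.add(new Attribute("width"));
        ArrayList<String> classValues = new ArrayList<>();
        classValues.add("no");
        classValues.add("yes");
        attributes.add(new Attribute("class", classValues));

        Instances data = new Instances("demo", attributes, 0);
        data.setClassIndex(data.numAttributes() - 1);

        // Create one instance and attach it to the data set header
        Instance instance = new DenseInstance(data.numAttributes());
        instance.setDataset(data);
        instance.setValue(0, 1.5);
        instance.setValue(1, 0.8);
        instance.setValue(2, 1.0); // class value "yes"

        // numAttributes() counts every attribute, including the class attribute
        System.out.println("Number of attributes: " + instance.numAttributes());

        // Common pattern in the examples below: loop to numAttributes() - 1
        // to skip the class attribute, which assumes the class is stored last
        for (int i = 0; i < instance.numAttributes() - 1; i++) {
            System.out.println(instance.attribute(i).name() + " = " + instance.value(i));
        }
    }
}

Note that stopping a loop at numAttributes() - 1 only skips the class attribute when the class is the last attribute; classIndex() is the general way to locate it.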

Usage

From source file:machinelearningcw.EnhancedLinearPerceptron.java

public Instance standardizeAtrrbutes(Instance ins) {

    for (int n = 0; n < ins.numAttributes() - 1; n++) {
        double x = ((ins.value(n) - means[n]) / std[n]);

        ins.setValue(n, x);

    }

    return ins;
}

From source file:machinelearningcw.EnhancedLinearPerceptron.java

public Instances standardizeAtrrbutes(Instances ins) {

    for (Instance i : ins) {
        for (int n = 0; n < i.numAttributes() - 1; n++) {
            double x = ((i.value(n) - means[n]) / std[n]);

            i.setValue(n, x);

        }
    }

    return ins;
}

From source file:machinelearningcw.perceptronClassifier.java

@Override
public double classifyInstance(Instance instnc) throws Exception {
    double y = 0;
    for (int i = 0; i < instnc.numAttributes() - 1; i++) {
        y += (w[i] * (instnc.value(i)));

    }

    return (y >= 0) ? 1 : 0;
}

From source file:machinelearningcw.RandomLinearPerceptron.java

@Override
public double[] distributionForInstance(Instance instnc) throws Exception {
    Instance newInstance;
    double classify[] = new double[2];
    // loop over all attributes; the body was commented out in the original
    // source, since the values are copied via new DenseInstance(instnc) below
    for (int n = 0; n < instnc.numAttributes(); n++) {
        // inst.setValue(n, instnc.value(n));
    }
    for (int i = 0; i < ensemble.length; i++) {
        newInstance = new DenseInstance(instnc);
        //sort in reverse order to stop out of bounds exception
        Arrays.sort(attributesdeleted[i], Collections.reverseOrder());
        for (int j = 0; j < attributesdeleted[i].length; j++) {
            /* System.out.println("ATTEMPTING TO DELETE: 
              "+attributesdeleted[i][j]);*/
            //  System.out.println(newInstance.numAttributes()-1);

            //delete the attributes deleted in the buildclassify method
            newInstance.deleteAttributeAt(attributesdeleted[i][j]);

        }

        //add up all the predictions classified in the ensemble
        double result = ensemble[i].classifyInstance(newInstance);
        if (result == 0) {
            classify[0] += 1;
        } else {
            classify[1] += 1;
        }
    }
    //   System.out.println("0: "+ classify[0]+" 1: "+classify[1]);
    return classify;
}

From source file:machinelearninglabs.OENaiveBayesClassifier.java

@Override
public double[] distributionForInstance(Instance instance) throws Exception {
    // return an array with a size of the number of classes
    double[] jointProbabilities = new double[instance.attribute(instance.classIndex()).numValues()];
    double[] result = new double[instance.attribute(instance.classIndex()).numValues()];

    // calculate un-normalized probabilities
    for (int cls = 0; cls < jointProbabilities.length; cls++) {
        double p = classProbs[cls];
        for (int att = 0; att < instance.numAttributes() - 1; att++) {
            int value = (int) instance.value(att);
            p *= conditionalProbabilities[att][cls][value];
        }
        jointProbabilities[cls] = p;
    }

    // Find normalized probabilities
    for (int i = 0; i < jointProbabilities.length; i++) {
        double denominator = 0;
        for (int j = 0; j < jointProbabilities.length; j++) {
            denominator += jointProbabilities[j];
        }
        result[i] = jointProbabilities[i] / denominator;
    }
    return result;
}

From source file:machinelearningproject.Tree.java

public String traverseTree(Instance instance) {
    String attrValue = "";
    Tree buffTree = this;
    while (!buffTree.isLeaf()) {
        //get attribute value of an instance
        for (int i = 0; i < instance.numAttributes(); i++) {
            if (instance.attribute(i).name().equals(buffTree.attributeName)) {
                attrValue = instance.stringValue(i);
                break;
            }
        }

        //compare attribute with node value
        for (int i = 0; i < buffTree.nodes.size(); i++) {
            if (attrValue.equals(buffTree.nodes.get(i).value)) {
                buffTree = buffTree.nodes.get(i).subTree;
                break;
            }
        }
    }

    //isLeaf
    attrValue = buffTree.attributeName;

    return attrValue;
}

From source file:machinelearningq2.BasicNaiveBayesV1.java

/**
 *
 * This initial classifier will contain a two-dimensional array of counts
 *
 * @param ins
 * @throws Exception
 */
@Override
public void buildClassifier(Instances ins) throws Exception {
    ins.setClassIndex(ins.numAttributes() - 1);
    countData = ins.size();
    // assigns the class position of the instance 
    classValueCounts = new int[ins.numClasses()];
    System.out.println(ins);
    if (laplace == true) {
        laplaceCorrection(ins);
    }
    // store the values
    for (Instance line : ins) {
        double classValue = line.classValue();
        classValueCounts[(int) classValue]++;
        for (int i = 0; i < line.numAttributes() - 1; i++) {
            double attributeValue = line.value(i);
            DataFound d = new DataFound(attributeValue, classValue, i);
            int index = data.indexOf(d);
            // then it doesn't exist
            if (index == -1) {
                data.add(d);
            } else {
                data.get(index).incrementCount();
            }
        }
    }
    System.out.println("");

    System.out.println(Arrays.toString(classValueCounts));

}

From source file:machinelearningq2.ExtendedNaiveBayes.java

/**
 *
 * Build classifier will either build a Gaussian or a discrete classifier
 * depending on user input
 *
 * @param ins
 * @throws Exception
 */
@Override
public void buildClassifier(Instances ins) throws Exception {
    if ("d".equals(gausianOrDiscretise)) {
        buildDiscreteClassifier(ins);
    } else {
        countData = ins.size();
        // assigns the class position of the instance 
        ins.setClassIndex(ins.numAttributes() - 1);
        classValueCounts = new int[ins.numClasses()];
        attributeMeans = new double[ins.numClasses()][ins.numAttributes() - 1];
        attributeVariance = new double[ins.numClasses()][ins.numAttributes() - 1];

        // store the values
        for (Instance line : ins) {
            double classValue = line.classValue();
            classValueCounts[(int) classValue]++;
            for (int i = 0; i < line.numAttributes() - 1; i++) {
                double attributeValue = line.value(i);
                attributeMeans[(int) classValue][i] += attributeValue;
                DataFound d = new DataFound(attributeValue, classValue, i);

                int index = data.indexOf(d);
                // then it doesn't exist
                if (index == -1) {
                    data.add(d);
                } else {
                    data.get(index).incrementCount();
                }
            }
        }
        System.out.println("Attribute Totals: " + Arrays.deepToString(attributeMeans));
        // computes the means
        for (int j = 0; j < classValueCounts.length; j++) {
            for (int i = 0; i < ins.numAttributes() - 1; i++) {
                attributeMeans[j][i] = attributeMeans[j][i] / classValueCounts[j];
            }
        }

        // calculate the variance
        for (int i = 0; i < data.size(); i++) {
            double cv = data.get(i).getClassValue();
            double atIn = data.get(i).getAttributeIndex();
            double squareDifference = Math
                    .pow(data.get(i).getAttributeValue() - attributeMeans[(int) cv][(int) atIn], 2);
            attributeVariance[(int) cv][(int) atIn] += squareDifference;
        }
        for (int j = 0; j < classValueCounts.length; j++) {
            for (int i = 0; i < ins.numAttributes() - 1; i++) {
                attributeVariance[j][i] = attributeVariance[j][i] / (classValueCounts[j] - 1);
                attributeVariance[j][i] = Math.sqrt(attributeVariance[j][i]);
            }
        }
        System.out.println("Attribute Means: " + Arrays.deepToString(attributeMeans));
        System.out.println("Variance: " + Arrays.deepToString(attributeVariance));
    }
}

From source file:machinelearningq2.ExtendedNaiveBayes.java

/**
 * The method buildDiscreteClassifier discretizes the data and then builds a
 * classifier
 *
 * @param ins
 * @throws Exception
 */
public void buildDiscreteClassifier(Instances ins) throws Exception {
    ins = discretize(ins);
    ins.setClassIndex(ins.numAttributes() - 1);
    countData = ins.size();
    // assigns the class position of the instance 
    classValueCounts = new int[ins.numClasses()];
    // store the values
    for (Instance line : ins) {
        double classValue = line.classValue();
        classValueCounts[(int) classValue]++;
        for (int i = 0; i < line.numAttributes() - 1; i++) {
            double attributeValue = line.value(i);
            DataFound d = new DataFound(attributeValue, classValue, i);
            int index = data.indexOf(d);
            // then it doesn't exist
            if (index == -1) {
                data.add(d);
            } else {
                data.get(index).incrementCount();
            }
        }
    }

}

From source file:machinelearningq2.ExtendedNaiveBayes.java

/**
 *
 * The method distributionForInstance should work out the probabilities of
 * class membership for a single instance.
 *
 * @param instnc
 * @return
 * @throws Exception
 */
@Override
public double[] distributionForInstance(Instance instnc) throws Exception {

    if ("d".equals(gausianOrDiscretise)) {
        return super.distributionForInstance(instnc);
    }
    // creates a double array for storing the naive calculations for each class
    double[] prediction = new double[classValueCounts.length];
    for (int c = 0; c < classValueCounts.length; c++) {
        ArrayList<Double> likelihoods = new ArrayList<>();
        double priorProbability = classValueCounts[c] / countData;
        likelihoods.add(priorProbability);
        for (int i = 0; i < instnc.numAttributes() - 1; i++) {
            double currentMean = attributeMeans[c][i];
            double currentVariance = attributeVariance[c][i];
            double attributeValue = instnc.value(i);

            double likelihood = 1 / (Math.sqrt(2 * Math.PI) * currentVariance)
                    * Math.exp(-Math.pow(attributeValue - currentMean, 2) / (2 * Math.pow(currentVariance, 2)));
            likelihoods.add(likelihood);
        }
        double total = 1;
        for (Double x : likelihoods) {
            total *= x;
        }
        prediction[c] = total;
    }
    return prediction;
}