Example usage for weka.core Instance value

List of usage examples for weka.core Instance value

Introduction

On this page you can find example usage for weka.core Instance value.

Prototype

public double value(Attribute att);

Source Link

Document

Returns an instance's attribute value in internal format.

Usage

From source file:mlda.util.Utils.java

License:Open Source License

/**
 * Get number of labels associated with each instance
 * /*from  ww w. j a va2  s  . c o  m*/
 * @param mlData Multi-label dataset
 * @return Array with the number of labels associated with each instance
 */
public static int[] labelsForInstance(MultiLabelInstances mlData) {

    int nInstances = mlData.getNumInstances();
    int nLabels = mlData.getNumLabels();

    int[] labelsForInstance = new int[nInstances];

    int[] labelIndices = mlData.getLabelIndices();

    Instances instances = mlData.getDataSet();

    Instance inst;
    for (int i = 0; i < nInstances; i++) {
        inst = instances.get(i);

        for (int j = 0; j < nLabels; j++) {
            if (inst.value(labelIndices[j]) == 1) {
                labelsForInstance[i]++;
            }
        }
    }

    return (labelsForInstance);
}

From source file:moa.classifiers.bayes.NaiveBayes.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    // Accumulate the class prior, weighted by the instance weight.
    int classVal = (int) inst.classValue();
    double weight = inst.weight();
    this.observedClassDistribution.addToValue(classVal, weight);

    // Feed every predictive attribute (class excluded) to its observer,
    // creating the observer lazily on first sight.
    int numPredictive = inst.numAttributes() - 1;
    for (int modelIndex = 0; modelIndex < numPredictive; modelIndex++) {
        int attIndex = modelAttIndexToInstanceAttIndex(modelIndex, inst);
        AttributeClassObserver observer = this.attributeObservers.get(modelIndex);
        if (observer == null) {
            observer = inst.attribute(attIndex).isNominal()
                    ? newNominalClassObserver()
                    : newNumericClassObserver();
            this.attributeObservers.set(modelIndex, observer);
        }
        observer.observeAttributeClass(inst.value(attIndex), classVal, weight);
    }
}

From source file:moa.classifiers.bayes.NaiveBayes.java

License:Open Source License

/**
 * Computes unnormalised naive Bayes class votes for the given instance:
 * the class prior multiplied by the conditional likelihood of every
 * non-missing predictive attribute.
 *
 * @param inst the instance to classify
 * @param observedClassDistribution observed weight per class (the priors)
 * @param attributeObservers per-attribute conditional distribution estimators
 * @return one vote per class, proportional to P(class) * prod P(att | class)
 */
public static double[] doNaiveBayesPrediction(Instance inst, DoubleVector observedClassDistribution,
        AutoExpandVector<AttributeClassObserver> attributeObservers) {
    int numClasses = observedClassDistribution.numValues();
    double totalWeight = observedClassDistribution.sumOfValues();
    int numPredictive = inst.numAttributes() - 1;
    double[] votes = new double[numClasses];
    for (int c = 0; c < numClasses; c++) {
        // Start from the class prior.
        double vote = observedClassDistribution.getValue(c) / totalWeight;
        for (int a = 0; a < numPredictive; a++) {
            int instAttIndex = modelAttIndexToInstanceAttIndex(a, inst);
            AttributeClassObserver observer = attributeObservers.get(a);
            // Skip attributes with no observer yet, and missing values.
            if (observer != null && !inst.isMissing(instAttIndex)) {
                vote *= observer.probabilityOfAttributeValueGivenClass(inst.value(instAttIndex), c);
            }
        }
        votes[c] = vote;
    }
    // TODO: need logic to prevent underflow?
    return votes;
}

From source file:moa.classifiers.bayes.NaiveBayes.java

License:Open Source License

/**
 * Log-space variant of the naive Bayes vote computation: returns, per
 * class, log10(prior) plus the sum of log10 conditional likelihoods.
 * Nominal attributes are scored with {@code observers}; all other
 * attributes with {@code observers2}.
 *
 * @param inst the instance to classify
 * @param observedClassDistribution observed weight per class (the priors)
 * @param observers estimators used for nominal attributes
 * @param observers2 estimators used for non-nominal attributes
 * @return one log10-scale vote per class
 */
public static double[] doNaiveBayesPredictionLog(Instance inst, DoubleVector observedClassDistribution,
        AutoExpandVector<AttributeClassObserver> observers,
        AutoExpandVector<AttributeClassObserver> observers2) {
    int numClasses = observedClassDistribution.numValues();
    double totalWeight = observedClassDistribution.sumOfValues();
    int numPredictive = inst.numAttributes() - 1;
    double[] votes = new double[numClasses];
    for (int c = 0; c < numClasses; c++) {
        // Log of the class prior.
        votes[c] = Math.log10(observedClassDistribution.getValue(c) / totalWeight);
        for (int a = 0; a < numPredictive; a++) {
            int instAttIndex = modelAttIndexToInstanceAttIndex(a, inst);
            // Pick the observer pool matching the attribute's type.
            AttributeClassObserver observer = inst.attribute(instAttIndex).isNominal()
                    ? observers.get(a)
                    : observers2.get(a);
            if (observer != null && !inst.isMissing(instAttIndex)) {
                votes[c] += Math
                        .log10(observer.probabilityOfAttributeValueGivenClass(inst.value(instAttIndex), c));
            }
        }
    }
    return votes;
}

From source file:moa.classifiers.bayes.NaiveBayesMultinomial.java

License:Open Source License

/**
 * Trains the multinomial naive Bayes classifier with the given instance.
 *
 * <p>On the first call (or after a reset) the per-class counters are
 * (re)initialised using the instance header sizes and the configured
 * Laplace correction, then the word counts for the instance's class are
 * updated from its (sparse) attribute values.</p>
 *
 * @param inst the new training instance to include in the model
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    if (this.reset == true) {
        // Lazy (re)initialisation: array sizes come from the instance header.
        this.m_numClasses = inst.numClasses();
        double laplace = this.laplaceCorrectionOption.getValue();
        int numAttributes = inst.numAttributes();

        // Class priors, seeded with the Laplace correction.
        m_probOfClass = new double[m_numClasses];
        Arrays.fill(m_probOfClass, laplace);

        // Total word mass per class; seeded as if every attribute had
        // already contributed `laplace` once.
        m_classTotals = new double[m_numClasses];
        Arrays.fill(m_classTotals, laplace * numAttributes);

        m_wordTotalForClass = new DoubleVector[m_numClasses];
        for (int i = 0; i < m_numClasses; i++) {
            //Arrays.fill(wordTotal, laplace);
            m_wordTotalForClass[i] = new DoubleVector();
        }
        this.reset = false;
    }
    // Update classifier
    int classIndex = inst.classIndex();
    int classValue = (int) inst.value(classIndex);

    double w = inst.weight();
    m_probOfClass[classValue] += w;

    m_classTotals[classValue] += w * totalSize(inst);
    // NOTE(review): `total` is never read below — dead local?
    double total = m_classTotals[classValue];

    // Iterate over the sparsely stored values of the instance.
    for (int i = 0; i < inst.numValues(); i++) {
        int index = inst.index(i);
        // NOTE(review): `isMissing(i)` receives the sparse position `i`,
        // while `index` holds the attribute index — confirm this is intended;
        // `isMissingSparse(i)` or `isMissing(index)` may have been meant.
        if (index != classIndex && !inst.isMissing(i)) {
            //m_wordTotalForClass[index][classValue] += w * inst.valueSparse(i);
            double laplaceCorrection = 0.0;
            // Apply the Laplace correction the first time this word is
            // seen for this class.
            if (m_wordTotalForClass[classValue].getValue(index) == 0) {
                laplaceCorrection = this.laplaceCorrectionOption.getValue();
            }
            m_wordTotalForClass[classValue].addToValue(index, w * inst.valueSparse(i) + laplaceCorrection);
        }
    }
}

From source file:moa.classifiers.DecisionStump.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {
    this.observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());
    for (int i = 0; i < inst.numAttributes() - 1; i++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
        AttributeClassObserver obs = this.attributeObservers.get(i);
        if (obs == null) {
            obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                    : newNumericClassObserver();
            this.attributeObservers.set(i, obs);
        }/*w w  w .j a va2 s .co m*/
        obs.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(), inst.weight());
    }
    if (this.trainingWeightSeenByModel - this.weightSeenAtLastSplit >= this.gracePeriodOption.getValue()) {
        this.bestSplit = findBestSplit((SplitCriterion) getPreparedClassOption(this.splitCriterionOption));
        this.weightSeenAtLastSplit = this.trainingWeightSeenByModel;
    }
}

From source file:moa.classifiers.functions.Perceptron.java

License:Open Source License

@Override
public void trainOnInstanceImpl(Instance inst) {

    //Init Perceptron
    if (this.reset == true) {
        this.reset = false;
        this.numberAttributes = inst.numAttributes();
        this.numberClasses = inst.numClasses();
        this.weightAttribute = new double[inst.numClasses()][inst.numAttributes()];
        for (int i = 0; i < inst.numClasses(); i++) {
            for (int j = 0; j < inst.numAttributes(); j++) {
                weightAttribute[i][j] = 0.2 * this.classifierRandom.nextDouble() - 0.1;
            }//from   ww  w  .  j  a v a2s  . c o m
        }
    }

    double[] preds = new double[inst.numClasses()];
    for (int i = 0; i < inst.numClasses(); i++) {
        preds[i] = prediction(inst, i);
    }
    double learningRatio = learningRatioOption.getValue();

    int actualClass = (int) inst.classValue();
    for (int i = 0; i < inst.numClasses(); i++) {
        double actual = (i == actualClass) ? 1.0 : 0.0;
        double delta = (actual - preds[i]) * preds[i] * (1 - preds[i]);
        for (int j = 0; j < inst.numAttributes() - 1; j++) {
            this.weightAttribute[i][j] += learningRatio * delta * inst.value(j);
        }
        this.weightAttribute[i][inst.numAttributes() - 1] += learningRatio * delta;
    }
}

From source file:moa.classifiers.functions.Perceptron.java

License:Open Source License

/**
 * Sigmoid output for one class: the logistic function applied to the
 * weighted sum of the instance's attribute values plus the bias term
 * stored in the last weight slot.
 *
 * @param inst the instance to score
 * @param classVal index of the class whose weight vector is used
 * @return a value in (0, 1)
 */
public double prediction(Instance inst, int classVal) {
    double[] weights = weightAttribute[classVal];
    int lastIndex = inst.numAttributes() - 1;
    double activation = 0.0;
    for (int j = 0; j < lastIndex; j++) {
        activation += weights[j] * inst.value(j);
    }
    activation += weights[lastIndex]; // bias term
    return 1.0 / (1.0 + Math.exp(-activation));
}

From source file:moa.classifiers.lazy.neighboursearch.kdtrees.KMeansInpiredMethod.java

License:Open Source License

/**
 * Splits a node into two such that the overall sum of squared distances
 * of points to their centres on both sides of the (axis-parallel)
 * splitting plane is minimum. The two nodes created after the whole
 * splitting are correctly initialised, and node.left and node.right
 * are set appropriately.
 *
 * @param node The node to split.
 * @param numNodesCreated The number of nodes that so far have been
 * created for the tree, so that the newly created nodes are
 * assigned correct/meaningful node numbers/ids.
 * @param nodeRanges The attributes' range for the points inside
 * the node that is to be split.
 * @param universe The attributes' range for the whole
 * point-space.
 * @throws Exception If there is some problem in splitting the
 * given node.
 */
public void splitNode(KDTreeNode node, int numNodesCreated, double[][] nodeRanges, double[][] universe)
        throws Exception {

    correctlyInitialized();

    int splitDim = -1;
    double splitVal = Double.NEGATIVE_INFINITY;

    // Running per-attribute sums and squared sums for the candidate
    // left and right partitions during the sweep below.
    double leftAttSum[] = new double[m_Instances.numAttributes()],
            rightAttSum[] = new double[m_Instances.numAttributes()],
            leftAttSqSum[] = new double[m_Instances.numAttributes()],
            rightAttSqSum[] = new double[m_Instances.numAttributes()], rightSqMean, leftSqMean, leftSqSum,
            rightSqSum, minSum = Double.POSITIVE_INFINITY, val;

    for (int dim = 0; dim < m_Instances.numAttributes(); dim++) {
        // m_MaxRelativeWidth in KDTree ensures there'll be at least one
        // dim with width > 0.0; skip constant dims and the class attribute.
        if (node.m_NodeRanges[dim][WIDTH] == 0.0 || dim == m_Instances.classIndex())
            continue;

        // Order the node's instances along this dimension so candidate
        // split points can be swept left-to-right.
        quickSort(m_Instances, m_InstList, dim, node.m_Start, node.m_End);

        // Pass 1: accumulate all points on the right side.
        for (int i = node.m_Start; i <= node.m_End; i++) {
            for (int j = 0; j < m_Instances.numAttributes(); j++) {
                if (j == m_Instances.classIndex())
                    continue;
                val = m_Instances.instance(m_InstList[i]).value(j);
                if (m_NormalizeNodeWidth) {
                    if (Double.isNaN(universe[j][MIN]) || universe[j][MIN] == universe[j][MAX])
                        val = 0.0;
                    else
                        // Normalize into [0, 1] using the universe range.
                        val = ((val - universe[j][MIN]) / universe[j][WIDTH]);
                }
                if (i == node.m_Start) {
                    // Reset the accumulators at the start of each dim's sweep.
                    leftAttSum[j] = rightAttSum[j] = leftAttSqSum[j] = rightAttSqSum[j] = 0.0;
                }
                rightAttSum[j] += val;
                rightAttSqSum[j] += val * val;
            }
        }

        // Pass 2: move points from right to left one at a time and
        // evaluate the total within-partition sum of squares.
        for (int i = node.m_Start; i <= node.m_End - 1; i++) {
            Instance inst = m_Instances.instance(m_InstList[i]);
            leftSqSum = rightSqSum = 0.0;
            for (int j = 0; j < m_Instances.numAttributes(); j++) {
                if (j == m_Instances.classIndex())
                    continue;
                val = inst.value(j);

                if (m_NormalizeNodeWidth) {
                    if (Double.isNaN(universe[j][MIN]) || universe[j][MIN] == universe[j][MAX])
                        val = 0.0;
                    else
                        // Normalize into [0, 1] using the universe range.
                        val = ((val - universe[j][MIN]) / universe[j][WIDTH]);
                }

                // Shift this point's contribution from right to left.
                leftAttSum[j] += val;
                rightAttSum[j] -= val;
                leftAttSqSum[j] += val * val;
                rightAttSqSum[j] -= val * val;
                // Sum of squared deviations via sum(x^2) - n * mean^2.
                leftSqMean = leftAttSum[j] / (i - node.m_Start + 1);
                leftSqMean *= leftSqMean;
                rightSqMean = rightAttSum[j] / (node.m_End - i);
                rightSqMean *= rightSqMean;

                leftSqSum += leftAttSqSum[j] - (i - node.m_Start + 1) * leftSqMean;
                rightSqSum += rightAttSqSum[j] - (node.m_End - i) * rightSqMean;
            }

            if (minSum > (leftSqSum + rightSqSum)) {
                minSum = leftSqSum + rightSqSum;

                // Split midway between this point and the next along dim.
                // NOTE(review): `i < node.m_End` is always true here since
                // the loop runs to m_End - 1 — the else branch looks dead.
                if (i < node.m_End)
                    splitVal = (m_Instances.instance(m_InstList[i]).value(dim)
                            + m_Instances.instance(m_InstList[i + 1]).value(dim)) / 2;
                else
                    splitVal = m_Instances.instance(m_InstList[i]).value(dim);

                splitDim = dim;
            }
        } // end for instance i
    } // end for attribute dim

    // Physically partition the instance list around the chosen plane.
    int rightStart = rearrangePoints(m_InstList, node.m_Start, node.m_End, splitDim, splitVal);

    if (rightStart == node.m_Start || rightStart > node.m_End) {
        System.out.println("node.m_Start: " + node.m_Start + " node.m_End: " + node.m_End + " splitDim: "
                + splitDim + " splitVal: " + splitVal + " node.min: " + node.m_NodeRanges[splitDim][MIN]
                + " node.max: " + node.m_NodeRanges[splitDim][MAX] + " node.numInstances: "
                + node.numInstances());

        if (rightStart == node.m_Start)
            throw new Exception("Left child is empty in node " + node.m_NodeNumber + ". Not possible with "
                    + "KMeanInspiredMethod splitting method. Please " + "check code.");
        else
            throw new Exception("Right child is empty in node " + node.m_NodeNumber + ". Not possible with "
                    + "KMeansInspiredMethod splitting method. Please " + "check code.");
    }

    node.m_SplitDim = splitDim;
    node.m_SplitValue = splitVal;
    node.m_Left = new KDTreeNode(numNodesCreated + 1, node.m_Start, rightStart - 1,
            m_EuclideanDistance.initializeRanges(m_InstList, node.m_Start, rightStart - 1));
    node.m_Right = new KDTreeNode(numNodesCreated + 2, rightStart, node.m_End,
            m_EuclideanDistance.initializeRanges(m_InstList, rightStart, node.m_End));
}

From source file:moa.classifiers.meta.RandomRules.java

License:Open Source License

/**
 * Projects {@code inst} onto the random attribute subset assigned to the
 * given ensemble member and returns the reduced instance (selected
 * attributes plus the class).
 *
 * <p>On the first call the per-member attribute subsets and dataset
 * headers are created lazily, and each member's model context is set.</p>
 *
 * @param inst the original instance
 * @param classifierIndex index of the ensemble member whose subset to use
 * @return a new dense instance over the member's attribute subset
 */
private Instance transformInstance(Instance inst, int classifierIndex) {
    if (this.listAttributes == null) {
        // Number of attributes each member sees, as a percentage of the total.
        this.numAttributes = (int) (this.numAttributesPercentageOption.getValue() * inst.numAttributes()
                / 100.0);
        this.listAttributes = new int[this.numAttributes][this.ensemble.length];
        this.dataset = new InstancesHeader[this.ensemble.length];
        for (int ensembleIndex = 0; ensembleIndex < this.ensemble.length; ensembleIndex++) {
            // Draw attribute indices without replacement via rejection sampling.
            // NOTE(review): this loops forever if numAttributes exceeds
            // inst.numAttributes() - 1 — confirm the percentage option is bounded.
            for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
                boolean isUnique = false;
                while (isUnique == false) {
                    this.listAttributes[attributeIndex][ensembleIndex] = this.classifierRandom
                            .nextInt(inst.numAttributes() - 1);
                    isUnique = true;
                    for (int k = 0; k < attributeIndex; k++) {
                        if (this.listAttributes[attributeIndex][ensembleIndex] == this.listAttributes[k][ensembleIndex]) {
                            isUnique = false;
                            break;
                        }
                    }
                }
                //this.listAttributes[attributeIndex][ensembleIndex] = attributeIndex;
            }
            //Create Header
            // NOTE(review): System.out prints below look like leftover debug output.
            FastVector attributes = new FastVector();
            for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
                attributes.addElement(inst.attribute(this.listAttributes[attributeIndex][ensembleIndex]));
                System.out.print(this.listAttributes[attributeIndex][ensembleIndex]);
            }
            System.out.println("Number of attributes: " + this.numAttributes + "," + inst.numAttributes());
            attributes.addElement(inst.classAttribute());
            this.dataset[ensembleIndex] = new InstancesHeader(
                    new Instances(getCLICreationString(InstanceStream.class), attributes, 0));
            // The class attribute is appended last, at position numAttributes.
            this.dataset[ensembleIndex].setClassIndex(this.numAttributes);
            this.ensemble[ensembleIndex].setModelContext(this.dataset[ensembleIndex]);
        }
    }
    //Instance instance = new DenseInstance(this.numAttributes+1);
    //instance.setDataset(dataset[classifierIndex]);
    // Copy the selected attribute values into the reduced instance.
    double[] attVals = new double[this.numAttributes + 1];
    for (int attributeIndex = 0; attributeIndex < this.numAttributes; attributeIndex++) {
        //instance.setValue(attributeIndex, inst.value(this.listAttributes[attributeIndex][classifierIndex]));
        attVals[attributeIndex] = inst.value(this.listAttributes[attributeIndex][classifierIndex]);
    }
    Instance instance = new DenseInstance(1.0, attVals);
    instance.setDataset(dataset[classifierIndex]);
    instance.setClassValue(inst.classValue());
    // System.out.println(inst.toString());
    // System.out.println(instance.toString());
    // System.out.println("============");
    return instance;
}