Example usage for weka.core Instance classValue

List of usage examples for weka.core Instance classValue

Introduction

On this page you can find example usages of the weka.core Instance classValue method.

Prototype

public double classValue();

Source Link

Document

Returns an instance's class value as a floating-point number.

Usage

From source file:moa.classifiers.rules.AMRules.java

License:Apache License

/**
 * Updates the default rule's sufficient statistics with one instance:
 * per-attribute sums and squared sums, plus the sum and squared sum of the
 * target (class) value, used later for mean/variance estimates.
 */
public void updatedefaultRuleStatistics(Instance inst) {
    this.instancesSeenDefault++;
    int attributeCount = inst.numAttributes() - 1; // last attribute is the target
    for (int attrIdx = 0; attrIdx < attributeCount; attrIdx++) {
        double attrValue = inst.value(attrIdx);
        this.attributeStatisticsDefault.addToValue(attrIdx, attrValue);
        this.squaredAttributeStatisticsDefault.addToValue(attrIdx, attrValue * attrValue);
    }
    double target = inst.classValue();
    this.actualClassStatisticsDefault += target;
    this.squaredActualClassStatisticsDefault += target * target;
}

From source file:moa.classifiers.rules.functions.FadingTargetMean.java

License:Apache License

/**
 * Incorporates one training instance into the fading target mean: both the
 * (faded) instance count and the (faded) target sum decay by
 * {@code fadingFactor} before the new observation is added.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    // Record the prediction error before the model absorbs this instance.
    updateAccumulatedError(inst);
    nD = fadingFactor * nD + 1;
    sum = fadingFactor * sum + inst.classValue();
}

From source file:moa.classifiers.rules.functions.Perceptron.java

License:Apache License

/**
 * Updates the perceptron with a single training instance: fades the running
 * error, lazily initialises the weight vector on the first call, updates the
 * normalisation statistics and the learning rate, then adjusts the weights.
 */
public void trainOnInstanceImpl(Instance inst) {
    // Fade the running absolute-error estimate before learning from this instance.
    accumulatedError = fadingFactor * accumulatedError + Math.abs(this.prediction(inst) - inst.classValue());
    nError = fadingFactor * nError + 1;

    // Lazy initialisation: the weight vector needs the attribute count, so it
    // cannot happen in resetLearningImpl() (no Instance is available there).
    if (this.initialisePerceptron) {
        this.fadingFactor = this.fadingFactorOption.getValue();
        this.classifierRandom.setSeed(randomSeedOption.getValue());
        this.initialisePerceptron = false;
        int weightCount = inst.numAttributes();
        this.weightAttribute = new double[weightCount];
        for (int w = 0; w < weightCount; w++) {
            // Initial weights drawn uniformly from [-1, 1).
            weightAttribute[w] = 2 * this.classifierRandom.nextDouble() - 1;
        }
        learningRatio = learningRatioOption.getValue();
        this.learningRateDecay = learningRateDecayOption.getValue();
    }

    // Sufficient statistics used to normalise attributes and the target.
    this.perceptronInstancesSeen++;
    this.perceptronYSeen++;

    int attributeCount = inst.numAttributes() - 1; // last attribute is the target
    for (int attrIdx = 0; attrIdx < attributeCount; attrIdx++) {
        double attrValue = inst.value(attrIdx);
        perceptronattributeStatistics.addToValue(attrIdx, attrValue);
        squaredperceptronattributeStatistics.addToValue(attrIdx, attrValue * attrValue);
    }
    double target = inst.classValue();
    this.perceptronsumY += target;
    this.squaredperceptronsumY += target * target;

    if (!constantLearningRatioDecayOption.isSet()) {
        // Decay the learning rate with the number of instances seen.
        learningRatio = learningRatioOption.getValue() / (1 + perceptronInstancesSeen * learningRateDecay);
    }

    this.updateWeights(inst, learningRatio);
}

From source file:moa.classifiers.rules.functions.Perceptron.java

License:Apache License

/**
 * Normalises the instance's target value using the running mean and standard
 * deviation of all targets seen so far. When the standard deviation is at or
 * below {@code SD_THRESHOLD} (near-constant target), only the mean is
 * subtracted to avoid dividing by a value close to zero.
 */
private double normalizeActualClassValue(Instance inst) {
    double meanY = perceptronsumY / perceptronYSeen;
    double sdY = computeSD(squaredperceptronsumY, perceptronsumY, perceptronYSeen);
    double centered = inst.classValue() - meanY;
    return (sdY > SD_THRESHOLD) ? centered / sdY : centered;
}

From source file:moa.classifiers.rules.functions.TargetMean.java

License:Apache License

/**
 * Adds one instance to the running target mean: updates the error estimate
 * first (against the mean computed before this instance), then the count and
 * the target sum.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    updateAccumulatedError(inst);
    this.n++;
    this.sum += inst.classValue();
}

From source file:moa.classifiers.rules.functions.TargetMean.java

License:Apache License

/**
 * Folds the absolute error of the current mean prediction into the fading
 * error statistics. While no instance has been seen yet (n == 0) the
 * prediction defaults to 0.
 */
protected void updateAccumulatedError(Instance inst) {
    nError = fadingErrorFactor * nError + 1;
    double mean = (n > 0) ? sum / n : 0;
    errorSum = fadingErrorFactor * errorSum + Math.abs(inst.classValue() - mean);
}

From source file:moa.classifiers.rules.GeRules.java

License:Open Source License

/**
 * Votes for the instance's class using every rule that covers it; each
 * covering rule contributes one vote for its consequent class. Falls back to
 * the observed class distribution when no rule fires. Also tracks how often
 * the rule-based majority vote matches the true class.
 */
@Override
public double[] getVotesForInstance(Instance inst) {
    // Count every instance submitted for prediction.
    totalSeenInstances++;

    // Rules whose antecedents match this instance.
    ArrayList<Rule> firingRules = RulesCoveredInstance(inst);

    if (firingRules.isEmpty()) {
        // No rule fired: fall back to the majority-class distribution.
        return observedClassDistribution.getArrayCopy();
    }

    actualAttempts++;

    // One vote per covering rule, indexed by the rule's class label.
    double[] votes = new double[inst.numClasses()];
    for (Rule rule : firingRules) {
        votes[(int) rule.classification]++;
    }

    // Bookkeeping: did the majority vote hit the true class?
    if (Utils.maxIndex(votes) == (int) inst.classValue()) {
        actualAttemptsCorrectlyClassified++;
    }
    return votes;
}

From source file:moa.classifiers.rules.GeRules.java

License:Open Source License

/**
 * Trains on one instance. The instance is buffered for rule induction only
 * when no existing rule covers it; otherwise the statistics of every covering
 * rule are updated (and misclassifying rules that fail validation are
 * removed). When the buffer reaches the configured window size, new rules are
 * learnt from it and the buffer is cleared.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    // Add the instance's weight to the global class distribution.
    observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());

    // Compute coverage once. The original called RulesCoveredInstance(inst)
    // twice (for the emptiness check and again for the loop), doing the
    // matching work redundantly.
    ArrayList<Rule> coveringRules = RulesCoveredInstance(inst);

    if (coveringRules.isEmpty()) {
        // No rule covers the instance: buffer it for later rule induction.
        slidingWindowsBuffer.add(inst);
    } else {
        // Update the class-distribution statistics of every covering rule.
        for (Rule rule : coveringRules) {
            rule.updateClassDistribution(inst);
            rule.noOfCovered++;
            if (inst.classValue() == rule.classification) {
                // Rule covered the instance with the correct class.
                rule.noOfCorrectlyCovered++;
            } else if (rule.ruleShouldBeRemoved()) {
                // Misclassifying rule failed validation: drop it.
                rulesList.remove(rule);
            }
        }
    }

    // Window full: induce new rules from the buffered instances.
    if (slidingWindowsBuffer.size() == slidingWindowsSizeOption.getValue()) {
        ArrayList<Rule> learntRules = prismClassifier.learnRules(slidingWindowsBuffer);
        if (learntRules != null) {
            rulesList.addAll(learntRules);
        }
        // Clear the buffer to accept more instances.
        slidingWindowsBuffer.clear();
    }
}

From source file:moa.classifiers.rules.RuleClassifier.java

License:Apache License

/**
 * Trains the rule classifier on one instance. Every rule that covers the
 * instance has an anomaly score computed and logged; unless the instance is
 * deemed anomalous, the rule's class distribution and per-attribute observers
 * are updated and the rule is given a chance to expand. If no rule covers the
 * instance, the default rule's statistics are updated and a new rule may be
 * created.
 *
 * Fixes relative to the original: the anomaly-log writer is opened with
 * try-with-resources (the original leaked the FileWriter when println/flush
 * threw), and the unused local counter of above-threshold anomalies was
 * removed.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    boolean ruleFired = false; // true once any rule covers the instance
    this.instance = inst;
    this.numAttributes = instance.numAttributes() - 1; // last attribute is the class
    this.numClass = instance.numClasses();
    this.numInstance = numInstance + 1;
    for (int j = 0; j < ruleSet.size(); j++) {
        if (this.ruleSet.get(j).ruleEvaluate(inst)) {
            ruleFired = true;

            double anomaly = 0.0;
            if (this.Supervised.isSet()) {
                anomaly = computeAnomalySupervised(this.ruleSet.get(j), j, inst); // supervised anomaly score
            } else if (this.Unsupervised.isSet()) {
                anomaly = computeAnomalyUnsupervised(this.ruleSet.get(j), j, inst); // unsupervised anomaly score
            }

            // Append the anomaly score to the log file. try-with-resources
            // closes (and flushes) the writer even on failure.
            // NOTE(review): the file name is hard-coded and the file is
            // reopened for every firing rule of every instance — consider a
            // long-lived writer if this is a hot path.
            try (PrintWriter printWriter = new PrintWriter(new FileWriter(new File("SeaAnomaliesUnsupervised.txt"), true))) {
                printWriter.println(numInstance + ";" + anomaly);
            } catch (IOException e) {
                e.printStackTrace();
            }

            // Update rule statistics unless anomaly detection is active and the
            // score is at/above threshold (detection only applies once the rule
            // has seen enough instances).
            if ((this.ruleSet.get(j).instancesSeen <= this.anomalyNumInstThresholdOption.getValue())
                    || (anomaly < this.anomalyProbabilityThresholdOption.getValue()
                            && this.anomalyDetectionOption.isSet())
                    || !this.anomalyDetectionOption.isSet()) {
                this.ruleSet.get(j).obserClassDistrib.addToValue((int) inst.classValue(), inst.weight());
                for (int i = 0; i < inst.numAttributes() - 1; i++) {
                    int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);

                    if (!inst.isMissing(instAttIndex)) {
                        // Lazily create the per-attribute observers: one for the
                        // nominal/binary-tree statistics, one Gaussian (numeric only).
                        AttributeClassObserver obs = this.ruleSet.get(j).observers.get(i);
                        AttributeClassObserver obsGauss = this.ruleSet.get(j).observersGauss.get(i);
                        if (obs == null) {
                            obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                                    : newNumericClassObserver();
                            this.ruleSet.get(j).observers.set(i, obs);
                        }
                        if (obsGauss == null) {
                            obsGauss = inst.attribute(instAttIndex).isNumeric() ? newNumericClassObserver2()
                                    : null;
                            this.ruleSet.get(j).observersGauss.set(i, obsGauss);
                        }
                        obs.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(),
                                inst.weight());
                        if (inst.attribute(instAttIndex).isNumeric()) {
                            obsGauss.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(),
                                    inst.weight());
                        }
                    }
                }
                expandeRule(this.ruleSet.get(j), inst, j); // try to expand (specialise) the rule
            }
            if (this.orderedRulesOption.isSet()) { // ordered rule sets: only the first firing rule learns
                break;
            }
        }
    }
    if (!ruleFired) {
        // No rule covered the example: update the default rule's sufficient
        // statistics and attempt to create a new rule.
        this.observedClassDistribution.addToValue((int) inst.classValue(), inst.weight());
        for (int i = 0; i < inst.numAttributes() - 1; i++) {
            int instAttIndex = modelAttIndexToInstanceAttIndex(i, inst);
            if (!inst.isMissing(instAttIndex)) {
                AttributeClassObserver obs = this.attributeObservers.get(i);
                AttributeClassObserver obsGauss = this.attributeObserversGauss.get(i);
                if (obs == null) {
                    obs = inst.attribute(instAttIndex).isNominal() ? newNominalClassObserver()
                            : newNumericClassObserver();
                    this.attributeObservers.set(i, obs);
                }
                if (obsGauss == null) {
                    obsGauss = inst.attribute(instAttIndex).isNumeric() ? newNumericClassObserver2() : null;
                    this.attributeObserversGauss.set(i, obsGauss);
                }
                obs.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(), inst.weight());
                if (inst.attribute(instAttIndex).isNumeric()) {
                    obsGauss.observeAttributeClass(inst.value(instAttIndex), (int) inst.classValue(),
                            inst.weight());
                }
            }
        }
        createRule(inst); // create a new rule from the default-rule statistics
    }
}

From source file:moa.classifiers.rules.RuleClassifier.java

License:Apache License

/**
 * Updates per-rule attribute statistics with one instance: plain and squared
 * sums for every numeric attribute, plus per-class (supervised) sums. Missing
 * attribute values are counted separately instead of contributing to the sums.
 */
public void updateRuleAttribStatistics(Instance inst, Rule rl, int ruleIndex) {
    rl.instancesSeen++;
    // Lazily allocate the supervised statistics on the first call for this
    // rule: one slot per class for every numeric attribute.
    if (rl.squaredAttributeStatisticsSupervised.size() == 0 && rl.attributeStatisticsSupervised.size() == 0) {
        for (int s = 0; s < inst.numAttributes() - 1; s++) {
            rl.attributeStatisticsSupervised.add(new ArrayList<Double>());
            rl.squaredAttributeStatisticsSupervised.add(new ArrayList<Double>());
            int instAttIndex = modelAttIndexToInstanceAttIndex(s, inst);
            // NOTE(review): this reads the 'instance' field rather than the
            // 'inst' parameter; callers appear to set this.instance = inst
            // beforehand — confirm they always do.
            if (instance.attribute(instAttIndex).isNumeric()) {
                for (int c = 0; c < inst.numClasses(); c++) {
                    rl.attributeStatisticsSupervised.get(s).add(0.0);
                    // Squared sums are seeded at 1.0 (not 0.0), matching the
                    // original initialisation.
                    rl.squaredAttributeStatisticsSupervised.get(s).add(1.0);
                }
            }
        }
    }
    for (int s = 0; s < inst.numAttributes() - 1; s++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(s, inst);
        if (inst.isMissing(instAttIndex)) {
            // Missing values are tallied, not summed.
            rl.attributeMissingValues.addToValue(s, 1);
        } else if (instance.attribute(instAttIndex).isNumeric()) {
            double value = inst.value(s);
            int classIdx = (int) inst.classValue();
            rl.attributeStatistics.addToValue(s, value);
            rl.squaredAttributeStatistics.addToValue(s, value * value);
            rl.attributeStatisticsSupervised.get(s).set(classIdx,
                    rl.attributeStatisticsSupervised.get(s).get(classIdx) + value);
            rl.squaredAttributeStatisticsSupervised.get(s).set(classIdx,
                    rl.squaredAttributeStatisticsSupervised.get(s).get(classIdx) + value * value);
        }
    }
}