Example usage for weka.core Instance numAttributes

List of usage examples for weka.core Instance numAttributes

Introduction

On this page you can find example usage for weka.core Instance numAttributes.

Prototype

public int numAttributes();

Document

Returns the number of attributes.
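
Most of the examples below loop only up to numAttributes() - 1, which skips the class attribute under the convention that the class is the last attribute. The following is a minimal, self-contained sketch of that pattern. It is not taken from any of the source files below and assumes the newer WEKA API, in which DenseInstance implements the Instance interface; the dataset, attribute names and values are made up for illustration.

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class NumAttributesExample {
    public static void main(String[] args) {
        // Two numeric input attributes plus a numeric class attribute (assumed last).
        ArrayList<Attribute> attributes = new ArrayList<Attribute>();
        attributes.add(new Attribute("x1"));
        attributes.add(new Attribute("x2"));
        attributes.add(new Attribute("target"));
        Instances header = new Instances("demo", attributes, 0);
        header.setClassIndex(header.numAttributes() - 1);

        Instance inst = new DenseInstance(1.0, new double[] { 0.5, 1.5, 2.0 });
        inst.setDataset(header);

        // numAttributes() counts every attribute, including the class attribute.
        System.out.println("numAttributes() = " + inst.numAttributes()); // prints 3

        // Visit only the input attributes, as most of the examples below do.
        double sum = 0.0;
        for (int j = 0; j < inst.numAttributes() - 1; j++) {
            if (inst.attribute(j).isNumeric()) {
                sum += inst.value(j);
            }
        }
        System.out.println("sum of input values = " + sum); // prints 2.0
    }
}

With older WEKA releases (3.6 and earlier), new Instance(1.0, values) would be used instead of DenseInstance; the numAttributes() idiom itself is unchanged.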

Usage

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Returns the number of instances recorded by the first numeric attribute
// observer whose binary tree has been initialized.
protected int observersNumberInstance(Instance inst, AutoExpandVector<AttributeClassObserver> observerss) {
    int numberInstance = 0;
    for (int z = 0; z < inst.numAttributes() - 1; z++) {
        numberInstance = 0;
        int instAttIndex = modelAttIndexToInstanceAttIndex(z, inst);
        if (inst.attribute(instAttIndex).isNumeric()) {
            Node rootNode = ((BinaryTreeNumericAttributeClassObserverRegression) observerss.get(z)).root1;
            if (rootNode != null) {
                numberInstance = (int) (rootNode.lessThan[2] + rootNode.greaterThan[2]);
                break;
            }
        }
    }
    return numberInstance;
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Computes the perceptron output (a weighted sum of the numeric attribute
// values plus a bias term) and denormalizes it using the target's running
// mean and standard deviation.
public double prediction(Instance inst, double[] weightAtt, double squaredActualClassStatistics,
        double actualClassStatistics, int instancesSeen, boolean reset) {
    double prediction = 0;
    if (reset == false) {
        for (int j = 0; j < inst.numAttributes() - 1; j++) {
            if (inst.attribute(j).isNumeric()) {
                prediction += weightAtt[j] * inst.value(j);
            }
        }
        prediction += weightAtt[inst.numAttributes() - 1];
    }
    double sdPredictedClass = computeSD(squaredActualClassStatistics, actualClassStatistics, instancesSeen);
    double outputDesnorm = 0;
    if (sdPredictedClass > 0.0000001) {
        outputDesnorm = 3 * prediction * sdPredictedClass + (actualClassStatistics / instancesSeen);
    }
    return outputDesnorm;
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Performs one perceptron weight update (only after more than 30 instances
// have been seen): the prediction error delta is computed on normalized
// values and each numeric attribute's weight is adjusted by
// learningRatio * delta * (normalized attribute value).
public double updateAttWeight(Instance inst, double[] weightAtt, double squaredActualClassStatistics,
        double actualClassStatistics, DoubleVector squaredAttributeStatistics, DoubleVector attributeStatistics,
        int instancesSeen, boolean reset) {
    double learningRatio = 0.0;
    if (this.learningRatio_Decay_or_Const_Option.isSet()) { //Decaying learning rate option
        learningRatio = this.learningRatioOption.getValue();
    } else {
        learningRatio = initLearnRate / (1 + instancesSeen * this.learnRateDecay);
    }

    double predict = 0.0;
    if (instancesSeen > 30) {
        predict = this.prediction(inst, weightAtt, squaredActualClassStatistics, actualClassStatistics,
                instancesSeen, reset);
        double sdClass = computeSD(squaredActualClassStatistics, actualClassStatistics, instancesSeen);
        double actualClass = 0.0;
        double predictedClass = 0.0;
        if (sdClass > 0.0000001) {
            actualClass = (inst.classValue() - (actualClassStatistics / instancesSeen)) / (3 * sdClass);
            predictedClass = (predict - (actualClassStatistics / instancesSeen)) / (3 * sdClass);
        }
        double delta = actualClass - predictedClass;
        for (int x = 0; x < inst.numAttributes() - 1; x++) {
            if (inst.attribute(x).isNumeric()) {
                // Update weights. Ensure attribute values are normalised first.
                double sd = Math.sqrt((squaredAttributeStatistics.getValue(x)
                        - ((attributeStatistics.getValue(x) * attributeStatistics.getValue(x)) / instancesSeen))
                        / instancesSeen);
                double instanceValue = 0;
                instanceValue = (inst.value(x) - (attributeStatistics.getValue(x) / instancesSeen));
                if (sd > 0.0000001) {
                    instanceValue = instanceValue / (3 * sd);
                }
                if (sd == 0.0) {
                    weightAtt[x] = 0.0;
                } else {
                    weightAtt[x] += learningRatio * delta * instanceValue;
                }
            }
        }
        weightAtt[inst.numAttributes() - 1] += learningRatio * delta;
    }
    return predict;
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Updates the rule's running statistics (per-attribute sums and squared sums,
// plus the target totals) with the given instance and refreshes the rule's
// target mean.
public void updateRuleStatistics(Instance inst, Rule rl, int ruleIndex) {
    rl.instancesSeen++;
    double targetValueSize = this.numTargetValue.get(ruleIndex) + 1.0;
    double targetVal = this.targetValue.get(ruleIndex) + inst.classValue();
    this.targetValue.set(ruleIndex, targetVal);
    this.numTargetValue.set(ruleIndex, targetValueSize);
    setRuleTarget(this.targetValue.get(ruleIndex), this.numTargetValue.get(ruleIndex), ruleIndex);
    rl.ValorTargetRule = this.ruleTargetMean.get(ruleIndex);
    for (int s = 0; s < inst.numAttributes() - 1; s++) {
        rl.attributeStatistics.addToValue(s, inst.value(s));
        rl.squaredAttributeStatistics.addToValue(s, inst.value(s) * inst.value(s));
    }
    rl.actualClassStatistics += inst.classValue();
    rl.squaredActualClassStatistics += inst.classValue() * inst.classValue();
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Initializes the default rule's perceptron the first time it is needed:
// the running statistics are reset and the weights are drawn uniformly
// from [-1, 1).
public void initialyPerceptron(Instance inst) {
    if (this.resetDefault == true) {
        this.resetDefault = false;
        this.weightAttributeDefault = new double[inst.numAttributes()];
        this.instancesSeenDefault = 0;
        this.actualClassStatisticsDefault = 0.0;
        this.squaredActualClassStatisticsDefault = 0.0;
        this.attributeStatisticsDefault = new DoubleVector();
        this.squaredAttributeStatisticsDefault = new DoubleVector();
        this.attributesProbabilityDefault = new DoubleVector();
        Random r = new Random();
        long value = (long) seedOption.getValue();
        r.setSeed(value);
        for (int j = 0; j < inst.numAttributes(); j++) {
            this.weightAttributeDefault[j] = 2 * r.nextDouble() - 1;
        }
    }
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Updates the default rule's running statistics with the given instance.
public void updatedefaultRuleStatistics(Instance inst) {
    this.instancesSeenDefault++;
    for (int j = 0; j < inst.numAttributes() - 1; j++) {
        this.attributeStatisticsDefault.addToValue(j, inst.value(j));
        this.squaredAttributeStatisticsDefault.addToValue(j, inst.value(j) * inst.value(j));
    }
    this.actualClassStatisticsDefault += inst.classValue();
    this.squaredActualClassStatisticsDefault += inst.classValue() * inst.classValue();
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Tries to create a new rule: once the observers have seen a nonzero number
// of instances that is a multiple of the grace period, the best split
// candidate is evaluated and, if it passes the check, turned into a new rule.
public void createRule(Instance inst) {
    int remainder = (int) Double.MAX_VALUE;
    int numInstanciaObservers = observersNumberInstance(inst, this.attributeObservers);
    if (numInstanciaObservers != 0 && this.gracePeriodOption.getValue() != 0) {
        remainder = (numInstanciaObservers) % (this.gracePeriodOption.getValue());
    }
    if (remainder == 0) {
        theBestAttributes(inst, this.attributeObservers);
        boolean bestAttribute = checkBestAttrib(numInstanciaObservers); // Check if the best attribute value is really the best.
        if (bestAttribute == true) {
            double attributeValue = this.saveTheBest.get(3);
            double symbol = this.saveTheBest.get(2); // <=, > : (0.0, -1.0, 1.0).
            double value = this.saveTheBest.get(0); // Value of the attribute
            double targetValorTotal = this.saveTheBest.get(4);
            double contaTargetValorTotal = this.saveTheBest.get(5);
            this.pred = new Predicates(attributeValue, symbol, value);
            Rule Rl = new Rule(); // Create new rule.
            Rl.predicateSet.add(pred);
            Rl.weightAttribute = new double[inst.numAttributes()];
            System.arraycopy(this.weightAttributeDefault, 0, Rl.weightAttribute, 0,
                    this.weightAttributeDefault.length); // Initialize the rule's array of weights.
            reanicializeRuleStatistic(Rl); // Initialize the other statistics of the rule.
            this.ruleSet.add(Rl);
            this.targetValue.add(targetValorTotal);
            this.numTargetValue.add(contaTargetValorTotal);
            getRuleTarget(this.targetValue.get(ruleSet.size() - 1), this.numTargetValue.get(ruleSet.size() - 1),
                    this.ruleSet.size() - 1);
            Rl.ValorTargetRule = this.ruleTargetMean.get(this.ruleSet.size() - 1);
            this.attributeObservers = new AutoExpandVector<AttributeClassObserver>();
        }
    }
}

From source file: moa.classifiers.rules.AMRules.java

License: Apache License

// Returns the mean target value (sum of target values divided by their count)
// recorded by the first numeric attribute observer whose binary tree has been
// initialized.
protected double observersDistrib(Instance inst, AutoExpandVector<AttributeClassObserver> observerss) {
    double votes = 0.0;
    for (int z = 0; z < inst.numAttributes() - 1; z++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(z, inst);
        if (inst.attribute(instAttIndex).isNumeric()) {
            if (observerss.get(z) != null) {
                Node rootNode = ((BinaryTreeNumericAttributeClassObserverRegression) observerss.get(z)).root1;
                if (rootNode != null) {
                    double sum = rootNode.greaterThan[0] + rootNode.lessThan[0];
                    double numTarget = rootNode.greaterThan[2] + rootNode.lessThan[2];
                    votes = sum / numTarget;
                    break;
                }
            }
        }
    }
    return votes;
}

From source file: moa.classifiers.rules.functions.Perceptron.java

License: Apache License

/**
 * Update the model using the provided instance
 */
public void trainOnInstanceImpl(Instance inst) {
    accumulatedError = Math.abs(this.prediction(inst) - inst.classValue()) + fadingFactor * accumulatedError;
    nError = 1 + fadingFactor * nError;
    // Initialise Perceptron if necessary   
    if (this.initialisePerceptron == true) {
        this.fadingFactor = this.fadingFactorOption.getValue();
        this.classifierRandom.setSeed(randomSeedOption.getValue());
        this.initialisePerceptron = false; // not in resetLearningImpl() because it needs Instance!
        this.weightAttribute = new double[inst.numAttributes()];
        for (int j = 0; j < inst.numAttributes(); j++) {
            weightAttribute[j] = 2 * this.classifierRandom.nextDouble() - 1;
        }
        // Update Learning Rate
        learningRatio = learningRatioOption.getValue();
        this.learningRateDecay = learningRateDecayOption.getValue();

    }

    // Update attribute statistics
    this.perceptronInstancesSeen++;
    this.perceptronYSeen++;

    for (int j = 0; j < inst.numAttributes() - 1; j++) {
        perceptronattributeStatistics.addToValue(j, inst.value(j));
        squaredperceptronattributeStatistics.addToValue(j, inst.value(j) * inst.value(j));
    }
    this.perceptronsumY += inst.classValue();
    this.squaredperceptronsumY += inst.classValue() * inst.classValue();

    if (constantLearningRatioDecayOption.isSet() == false) {
        learningRatio = learningRatioOption.getValue() / (1 + perceptronInstancesSeen * learningRateDecay);
    }

    //double prediction = this.updateWeights(inst,learningRatio);
    //accumulatedError= Math.abs(prediction-inst.classValue()) + fadingFactor*accumulatedError;

    this.updateWeights(inst, learningRatio);

}

From source file: moa.classifiers.rules.functions.Perceptron.java

License: Apache License

public double[] normalizedInstance(Instance inst) {
    // Normalize each input attribute using its running mean and standard deviation
    double[] normalizedInstance = new double[inst.numAttributes()];
    for (int j = 0; j < inst.numAttributes() - 1; j++) {
        int instAttIndex = modelAttIndexToInstanceAttIndex(j, inst);
        double mean = perceptronattributeStatistics.getValue(j) / perceptronYSeen;
        double sd = computeSD(squaredperceptronattributeStatistics.getValue(j),
                perceptronattributeStatistics.getValue(j), perceptronYSeen);
        if (sd > SD_THRESHOLD)
            normalizedInstance[j] = (inst.value(instAttIndex) - mean) / sd;
        else
            normalizedInstance[j] = inst.value(instAttIndex) - mean;
    }
    return normalizedInstance;
}