List of usage examples for weka.core.Instance.classValue()
public double classValue();
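Before the full examples, here is a minimal self-contained sketch of the pattern they all share: classValue() returns the value of an instance's class attribute as a double, so for a nominal class it is cast to int and used as a label index, typically compared against the index of a classifier's strongest vote. The dataset, attribute names, and vote array below are illustrative assumptions, not taken from any of the source files listed; the sketch assumes the Weka 3.7+ API, where DenseInstance is the concrete Instance implementation.

    import java.util.ArrayList;
    import weka.core.Attribute;
    import weka.core.DenseInstance;
    import weka.core.Instance;
    import weka.core.Instances;
    import weka.core.Utils;

    public class ClassValueSketch {
        public static void main(String[] args) {
            // Hypothetical two-attribute dataset with a nominal class {neg, pos}.
            ArrayList<String> labels = new ArrayList<String>();
            labels.add("neg");
            labels.add("pos");
            ArrayList<Attribute> attrs = new ArrayList<Attribute>();
            attrs.add(new Attribute("x"));
            attrs.add(new Attribute("class", labels));
            Instances data = new Instances("demo", attrs, 0);
            data.setClassIndex(data.numAttributes() - 1);

            // One instance with x = 0.5 and class "pos" (label index 1).
            Instance inst = new DenseInstance(1.0, new double[] { 0.5, 1.0 });
            inst.setDataset(data);

            // classValue() returns the class attribute as a double; for a nominal
            // class this is the label index, hence the (int) cast seen throughout
            // the examples below.
            int trueClass = (int) inst.classValue();

            // Typical use: compare the true class against the index of the
            // strongest vote returned by a classifier (votes faked here).
            double[] votes = { 0.2, 0.8 };
            boolean correctlyClassified = Utils.maxIndex(votes) == trueClass;
            System.out.println("true class = " + trueClass + ", correct = " + correctlyClassified);
        }
    }

In the MOA snippets below, the same cast-and-compare pattern appears as int trueClass = (int) inst.classValue(); followed by Utils.maxIndex(...getVotesForInstance(inst)) == trueClass.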
From source file: moa.classifiers.meta.AccuracyUpdatedEnsemble3.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        this.initVariables();
        this.classDistributions[(int) inst.classValue()]++;
        this.currentChunk.add(inst);
        this.processedInstances++;
        this.instances++;
        /*
         * how to decide ddm occur, by total vote or by current sub classifier?
         */
        int trueClass = (int) inst.classValue();
        boolean prediction;
        if (Utils.maxIndex(this.getVotesForInstance(inst)) == trueClass) {
            prediction = true;
        } else {
            prediction = false;
        }
        if (trueClass != 0) // problem
            System.out.println(trueClass + " " + prediction + " " + Utils.maxIndex(this.getVotesForInstance(inst)));
        this.ddmLevel = this.driftDetectionMethod.computeNextVal(prediction, inst);
        switch (this.ddmLevel) {
        case DriftDetectionMethod.DDM_OUTCONTROL_LEVEL:
            this.processChunk();
            this.instances = 0;
            System.out.println("DDM out control level");
            break;
        case DriftDetectionMethod.DDM_WARNING_LEVEL:
            System.out.println("DDM warning level");
            break;
        case DriftDetectionMethod.DDM_INCONTROL_LEVEL:
            //System.out.println("DDM incontrol level");
            break;
        default:
            System.out.println("ERROR");
            break;
        }
    }
From source file: moa.classifiers.meta.AccuracyWeightedDDMEnsemble.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        this.initVariables();
        this.classDistributions[(int) inst.classValue()]++;
        this.currentChunk.add(inst);
        this.processedInstances++;
        // update weighted of ensemble
        double mse_r = this.computeMseR();
        double newclassifierClassifierWeight = 1.0 / (mse_r + Double.MIN_VALUE);
        double forPassWeight[] = new double[this.learners.length];
        for (int i = 0; i < this.learners.length; i++) {
            this.weights[i][0] = 1.0
                    / (mse_r + this.computeMse(this.learners[(int) this.weights[i][1]], this.currentChunk)
                            + Double.MIN_VALUE);
            forPassWeight[i] = this.weights[i][0];
        }
        if (this.learners.length > 0)
            weightRecordContener.setWeightByInst(forPassWeight);
        // deal with DDM method
        int trueClass = (int) inst.classValue();
        boolean prediction;
        if (Utils.maxIndex(this.learners[currentHoldIndex].getVotesForInstance(inst)) == trueClass) {
            prediction = true;
        } else {
            prediction = false;
        }
        this.ddmLevel = this.driftDetectionMethod.computeNextVal(prediction, inst);
        switch (this.ddmLevel) {
        case DriftDetectionMethod.DDM_WARNING_LEVEL:
            if (newClassifierReset == true) {
                this.newclassifier.resetLearning();
                newClassifierReset = false;
            }
            this.newclassifier.trainOnInstance(inst);
            break;
        case DriftDetectionMethod.DDM_OUTCONTROL_LEVEL:
            // add newclassifer into ensemble, and as currentHold
            Classifier addedClassifier = null;
            if (this.learners.length < this.memberCountOption.getValue()) {
                currentHoldIndex = this.addToStoredReturnIndex(this.newclassifier, newclassifierClassifierWeight);
            } else {
                int poorestClassifier = this.getPoorestClassifierIndex();
                if (this.weights[poorestClassifier][0] < newclassifierClassifierWeight) {
                    this.weights[poorestClassifier][0] = newclassifierClassifierWeight;
                    addedClassifier = this.newclassifier.copy();
                    currentHoldIndex = (int) this.weights[poorestClassifier][1];
                    this.learners[currentHoldIndex] = addedClassifier;
                }
            }
            weightRecordContener.addNewID(currentHoldIndex, newclassifierClassifierWeight);
            // reseting
            this.processedInstances = 0;
            this.currentChunk = null;
            this.classDistributions = null;
            this.newclassifier = (Classifier) getPreparedClassOption(this.learnerOption);
            this.newclassifier.resetLearning();
            //System.out.println("single classifier drift");
            break;
        case DriftDetectionMethod.DDM_INCONTROL_LEVEL:
            newClassifierReset = true;
            break;
        default:
            System.exit(0);
        }
        this.learners[currentHoldIndex].trainOnInstance(inst);
        /**
         * weight write
         */
        processedInstForWeight++;
        if (processedInstForWeight == this.totalInstSizeOption.getValue() - 1) {
            try {
                weightRecordContener.write(this.weightFileOption.getValue(), this.totalInstSizeOption.getValue());
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
From source file: moa.classifiers.meta.LeveragingBag.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        int numClasses = inst.numClasses();
        // Output Codes
        if (this.initMatrixCodes == true) {
            this.matrixCodes = new int[this.ensemble.length][inst.numClasses()];
            for (int i = 0; i < this.ensemble.length; i++) {
                int numberOnes;
                int numberZeros;
                do { // until we have the same number of zeros and ones
                    numberOnes = 0;
                    numberZeros = 0;
                    for (int j = 0; j < numClasses; j++) {
                        int result = 0;
                        if (j == 1 && numClasses == 2) {
                            result = 1 - this.matrixCodes[i][0];
                        } else {
                            result = (this.classifierRandom.nextBoolean() ? 1 : 0);
                        }
                        this.matrixCodes[i][j] = result;
                        if (result == 1) {
                            numberOnes++;
                        } else {
                            numberZeros++;
                        }
                    }
                } while ((numberOnes - numberZeros) * (numberOnes - numberZeros) > (this.ensemble.length % 2));
            }
            this.initMatrixCodes = false;
        }
        boolean Change = false;
        Instance weightedInst = (Instance) inst.copy();
        double w = this.weightShrinkOption.getValue();
        // Train ensemble of classifiers
        for (int i = 0; i < this.ensemble.length; i++) {
            double k = 0.0;
            switch (this.leveraginBagAlgorithmOption.getChosenIndex()) {
            case 0: // LeveragingBag
                k = MiscUtils.poisson(w, this.classifierRandom);
                break;
            case 1: // LeveragingBagME
                double error = this.ADError[i].getEstimation();
                k = !this.ensemble[i].correctlyClassifies(weightedInst) ? 1.0
                        : (this.classifierRandom.nextDouble() < (error / (1.0 - error)) ? 1.0 : 0.0);
                break;
            case 2: // LeveragingBagHalf
                w = 1.0;
                k = this.classifierRandom.nextBoolean() ? 0.0 : w;
                break;
            case 3: // LeveragingBagWT
                w = 1.0;
                k = 1.0 + MiscUtils.poisson(w, this.classifierRandom);
                break;
            case 4: // LeveragingSubag
                w = 1.0;
                k = MiscUtils.poisson(1, this.classifierRandom);
                k = (k > 0) ? w : 0;
                break;
            }
            if (k > 0) {
                if (this.outputCodesOption.isSet()) {
                    weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
                }
                weightedInst.setWeight(inst.weight() * k);
                this.ensemble[i].trainOnInstance(weightedInst);
            }
            boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
            double ErrEstim = this.ADError[i].getEstimation();
            if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
                if (this.ADError[i].getEstimation() > ErrEstim) {
                    Change = true;
                }
            }
        }
        if (Change) {
            numberOfChangesDetected++;
            double max = 0.0;
            int imax = -1;
            for (int i = 0; i < this.ensemble.length; i++) {
                if (max < this.ADError[i].getEstimation()) {
                    max = this.ADError[i].getEstimation();
                    imax = i;
                }
            }
            if (imax != -1) {
                this.ensemble[imax].resetLearning();
                //this.ensemble[imax].trainOnInstance(inst);
                this.ADError[imax] = new ADWIN((double) this.deltaAdwinOption.getValue());
            }
        }
    }
From source file: moa.classifiers.meta.OnlineAccuracyUpdatedEnsemble.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        this.initVariables();
        if (this.processedInstances < this.windowSize) {
            this.classDistributions[(int) inst.classValue()]++;
        } else {
            this.classDistributions[this.currentWindow[processedInstances % this.windowSize]]--;
            this.classDistributions[(int) inst.classValue()]++;
        }
        this.currentWindow[processedInstances % this.windowSize] = (int) inst.classValue();
        this.processedInstances++;
        this.computeMseR();
        if (this.processedInstances % this.windowSize == 0) {
            this.createNewClassifier(inst);
        } else {
            this.candidate.classifier.trainOnInstance(inst);
            for (int i = 0; i < this.ensemble.length; i++) {
                this.weights[i][0] = this.computeWeight(i, inst);
            }
        }
        for (int i = 0; i < this.ensemble.length; i++) {
            this.ensemble[i].classifier.trainOnInstance(inst);
        }
    }
From source file: moa.classifiers.meta.OnlineAccuracyUpdatedEnsemble.java
License: Open Source License

    /**
     * Computes the weight of a learner before training a given example.
     *
     * @param i the identifier (in terms of array learners) of the classifier
     *          for which the weight is supposed to be computed
     * @param example the newest example
     * @return the computed weight.
     */
    protected double computeWeight(int i, Instance example) {
        int d = this.windowSize;
        int t = this.processedInstances - this.ensemble[i].birthday;
        double e_it = 0;
        double mse_it = 0;
        double voteSum = 0;
        try {
            double[] votes = this.ensemble[i].classifier.getVotesForInstance(example);
            for (double element : votes) {
                voteSum += element;
            }
            if (voteSum > 0) {
                double f_it = 1 - (votes[(int) example.classValue()] / voteSum);
                e_it = f_it * f_it;
            } else {
                e_it = 1;
            }
        } catch (Exception e) {
            e_it = 1;
        }
        if (t > d) {
            mse_it = this.ensemble[i].mse_it + e_it / (double) d - this.ensemble[i].squareErrors[t % d] / (double) d;
        } else {
            mse_it = this.ensemble[i].mse_it * (t - 1) / t + e_it / (double) t;
        }
        this.ensemble[i].squareErrors[t % d] = e_it;
        this.ensemble[i].mse_it = mse_it;
        if (linearOption.isSet()) {
            return java.lang.Math.max(mse_r - mse_it, Double.MIN_VALUE);
        } else {
            return 1.0 / (this.mse_r + mse_it + Double.MIN_VALUE);
        }
    }
From source file: moa.classifiers.meta.OzaBagASHT.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        int trueClass = (int) inst.classValue();
        for (int i = 0; i < this.ensemble.length; i++) {
            int k = MiscUtils.poisson(1.0, this.classifierRandom);
            if (k > 0) {
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k);
                if (Utils.maxIndex(this.ensemble[i].getVotesForInstance(inst)) == trueClass) {
                    this.error[i] += alpha * (0.0 - this.error[i]); // EWMA
                } else {
                    this.error[i] += alpha * (1.0 - this.error[i]); // EWMA
                }
                this.ensemble[i].trainOnInstance(weightedInst);
            }
        }
    }
From source file: moa.classifiers.meta.OzaBoostAdwin.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        int numClasses = inst.numClasses();
        // Set log (k-1) and (k-1) for SAMME Method
        if (this.sammeOption.isSet()) {
            this.Km1 = numClasses - 1;
            this.logKm1 = Math.log(this.Km1);
            this.initKm1 = false;
        }
        // Output Codes
        if (this.initMatrixCodes == true) {
            this.matrixCodes = new int[this.ensemble.length][inst.numClasses()];
            for (int i = 0; i < this.ensemble.length; i++) {
                int numberOnes;
                int numberZeros;
                do { // until we have the same number of zeros and ones
                    numberOnes = 0;
                    numberZeros = 0;
                    for (int j = 0; j < numClasses; j++) {
                        int result = 0;
                        if (j == 1 && numClasses == 2) {
                            result = 1 - this.matrixCodes[i][0];
                        } else {
                            result = (this.classifierRandom.nextBoolean() ? 1 : 0);
                        }
                        this.matrixCodes[i][j] = result;
                        if (result == 1) {
                            numberOnes++;
                        } else {
                            numberZeros++;
                        }
                    }
                } while ((numberOnes - numberZeros) * (numberOnes - numberZeros) > (this.ensemble.length % 2));
            }
            this.initMatrixCodes = false;
        }
        boolean Change = false;
        double lambda_d = 1.0;
        Instance weightedInst = (Instance) inst.copy();
        for (int i = 0; i < this.ensemble.length; i++) {
            double k = this.pureBoostOption.isSet() ? lambda_d
                    : MiscUtils.poisson(lambda_d * this.Km1, this.classifierRandom);
            if (k > 0.0) {
                if (this.outputCodesOption.isSet()) {
                    weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
                }
                weightedInst.setWeight(inst.weight() * k);
                this.ensemble[i].trainOnInstance(weightedInst);
            }
            boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(weightedInst);
            if (correctlyClassifies) {
                this.scms[i] += lambda_d;
                lambda_d *= this.trainingWeightSeenByModel / (2 * this.scms[i]);
            } else {
                this.swms[i] += lambda_d;
                lambda_d *= this.trainingWeightSeenByModel / (2 * this.swms[i]);
            }
            double ErrEstim = this.ADError[i].getEstimation();
            if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
                if (this.ADError[i].getEstimation() > ErrEstim) {
                    Change = true;
                }
            }
        }
        if (Change) {
            numberOfChangesDetected++;
            double max = 0.0;
            int imax = -1;
            for (int i = 0; i < this.ensemble.length; i++) {
                if (max < this.ADError[i].getEstimation()) {
                    max = this.ADError[i].getEstimation();
                    imax = i;
                }
            }
            if (imax != -1) {
                this.ensemble[imax].resetLearning();
                //this.ensemble[imax].trainOnInstance(inst);
                this.ADError[imax] = new ADWIN((double) this.deltaAdwinOption.getValue());
                this.scms[imax] = 0;
                this.swms[imax] = 0;
            }
        }
    }
From source file: moa.classifiers.meta.OzaBoostAdwin.java
License: Open Source License

    public double[] getVotesForInstanceBinary(Instance inst) {
        double combinedVote[] = new double[(int) inst.numClasses()];
        Instance weightedInst = (Instance) inst.copy();
        if (this.initMatrixCodes == false) {
            for (int i = 0; i < this.ensemble.length; i++) {
                // Replace class by OC
                weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]);
                double vote[];
                vote = this.ensemble[i].getVotesForInstance(weightedInst);
                // Binary Case
                int voteClass = 0;
                if (vote.length == 2) {
                    voteClass = (vote[1] > vote[0] ? 1 : 0);
                }
                // Update votes
                for (int j = 0; j < inst.numClasses(); j++) {
                    if (this.matrixCodes[i][j] == voteClass) {
                        combinedVote[j] += getEnsembleMemberWeight(i);
                    }
                }
            }
        }
        return combinedVote;
    }
From source file: moa.classifiers.meta.PAME.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // get the prediction vector back
        double[] ht = this.getPredictions(inst);
        double yt = inst.classValue();
        if (inst.classIndex() == 0) {
            this.rareCount += 1.0;
        }
        this.count += 1.0;
        // convert to a positive / negative classification scenario
        if (yt == 0) {
            yt = 1.0;
        } else {
            yt = -1.0;
        }
        /*
         * update expert weights
         */
        if (this.updateMethodOption.getChosenIndex() == PAME1) {
            pame1_weights(ht, yt);
        } else if (this.updateMethodOption.getChosenIndex() == PAME2) {
            pame2_weights(ht, yt);
        } else if (this.updateMethodOption.getChosenIndex() == PAME3) {
            pame3_weights(ht, yt);
        }
        /*
         * we are going to use an online bagging / boosting strategy to update the
         * experts. In the end our objective with the weight formulation is a bit
         * more of a decision theoretic approach.
         */
        for (int i = 0; i < this.ensemble.length; i++) {
            // sample from a Poisson probability distribution as implemented in
            // online bagging and boosting
            double w;
            if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
                w = 1.0 / (this.rareCount / this.count);
                if (this.logTransformOption.isSet()) {
                    w = Math.log(w);
                }
            } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
                w = 1.0 - this.rareCount / this.count;
            } else {
                w = 1.0;
            }
            int k = MiscUtils.poisson(w, this.classifierRandom);
            // update the expert accordingly
            if (k > 0) {
                // this works by updating the expert k times with the same example:
                // if k = 4, the expert is trained on the same example 4 times in a row.
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k); // set the # of training times
                this.ensemble[i].trainOnInstance(weightedInst); // update expert
            }
        }
        this.n_negativeWeights = 0;
        for (int i = 0; i < this.weights.length; i++) {
            if (this.weights[i] < 0.0)
                this.n_negativeWeights++;
        }
    }
From source file: moa.classifiers.meta.PAMEAdwin.java
License: Open Source License

    @Override
    public void trainOnInstanceImpl(Instance inst) {
        // get the prediction vector back
        double[] ht = this.getPredictions(inst);
        double yt = inst.classValue();
        if (inst.classIndex() == 0) {
            this.rareCount += 1.0;
        }
        this.count += 1.0;
        // convert to a positive / negative classification scenario
        if (yt == 0) {
            //System.out.println("Y is positive" + yt);
            yt = 1.0;
        } else {
            //System.out.println("Y is negative" + yt);
            yt = -1.0;
        }
        /*
         * update expert weights
         */
        if (this.updateMethodOption.getChosenIndex() == PAME1) {
            pame1_weights(ht, yt);
        } else if (this.updateMethodOption.getChosenIndex() == PAME2) {
            pame2_weights(ht, yt);
        } else if (this.updateMethodOption.getChosenIndex() == PAME3) {
            pame3_weights(ht, yt);
        } else if (this.updateMethodOption.getChosenIndex() == PAME23) {
            pame23_weights(ht, yt);
        }
        /*
         * we are going to use an online bagging / boosting strategy to update the
         * experts. In the end our objective with the weight formulation is a bit
         * more of a decision theoretic approach.
         */
        boolean Change = false;
        for (int i = 0; i < this.ensemble.length; i++) {
            // sample from a Poisson probability distribution as implemented in
            // online bagging and boosting
            double w;
            if (this.overSampleOption.isSet() && inst.classIndex() == 0) {
                w = 1.0 / (this.rareCount / this.count);
                if (this.logTransformOption.isSet()) {
                    w = Math.log(w);
                }
            } else if (this.underSampleOption.isSet() && inst.classIndex() != 0) {
                w = 1.0 - this.rareCount / this.count;
            } else {
                w = 1.0;
            }
            int k = MiscUtils.poisson(w, this.classifierRandom);
            // update the expert accordingly
            if (k > 0) {
                // this works by updating the expert k times with the same example:
                // if k = 4, the expert is trained on the same example 4 times in a row.
                Instance weightedInst = (Instance) inst.copy();
                weightedInst.setWeight(inst.weight() * k); // set the # of training times
                this.ensemble[i].trainOnInstance(weightedInst); // update expert
            }
            boolean correctlyClassifies = this.ensemble[i].correctlyClassifies(inst);
            double ErrEstim = this.ADError[i].getEstimation();
            if (this.ADError[i].setInput(correctlyClassifies ? 0 : 1)) {
                if (this.ADError[i].getEstimation() > ErrEstim) {
                    Change = true;
                }
            }
        }
        /*
         * if change was detected, remove the worst expert from the ensemble of
         * classifiers.
         */
        if (Change) {
            double max = 0.0;
            int imax = -1;
            for (int i = 0; i < this.ensemble.length; i++) {
                if (max < this.ADError[i].getEstimation()) {
                    max = this.ADError[i].getEstimation();
                    imax = i;
                }
            }
            if (imax != -1) {
                this.ensemble[imax].resetLearning();
                //this.ensemble[imax].trainOnInstance(inst);
                this.ADError[imax] = new ADWIN();
            }
        }
        this.n_negativeWeights = 0;
        for (int i = 0; i < this.weights.length; i++) {
            if (this.weights[i] < 0.0)
                this.n_negativeWeights++;
        }
    }