List of usage examples for weka.core Instance classValue
public double classValue();
From source file:tr.gov.ulakbim.jDenetX.classifiers.OzaBoostAdwin.java
License:Open Source License
public double[] getVotesForInstanceBinary(Instance inst) { double combinedVote[] = new double[(int) inst.numClasses()]; Instance weightedInst = (Instance) inst.copy(); if (this.initMatrixCodes) { for (int i = 0; i < this.ensemble.length; i++) { //Replace class by OC weightedInst.setClassValue((double) this.matrixCodes[i][(int) inst.classValue()]); double vote[]; vote = this.ensemble[i].getVotesForInstance(weightedInst); //Binary Case int voteClass = 0; if (vote.length == 2) { voteClass = (vote[1] > vote[0] ? 1 : 0); }/* ww w. ja va 2 s . c o m*/ //Update votes for (int j = 0; j < inst.numClasses(); j++) { if (this.matrixCodes[i][j] == voteClass) { combinedVote[j] += getEnsembleMemberWeight(i); } } } } return combinedVote; }
From source file:tr.gov.ulakbim.jDenetX.classifiers.Perceptron.java
License:Open Source License
/**
 * Trains this multi-class perceptron on one instance: lazily sizes and
 * randomises the weight matrix on the first call after a reset, then performs
 * one gradient step per class output unit.
 */
@Override
public void trainOnInstanceImpl(Instance inst) {
    // Lazy initialisation: size the weight matrix from the first instance seen.
    if (this.reset) {
        this.reset = false;
        this.numberAttributes = inst.numAttributes();
        this.numberClasses = inst.numClasses();
        this.weightAttribute = new double[inst.numClasses()][inst.numAttributes()];
        for (int i = 0; i < inst.numClasses(); i++) {
            for (int j = 0; j < inst.numAttributes(); j++) {
                // Small random weights in [-0.1, 0.1).
                // NOTE(review): unseeded Math.random() makes training non-reproducible.
                weightAttribute[i][j] = 0.2 * Math.random() - 0.1;
            }
        }
    }
    // Current per-class outputs for this instance (one score per class).
    double[] preds = new double[inst.numClasses()];
    for (int i = 0; i < inst.numClasses(); i++) {
        preds[i] = prediction(inst, i);
    }
    double learningRatio = learningRatioOption.getValue();
    int actualClass = (int) inst.classValue();
    // One gradient step per class: target is 1 for the true class, 0 otherwise.
    for (int i = 0; i < inst.numClasses(); i++) {
        double actual = (i == actualClass) ? 1.0 : 0.0;
        // delta = (target - output) * output * (1 - output): squared-error gradient
        // through a sigmoid output — assumes prediction() is logistic, TODO confirm.
        double delta = (actual - preds[i]) * preds[i] * (1 - preds[i]);
        // Update input weights; only the first numAttributes()-1 values are used,
        // presumably because the class attribute is the last one — verify.
        for (int j = 0; j < inst.numAttributes() - 1; j++) {
            this.weightAttribute[i][j] += learningRatio * delta * inst.value(j);
        }
        // Last column acts as the bias weight (implicit input of 1).
        this.weightAttribute[i][inst.numAttributes() - 1] += learningRatio * delta;
    }
}
From source file:tr.gov.ulakbim.jDenetX.experiments.wrappers.EvalActiveBoostingID.java
License:Open Source License
protected int selfTest(InstanceStream testStream) { int returnStatus = 1; Instance testInst = null; int maxInstances = this.maxInstancesOption.getValue(); long instancesProcessed = 0; //InstanceStream testStream = (InstanceStream) getPreparedClassOption(this.testStreamOption); ClassificationPerformanceEvaluator evaluator = new BasicClassificationPerformanceEvaluator(); evaluator.reset();/*from w w w . ja v a 2 s . c om*/ while (testStream.hasMoreInstances() && ((maxInstances < 0) || (instancesProcessed < maxInstances))) { testInst = (Instance) testStream.nextInstance().copy(); int trueClass = (int) testInst.classValue(); testInst.setClassMissing(); double[] prediction = model.getVotesForInstance(testInst); evaluator.addClassificationAttempt(trueClass, prediction, testInst.weight()); instancesProcessed++; if (instancesProcessed % INSTANCES_BETWEEN_MONITOR_UPDATES == 0) { long estimatedRemainingInstances = testStream.estimatedRemainingInstances(); if (maxInstances > 0) { long maxRemaining = maxInstances - instancesProcessed; if ((estimatedRemainingInstances < 0) || (maxRemaining < estimatedRemainingInstances)) { estimatedRemainingInstances = maxRemaining; } } System.out.println(estimatedRemainingInstances < 0 ? -1.0 : (double) instancesProcessed / (double) (instancesProcessed + estimatedRemainingInstances)); } } return returnStatus; }
From source file:tr.gov.ulakbim.jDenetX.experiments.wrappers.EvalActiveBoostingID.java
License:Open Source License
/**
 * Evaluates the boosting model on a copy of the training stream, then
 * self-trains and self-tests against the test stream.
 *
 * NOTE(review): the {@code model} parameter is immediately overwritten with a
 * new SelfOzaBoostID, so the classifier passed in by the caller is ignored.
 * NOTE(review): selfTrain() is invoked with only the LAST instance drawn from
 * the training stream (whose class has been set missing) — verify intended.
 *
 * @return a LearningEvaluation built from the pre-self-train measurements
 */
public LearningEvaluation evalModel(InstanceStream trainStream, InstanceStream testStream,
        AbstractClassifier model) {
    model = new SelfOzaBoostID();
    InstanceStream stream = (InstanceStream) trainStream.copy();
    ClassificationPerformanceEvaluator evaluator = new BasicClassificationPerformanceEvaluator();
    Instance testInst = null;
    int maxInstances = this.maxInstancesOption.getValue(); // < 0 means unlimited
    long instancesProcessed = 0;
    System.out.println("Evaluating model...");
    while (stream.hasMoreInstances() && ((maxInstances < 0) || (instancesProcessed < maxInstances))) {
        // Copy so hiding the class does not mutate the stream's instance.
        testInst = (Instance) stream.nextInstance().copy();
        int trueClass = (int) testInst.classValue();
        testInst.setClassMissing();
        double[] prediction = model.getVotesForInstance(testInst);
        evaluator.addClassificationAttempt(trueClass, prediction, testInst.weight());
        instancesProcessed++;
        // Periodic progress report: fraction processed, or -1.0 when unknown.
        if (instancesProcessed % INSTANCES_BETWEEN_MONITOR_UPDATES == 0) {
            long estimatedRemainingInstances = stream.estimatedRemainingInstances();
            if (maxInstances > 0) {
                long maxRemaining = maxInstances - instancesProcessed;
                if ((estimatedRemainingInstances < 0) || (maxRemaining < estimatedRemainingInstances)) {
                    estimatedRemainingInstances = maxRemaining;
                }
            }
            System.out.println(estimatedRemainingInstances < 0 ? -1.0
                    : (double) instancesProcessed / (double) (instancesProcessed + estimatedRemainingInstances));
        }
    }
    System.out.println("Accuracy result before self-train: " + evaluator.getPerformanceMeasurements()[1]);
    selfTrain(testInst);
    // NOTE(review): returnStatus is never used.
    int returnStatus = selfTest(testStream);
    EvalActiveBoostingID.model.resetLearningImpl(); //Learning is completed so we can reset
    return new LearningEvaluation(evaluator.getPerformanceMeasurements());
}
From source file:tr.gov.ulakbim.jDenetX.streams.generators.multilabel.MetaMultilabelGenerator.java
License:Open Source License
private Instance getNextWithBinary(int i) { int lim = 1000; if (queue[i].size() <= 0) { int c = -1; while (lim-- > 0) { Instance tinst = this.m_BinaryGenerator.nextInstance(); //System.err.println("next binary : "+tinst); c = (int) Math.round(tinst.classValue()); if (i == c) return tinst; else if (queue[c].size() < 100) queue[c].add(tinst);//from w ww.j a v a 2 s . c o m } System.err.println( "[Overflow] The binary stream is too skewed, could not get an example of class " + i + ""); System.exit(1); return null; } else return queue[i].remove(); }
From source file:tr.gov.ulakbim.jDenetX.tasks.EvaluateInterleavedTestThenTrain.java
License:Open Source License
/**
 * Interleaved test-then-train evaluation: each instance from the stream is
 * first used to test the learner (class hidden), then to train it. Records a
 * LearningCurve entry every sampleFrequency instances (optionally dumped to a
 * file) and periodically reports progress to the task monitor.
 *
 * @return the accumulated LearningCurve, or null if the monitor aborts
 */
@Override
protected Object doMainTask(TaskMonitor monitor, ObjectRepository repository) {
    Classifier learner = (Classifier) getPreparedClassOption(this.learnerOption);
    InstanceStream stream = (InstanceStream) getPreparedClassOption(this.streamOption);
    ClassificationPerformanceEvaluator evaluator = (ClassificationPerformanceEvaluator) getPreparedClassOption(
            this.evaluatorOption);
    learner.setModelContext(stream.getHeader());
    int maxInstances = this.instanceLimitOption.getValue(); // < 0 means unlimited
    long instancesProcessed = 0;
    int maxSeconds = this.timeLimitOption.getValue(); // < 0 means no time limit
    int secondsElapsed = 0;
    monitor.setCurrentActivity("Evaluating learner...", -1.0);
    LearningCurve learningCurve = new LearningCurve("learning evaluation instances");
    // Optional immediate-result dump file; appended to if it already exists.
    File dumpFile = this.dumpFileOption.getFile();
    PrintStream immediateResultStream = null;
    if (dumpFile != null) {
        try {
            if (dumpFile.exists()) {
                immediateResultStream = new PrintStream(new FileOutputStream(dumpFile, true), true);
            } else {
                immediateResultStream = new PrintStream(new FileOutputStream(dumpFile), true);
            }
        } catch (Exception ex) {
            throw new RuntimeException("Unable to open immediate result file: " + dumpFile, ex);
        }
    }
    boolean firstDump = true;
    boolean preciseCPUTiming = TimingUtils.enablePreciseTiming();
    long evaluateStartTime = TimingUtils.getNanoCPUTimeOfCurrentThread();
    long lastEvaluateStartTime = evaluateStartTime;
    double RAMHours = 0.0;
    while (stream.hasMoreInstances() && ((maxInstances < 0) || (instancesProcessed < maxInstances))
            && ((maxSeconds < 0) || (secondsElapsed < maxSeconds))) {
        Instance trainInst = stream.nextInstance();
        // Test on a copy with the class hidden, then train on the original.
        Instance testInst = (Instance) trainInst.copy();
        int trueClass = (int) trainInst.classValue();
        testInst.setClassMissing();
        double[] prediction = learner.getVotesForInstance(testInst);
        evaluator.addClassificationAttempt(trueClass, prediction, testInst.weight());
        learner.trainOnInstance(trainInst);
        instancesProcessed++;
        // Record a learning-curve entry every sampleFrequency instances.
        if (instancesProcessed % this.sampleFrequencyOption.getValue() == 0) {
            long evaluateTime = TimingUtils.getNanoCPUTimeOfCurrentThread();
            double time = TimingUtils.nanoTimeToSeconds(evaluateTime - evaluateStartTime);
            double timeIncrement = TimingUtils.nanoTimeToSeconds(evaluateTime - lastEvaluateStartTime);
            // RAM-Hours: model size in GB times elapsed hours since the last sample.
            double RAMHoursIncrement = learner.measureByteSize() / (1024.0 * 1024.0 * 1024.0); //GBs
            RAMHoursIncrement *= (timeIncrement / 3600.0); //Hours
            RAMHours += RAMHoursIncrement;
            lastEvaluateStartTime = evaluateTime;
            learningCurve.insertEntry(new LearningEvaluation(
                    new Measurement[] { new Measurement("learning evaluation instances", instancesProcessed),
                            new Measurement("evaluation time (" + (preciseCPUTiming ? "cpu " : "") + "seconds)",
                                    time),
                            new Measurement("model cost (RAM-Hours)", RAMHours) },
                    evaluator, learner));
            if (immediateResultStream != null) {
                if (firstDump) {
                    immediateResultStream.println(learningCurve.headerToString());
                    firstDump = false;
                }
                immediateResultStream.println(learningCurve.entryToString(learningCurve.numEntries() - 1));
                immediateResultStream.flush();
            }
        }
        // Periodic monitor housekeeping: abort check, progress estimate, preview,
        // and refresh of the elapsed-seconds counter used by the time limit.
        if (instancesProcessed % INSTANCES_BETWEEN_MONITOR_UPDATES == 0) {
            if (monitor.taskShouldAbort()) {
                return null;
            }
            long estimatedRemainingInstances = stream.estimatedRemainingInstances();
            if (maxInstances > 0) {
                long maxRemaining = maxInstances - instancesProcessed;
                if ((estimatedRemainingInstances < 0) || (maxRemaining < estimatedRemainingInstances)) {
                    estimatedRemainingInstances = maxRemaining;
                }
            }
            monitor.setCurrentActivityFractionComplete(estimatedRemainingInstances < 0 ? -1.0
                    : (double) instancesProcessed / (double) (instancesProcessed + estimatedRemainingInstances));
            if (monitor.resultPreviewRequested()) {
                monitor.setLatestResultPreview(learningCurve.copy());
            }
            secondsElapsed = (int) TimingUtils
                    .nanoTimeToSeconds(TimingUtils.getNanoCPUTimeOfCurrentThread() - evaluateStartTime);
        }
    }
    if (immediateResultStream != null) {
        immediateResultStream.close();
    }
    return learningCurve;
}
From source file:tr.gov.ulakbim.jDenetX.tasks.EvaluateModel.java
License:Open Source License
/**
 * Evaluates a pre-built (static) model on a stream: each instance is copied,
 * its class hidden, predicted, and scored; no training takes place.
 *
 * @return a LearningEvaluation with the evaluator's final measurements,
 *         or null if the monitor requests an abort
 */
@Override
public Object doMainTask(TaskMonitor monitor, ObjectRepository repository) {
    Classifier model = (Classifier) getPreparedClassOption(this.modelOption);
    InstanceStream stream = (InstanceStream) getPreparedClassOption(this.streamOption);
    ClassificationPerformanceEvaluator evaluator = (ClassificationPerformanceEvaluator) getPreparedClassOption(
            this.evaluatorOption);
    int maxInstances = this.maxInstancesOption.getValue(); // < 0 means unlimited
    long instancesProcessed = 0;
    monitor.setCurrentActivity("Evaluating model...", -1.0);
    while (stream.hasMoreInstances() && ((maxInstances < 0) || (instancesProcessed < maxInstances))) {
        // Copy so hiding the class does not mutate the stream's instance.
        Instance testInst = (Instance) stream.nextInstance().copy();
        int trueClass = (int) testInst.classValue();
        testInst.setClassMissing();
        double[] prediction = model.getVotesForInstance(testInst);
        evaluator.addClassificationAttempt(trueClass, prediction, testInst.weight());
        instancesProcessed++;
        // Periodic monitor housekeeping: abort check, progress estimate, preview.
        if (instancesProcessed % INSTANCES_BETWEEN_MONITOR_UPDATES == 0) {
            if (monitor.taskShouldAbort()) {
                return null;
            }
            long estimatedRemainingInstances = stream.estimatedRemainingInstances();
            if (maxInstances > 0) {
                long maxRemaining = maxInstances - instancesProcessed;
                if ((estimatedRemainingInstances < 0) || (maxRemaining < estimatedRemainingInstances)) {
                    estimatedRemainingInstances = maxRemaining;
                }
            }
            monitor.setCurrentActivityFractionComplete(estimatedRemainingInstances < 0 ? -1.0
                    : (double) instancesProcessed / (double) (instancesProcessed + estimatedRemainingInstances));
            if (monitor.resultPreviewRequested()) {
                monitor.setLatestResultPreview(new LearningEvaluation(evaluator.getPerformanceMeasurements()));
            }
        }
    }
    return new LearningEvaluation(evaluator.getPerformanceMeasurements());
}
From source file:tr.gov.ulakbim.jDenetX.tasks.EvaluatePrequential.java
License:Open Source License
/**
 * Prequential (test-then-train) evaluation with windowed/EWMA/fading-factor
 * evaluator support: each instance is first used to test the learner, then to
 * train it. Optionally writes per-instance predictions to a file, records a
 * LearningCurve entry every sampleFrequency instances, and reports progress
 * to the task monitor.
 *
 * @return the accumulated LearningCurve, or null if the monitor aborts
 */
@Override
protected Object doMainTask(TaskMonitor monitor, ObjectRepository repository) {
    Classifier learner = (Classifier) getPreparedClassOption(this.learnerOption);
    InstanceStream stream = (InstanceStream) getPreparedClassOption(this.streamOption);
    ClassificationPerformanceEvaluator evaluator = (ClassificationPerformanceEvaluator) getPreparedClassOption(
            this.evaluatorOption);
    //New for prequential methods
    // Forward the task's width/alpha options to evaluators that support them.
    if (evaluator instanceof WindowClassificationPerformanceEvaluator) {
        ((WindowClassificationPerformanceEvaluator) evaluator).setWindowWidth(widthOption.getValue());
    }
    if (evaluator instanceof EWMAClassificationPerformanceEvaluator) {
        ((EWMAClassificationPerformanceEvaluator) evaluator).setalpha(alphaOption.getValue());
    }
    if (evaluator instanceof FadingFactorClassificationPerformanceEvaluator) {
        ((FadingFactorClassificationPerformanceEvaluator) evaluator).setalpha(alphaOption.getValue());
    }
    //End New for prequential methods
    learner.setModelContext(stream.getHeader());
    int maxInstances = this.instanceLimitOption.getValue(); // < 0 means unlimited
    long instancesProcessed = 0;
    int maxSeconds = this.timeLimitOption.getValue(); // < 0 means no time limit
    int secondsElapsed = 0;
    monitor.setCurrentActivity("Evaluating learner...", -1.0);
    LearningCurve learningCurve = new LearningCurve("learning evaluation instances");
    // Optional immediate-result dump file; appended to if it already exists.
    File dumpFile = this.dumpFileOption.getFile();
    PrintStream immediateResultStream = null;
    if (dumpFile != null) {
        try {
            if (dumpFile.exists()) {
                immediateResultStream = new PrintStream(new FileOutputStream(dumpFile, true), true);
            } else {
                immediateResultStream = new PrintStream(new FileOutputStream(dumpFile), true);
            }
        } catch (Exception ex) {
            throw new RuntimeException("Unable to open immediate result file: " + dumpFile, ex);
        }
    }
    //File for output predictions
    // Optional per-instance prediction log ("predictedIndex,trueClass" lines).
    File outputPredictionFile = this.outputPredictionFileOption.getFile();
    PrintStream outputPredictionResultStream = null;
    if (outputPredictionFile != null) {
        try {
            if (outputPredictionFile.exists()) {
                outputPredictionResultStream = new PrintStream(new FileOutputStream(outputPredictionFile, true),
                        true);
            } else {
                outputPredictionResultStream = new PrintStream(new FileOutputStream(outputPredictionFile), true);
            }
        } catch (Exception ex) {
            throw new RuntimeException("Unable to open prediction result file: " + outputPredictionFile, ex);
        }
    }
    boolean firstDump = true;
    boolean preciseCPUTiming = TimingUtils.enablePreciseTiming();
    long evaluateStartTime = TimingUtils.getNanoCPUTimeOfCurrentThread();
    long lastEvaluateStartTime = evaluateStartTime;
    double RAMHours = 0.0;
    while (stream.hasMoreInstances() && ((maxInstances < 0) || (instancesProcessed < maxInstances))
            && ((maxSeconds < 0) || (secondsElapsed < maxSeconds))) {
        Instance trainInst = stream.nextInstance();
        // Test on a copy with the class hidden, then train on the original.
        Instance testInst = (Instance) trainInst.copy();
        int trueClass = (int) trainInst.classValue();
        testInst.setClassMissing();
        double[] prediction = learner.getVotesForInstance(testInst);
        // Output prediction
        if (outputPredictionFile != null) {
            outputPredictionResultStream.println(Utils.maxIndex(prediction) + "," + trueClass);
        }
        evaluator.addClassificationAttempt(trueClass, prediction, testInst.weight());
        learner.trainOnInstance(trainInst);
        instancesProcessed++;
        // Record a learning-curve entry every sampleFrequency instances.
        if (instancesProcessed % this.sampleFrequencyOption.getValue() == 0) {
            long evaluateTime = TimingUtils.getNanoCPUTimeOfCurrentThread();
            double time = TimingUtils.nanoTimeToSeconds(evaluateTime - evaluateStartTime);
            double timeIncrement = TimingUtils.nanoTimeToSeconds(evaluateTime - lastEvaluateStartTime);
            // RAM-Hours: model size in GB times elapsed hours since the last sample.
            double RAMHoursIncrement = learner.measureByteSize() / (1024.0 * 1024.0 * 1024.0); //GBs
            RAMHoursIncrement *= (timeIncrement / 3600.0); //Hours
            RAMHours += RAMHoursIncrement;
            lastEvaluateStartTime = evaluateTime;
            learningCurve.insertEntry(new LearningEvaluation(
                    new Measurement[] { new Measurement("learning evaluation instances", instancesProcessed),
                            new Measurement("evaluation time (" + (preciseCPUTiming ? "cpu " : "") + "seconds)",
                                    time),
                            new Measurement("model cost (RAM-Hours)", RAMHours) },
                    evaluator, learner));
            if (immediateResultStream != null) {
                if (firstDump) {
                    immediateResultStream.println(learningCurve.headerToString());
                    firstDump = false;
                }
                immediateResultStream.println(learningCurve.entryToString(learningCurve.numEntries() - 1));
                immediateResultStream.flush();
            }
        }
        // Periodic monitor housekeeping: abort check, progress estimate, preview,
        // and refresh of the elapsed-seconds counter used by the time limit.
        if (instancesProcessed % INSTANCES_BETWEEN_MONITOR_UPDATES == 0) {
            if (monitor.taskShouldAbort()) {
                return null;
            }
            long estimatedRemainingInstances = stream.estimatedRemainingInstances();
            if (maxInstances > 0) {
                long maxRemaining = maxInstances - instancesProcessed;
                if ((estimatedRemainingInstances < 0) || (maxRemaining < estimatedRemainingInstances)) {
                    estimatedRemainingInstances = maxRemaining;
                }
            }
            monitor.setCurrentActivityFractionComplete(estimatedRemainingInstances < 0 ? -1.0
                    : (double) instancesProcessed / (double) (instancesProcessed + estimatedRemainingInstances));
            if (monitor.resultPreviewRequested()) {
                monitor.setLatestResultPreview(learningCurve.copy());
            }
            secondsElapsed = (int) TimingUtils
                    .nanoTimeToSeconds(TimingUtils.getNanoCPUTimeOfCurrentThread() - evaluateStartTime);
        }
    }
    if (immediateResultStream != null) {
        immediateResultStream.close();
    }
    if (outputPredictionResultStream != null) {
        outputPredictionResultStream.close();
    }
    return learningCurve;
}
From source file:tubes2ai.AIJKFFNN.java
@Override public void buildClassifier(Instances instances) throws Exception { getCapabilities().testWithFail(instances); int nInputNeuron, nOutputNeuron; /* Inisialisasi tiap layer */ nInputNeuron = instances.numAttributes() - 1; nOutputNeuron = instances.numClasses(); inputLayer = new Vector<Neuron>(nInputNeuron); hiddenLayer = new Vector<Neuron>(nHiddenNeuron); outputLayer = new Vector<Neuron>(nOutputNeuron); Random random = new Random(getSeed()); Enumeration<Attribute> attributeEnumeration = instances.enumerateAttributes(); attributeList = Collections.list(attributeEnumeration); /* Mengisi layer dengan neuron-neuron dengan weight default */ for (int k = 0; k < nOutputNeuron; k++) { outputLayer.add(new Neuron()); }/*from ww w. j a va2 s . c o m*/ for (int k = 0; k < nInputNeuron; k++) { inputLayer.add(new Neuron()); } /* Kalau ada hidden layer */ if (nHiddenLayer > 0) { for (int j = 0; j < nHiddenNeuron; j++) { hiddenLayer.add(new Neuron()); } } /* Link */ if (nHiddenLayer > 0) { linkNeurons(inputLayer, hiddenLayer); linkNeurons(hiddenLayer, outputLayer); } else { linkNeurons(inputLayer, outputLayer); } for (Neuron neuron : inputLayer) { neuron.initialize(random); } inputLayerArray = new Neuron[nInputNeuron]; int i = 0; for (Neuron neuron : inputLayer) { inputLayerArray[i] = neuron; i++; } outputCalculationArray = new Neuron[nHiddenLayer * nHiddenNeuron + nOutputNeuron]; int j = 0; for (Neuron neuron : hiddenLayer) { outputCalculationArray[j] = neuron; j++; } for (Neuron neuron : outputLayer) { outputCalculationArray[j] = neuron; j++; } if (nHiddenLayer > 0) { for (Neuron neuron : hiddenLayer) { neuron.initialize(random); } } for (Neuron neuron : outputLayer) { neuron.initialize(random); } /* Learning */ int iterations = 0; List<Double> errors = new ArrayList<>(); do { for (Instance instance : instances) { /* Memasukkan instance ke input neuron */ loadInput(instance); /* Menghitung error dari layer output ke input */ /* Menyiapkan nilai target */ for (int ix = 0; 
ix < outputLayer.size(); ix++) { if (ix == (int) instance.classValue()) { outputLayer.get(ix).errorFromTarget(1); } else { outputLayer.get(ix).errorFromTarget(0); } } if (nHiddenLayer != 0) { for (Neuron nHid : hiddenLayer) { nHid.calculateError(); } } /* Update Weight */ for (int k = 0; k < outputCalculationArray.length; k++) { outputCalculationArray[k].updateWeights(learningRate); } } iterations++; if (iterations % 500 == 0) { System.out.println("FFNN iteration " + iterations); } } while (iterations < maxIterations); }
From source file:wekimini.learning.LinearRegressionAttributeTransformer.java
@Override public Instance convertInstance(Instance instance) throws Exception { double[] newVals = new double[numInputs * exponent + 1]; int next = 0; for (int i = 0; i < instance.numAttributes() - 1; i++) { for (int j = 1; j <= exponent; j++) { newVals[next] = Math.pow(instance.value(i), j); next++;//from ww w .j a va 2 s . c om } } //Now add class: newVals[newVals.length - 1] = instance.classValue(); //Convert: return new Instance(instance.weight(), newVals); }