List of usage examples for weka.core Instances classIndex
public int classIndex()
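Before the project examples below, here is a minimal, self-contained sketch of the usual classIndex() idiom: classIndex() returns -1 until a class attribute has been chosen, so callers typically check it and fall back to the last attribute. The file name "iris.arff" and the class name ClassIndexDemo are placeholders for illustration, not taken from any of the projects listed here.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class ClassIndexDemo {
    public static void main(String[] args) throws Exception {
        // Load any ARFF/CSV file; "iris.arff" is just a placeholder path.
        Instances data = new DataSource("iris.arff").getDataSet();

        // classIndex() returns -1 until a class attribute has been set.
        if (data.classIndex() == -1) {
            data.setClassIndex(data.numAttributes() - 1); // use the last attribute as the class
        }

        System.out.println("Class attribute: " + data.classAttribute().name()
                + " (index " + data.classIndex() + ")");
    }
}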
From source file:moa.tud.ke.patching.AdaptivePatchingAdwin.java
public static Instances changeClassToWrongRight(Instances instances) throws Exception {
    int whichAttribute = instances.classIndex();
    // System.out.println(instances.classAttribute().toString());

    Add filter = new Add();
    //filter.setAttributeIndex("" + (whichAttribute + 1));
    filter.setAttributeName("newClass");
    String newNominalLabels = "wrong,right";
    filter.setNominalLabels(newNominalLabels);
    filter.setInputFormat(instances);
    instances = Filter.useFilter(instances, filter);

    Iterator inst = instances.iterator();
    int index = 0;
    while (inst.hasNext()) {
        weka.core.Instance a = (weka.core.Instance) inst.next();
        a.setValue((whichAttribute + 1), a.classValue());
        index++;
    }

    Remove rmfilter = new Remove();
    rmfilter.setAttributeIndices("" + (instances.classIndex() + 1));
    rmfilter.setInputFormat(instances);
    instances = Filter.useFilter(instances, rmfilter);

    instances.setClassIndex(instances.numAttributes() - 1);
    // System.out.println(instances.classAttribute().toString());

    return instances;
}
From source file:moa.tud.ke.patching.AdaptivePatchingTwoAdwins.java
/**
 * Creates a copy of the instances and redefines the problem such that it is
 * now important to classify the wrongly classified instances.
 */
private Instances redefineProblem(Instances data) {
    Instances redefInstances = new Instances(data); // deep copy of instance store
    // System.out.println(reDefinedClasses.attributeStats(reDefinedClasses.classIndex()));
    // System.out.println("Before filtering: " + wrongData.size());

    double predictedClass = 0;
    int oldClassIndex = redefInstances.classIndex();
    try {
        Iterator inst = redefInstances.iterator();
        while (inst.hasNext()) {
            weka.core.Instance a = (weka.core.Instance) inst.next();
            predictedClass = this.baseClassifier.classifyInstance(a); // Caution: this must remain "base"!!
            if (predictedClass == a.classValue()) {
                a.setClassValue(1);
            } else {
                a.setClassValue(0);
            }
        }

        if (this.useBaseClassAsAttribute.isSet()) {
            redefInstances = addBaseClassToInstances(redefInstances);
        }
        redefInstances = changeClassToWrongRight(redefInstances);
    } catch (Exception e) {
        System.err.println("Error while classifying instance in redefineProblem");
        System.err.println(e.getMessage());
        System.err.println(e.fillInStackTrace());
        System.exit(987654);
    }

    return redefInstances;
}
From source file:moa.tud.ke.patching.Patching.java
/**
 * Creates a copy of the instances and redefines the problem into a
 * classification problem for the instances where the base classifier errs.
 */
private Instances redefineProblem(Instances data) {
    Instances redefInstances = new Instances(data); // deep copy of instance store

    double predictedClass = 0;
    int oldClassIndex = redefInstances.classIndex();
    try {
        Iterator inst = redefInstances.iterator();
        while (inst.hasNext()) {
            weka.core.Instance a = (weka.core.Instance) inst.next();
            predictedClass = this.baseClassifier.classifyInstance(a); // Caution: this must always be "base"
            if (predictedClass == a.classValue()) {
                a.setClassValue(1);
            } else {
                a.setClassValue(0);
            }
        }

        if (this.useBaseClassAsAttribute.isSet()) {
            redefInstances = addBaseClassToInstances(redefInstances);
        }
        redefInstances = changeClassToWrongRight(redefInstances);
    } catch (Exception e) {
        System.err.println("Error while classifying instance in redefineProblem");
        System.err.println(e.getMessage());
        System.err.println(e.fillInStackTrace());
    }

    return redefInstances;
}
From source file:mulan.classifier.transformation.MultiLabelStacking.java
License: Open Source License
/**
 * Attaches an index attribute at the beginning of each instance.
 *
 * @param original
 * @return
 */
protected Instances attachIndexes(Instances original) {
    ArrayList<Attribute> attributes = new ArrayList<Attribute>(original.numAttributes() + 1);

    for (int i = 0; i < original.numAttributes(); i++) {
        attributes.add(original.attribute(i));
    }
    // Add attribute for holding the index at the beginning.
    attributes.add(0, new Attribute("Index"));

    Instances transformed = new Instances("Meta format", attributes, 0);
    for (int i = 0; i < original.numInstances(); i++) {
        Instance newInstance;
        newInstance = (Instance) original.instance(i).copy();
        newInstance.setDataset(null);
        newInstance.insertAttributeAt(0);
        newInstance.setValue(0, i);
        transformed.add(newInstance);
    }
    transformed.setClassIndex(original.classIndex() + 1);
    return transformed;
}
From source file:my.randomforestui.RandomForestUI.java
public static Instances chromosome(Instances input, boolean bits[]) {
    Instances temp = input;

    if (temp.classIndex() == -1)
        temp.setClassIndex(temp.numAttributes() - 1);

    for (int i = 5; i >= 0; i--) {
        if (bits[i] == false) {
            temp.deleteAttributeAt(i);
        }
    }
    return temp;
}
From source file:my.randomforestui.RandomForestUI.java
public void RandomForest() throws Exception {
    String lala = "Processing Data\n";
    jTextArea1.setText(lala);

    DataSource source = new DataSource(
            "/home/rizkifika/NetBeansProjects/randomforest/src/randomforest/train.arff");
    Instances training = source.getDataSet();
    // set the class attribute on the training data
    if (training.classIndex() == -1)
        training.setClassIndex(training.numAttributes() - 1);

    // set up the test data
    DataSource testing = new DataSource(
            "/home/rizkifika/NetBeansProjects/randomforest/src/randomforest/test.arff");
    Instances test = testing.getDataSet();
    // set the class attribute on the test data
    if (test.classIndex() == -1)
        test.setClassIndex(test.numAttributes() - 1);

    // parameters for a random integer from 1 up to (but not including) 64
    int max = 64;
    int min = 1;
    int i;
    Random r = new Random();
    int randInt;
    int input;

    // initialize instances for testing and training as well as the chromosome bits
    boolean[] inputbits = new boolean[6];
    Instances inputTrain;
    Instances inputTest;
    double acc = 0;

    // initialize the chromosome data with value 0
    chromosomeData[] data = new chromosomeData[200];
    for (i = 0; i < 200; i++) {
        input = 0;
        chromosomeData tempData = new chromosomeData(inputbits, acc);
        data[i] = tempData;
    }

    // seed the initial population with random chromosomes
    System.out.println("------Initialize initial chromosome------");
    for (i = 0; i < 20; i++) {
        // get a random integer
        randInt = r.nextInt(max - min) + min;
        input = randInt;
        // convert the random integer into chromosome format
        inputbits = integer_to_chromosome(input);
        // restrict the training data to the chromosome's features
        inputTrain = chromosome(source.getDataSet(), inputbits);
        // restrict the test data to the chromosome's features
        inputTest = chromosome(testing.getDataSet(), inputbits);
        // random forest
        acc = doRandomForest(inputTrain, inputTest);
        // store the chromosome and its result in the class
        chromosomeData tempData = new chromosomeData(inputbits, acc);
        data[i] = tempData;
        System.out.println("kromosom = " + Arrays.toString(inputbits) + " accuracy = " + acc);
    }

    Arrays.sort(data);
    System.out.println("------sorted------");

    System.out.println("------do Genetic Algorithm with random forest------");
    // initialize chromosomes 1 and 2
    boolean[] chromosome1 = new boolean[6];
    boolean[] chromosome2 = new boolean[6];
    // exclusive upper bound on the chromosome rank
    int maxchromosome = 11;
    // inclusive lower bound on the chromosome rank
    int minchromosome = 1;
    Random rchromosome = new Random();
    int index;
    int point;
    int init = 19;

    for (i = 0; i < 30; i++) {
        System.out.println("-----GA ke-" + i + " -----");
        // get a random integer
        randInt = rchromosome.nextInt(maxchromosome - minchromosome) + minchromosome;
        index = randInt;
        // pick the first chromosome
        chromosome1 = data[index].chromosome;
        // get a random integer
        randInt = rchromosome.nextInt(maxchromosome - minchromosome) + minchromosome;
        index = randInt;
        // pick the second chromosome
        chromosome2 = data[index].chromosome;
        // pick the crossover point
        randInt = rchromosome.nextInt(6 - 2) + 2;
        point = randInt;
        // do crossover for chromosome 1
        chromosome1 = crossover1(chromosome1, chromosome2, point);
        // do crossover for chromosome 2
        chromosome2 = crossover2(chromosome1, chromosome2, point);
        // do mutation
        chromosome1 = mutation(chromosome1);
        chromosome2 = mutation(chromosome2);

        // evaluate chromosome 1
        inputTrain = chromosome(source.getDataSet(), chromosome1);
        // restrict the test data to the chromosome's features
        inputTest = chromosome(testing.getDataSet(), chromosome1);
        // random forest
        acc = doRandomForest(inputTrain, inputTest);
        // store the chromosome and its result in the class
        chromosomeData tempData = new chromosomeData(chromosome1, acc);
        init = init + 1;
        data[init] = tempData;
        System.out.println("kromosom1 = " + Arrays.toString(chromosome1) + " accuracy = " + acc);

        // evaluate chromosome 2
        inputTrain = chromosome(source.getDataSet(), chromosome2);
        // restrict the test data to the chromosome's features
        inputTest = chromosome(testing.getDataSet(), chromosome2);
        // random forest
        acc = doRandomForest(inputTrain, inputTest);
        // store the chromosome and its result in the class
        chromosomeData tempData2 = new chromosomeData(chromosome2, acc);
        init = init + 1;
        data[init] = tempData2;
        System.out.println("kromosom2 = " + Arrays.toString(chromosome2) + " accuracy = " + acc);

        Arrays.sort(data);
        System.out.println("------sorted------");
    }

    System.out.println("-------10 kromosom terbaik-------");
    String result = "Processing Data\n";
    for (i = 0; i < 10; i++) {
        // true = feature used, false = feature not used; chromosome order follows the arff file
        System.out.println("kromosom = " + Arrays.toString(data[i].chromosome) + " acc = " + data[i].accuracy);
        result = result + "kromosom = " + Arrays.toString(data[i].chromosome) + " acc = " + data[i].accuracy + "\n";
    }
    jTextArea1.setText(result);
}
From source file:myclassifier.naiveBayes.java
public void Klasifikasi(String filename) throws Exception {
    // load unlabeled data and set class attribute
    Instances unlabeled = ConverterUtils.DataSource.read("unlabeled_" + filename);
    unlabeled.setClassIndex(unlabeled.numAttributes() - 1);

    // create copy
    Instances labeled = new Instances(unlabeled);

    // label instances
    for (int i = 0; i < unlabeled.numInstances(); i++) {
        double clsLabel = NBClassifier.classifyInstance(labeled.instance(i));
        labeled.instance(i).setClassValue(clsLabel);
    }

    // save newly labeled data
    ConverterUtils.DataSink.write("labeled_" + filename, labeled);

    // print the results
    System.out.println("Classification Result");
    System.out.println("# - actual - predicted - distribution");
    for (int i = 0; i < labeled.numInstances(); i++) {
        double pred = NBClassifier.classifyInstance(labeled.instance(i));
        double[] dist = NBClassifier.distributionForInstance(labeled.instance(i));
        System.out.print((i + 1) + " - ");
        System.out.print(labeled.instance(i).toString(labeled.classIndex()) + " - ");
        System.out.print(labeled.classAttribute().value((int) pred) + " - ");
        System.out.println(Utils.arrayToString(dist));
    }
}
From source file:myclassifier.wekaCode.java
public static Instances readFileArff(String fileName) throws Exception {
    // http://weka.sourceforge.net/doc.stable/weka/core/Instances.html
    // read all instances from an .arff or .csv file
    DataSource source = new DataSource(fileName);
    Instances dataSet = source.getDataSet();

    // make the last attribute the class if none is set
    if (dataSet.classIndex() == -1)
        dataSet.setClassIndex(dataSet.numAttributes() - 1);

    return dataSet;
}
From source file:NaiveBayes.NaiveBayes.java
@Override
public void buildClassifier(Instances i) throws Exception {
    datatrain = i;
    numEachClass = getNumEachClass(datatrain);
    listAtribut.clear();

    int numAtt = datatrain.numAttributes() - 1;
    for (int j = 0; j < numAtt; j++) {
        if (j == datatrain.classIndex()) {
            numAtt++;
            j++;
            listAtribut.add(new Atribut(datatrain, j, i.classIndex()));
        } else {
            listAtribut.add(new Atribut(datatrain, j, i.classIndex()));
        }
    }
}
From source file:NaiveBayes.NaiveBayes.java
public static int[] getNumEachClass(Instances ins) {
    int[] countEachClass = new int[ins.numClasses()];
    for (int i = 0; i < ins.numClasses(); i++) {
        int cnt = 0;
        for (int j = 0; j < ins.numInstances(); j++) {
            if (ins.attribute(ins.classIndex()).value(i)
                    .equals(ins.get(j).toString(ins.classIndex()).replaceAll("\\s+", "")))
                cnt++;
        }
        countEachClass[i] = cnt;
    }
    return countEachClass;
}