List of usage examples for weka.core Instances numClasses
public int numClasses()
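Before the examples from real projects below, here is a minimal sketch of the typical call pattern: load a dataset, set the class index, then query numClasses(). The file name iris.arff and the class name NumClassesDemo are illustrative assumptions, not taken from any of the source files listed here.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class NumClassesDemo {
    public static void main(String[] args) throws Exception {
        // Hypothetical dataset path; substitute any ARFF file with a nominal class.
        Instances data = DataSource.read("iris.arff");
        // numClasses() requires the class attribute to be set first.
        data.setClassIndex(data.numAttributes() - 1);
        // For a nominal class this returns the number of distinct class labels
        // (e.g. 3 for iris); for a numeric class it returns 1.
        System.out.println("Number of classes: " + data.numClasses());
    }
}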
From source file:myJ48.MyJ48.java
public MyJ48 pruneTree(Instances data) throws Exception {
    if (currentAttribute == null) {
        return this;
    }
    for (int i = 0; i < currentAttribute.numValues(); i++) {
        boolean succLeaf = true;
        if (nodes[i].currentAttribute != null) {
            for (int j = 0; j < nodes[i].currentAttribute.numValues(); j++) {
                succLeaf = (succLeaf && (nodes[i].nodes[j].currentAttribute == null));
            }
            if (succLeaf) {
                Attribute tempAttr = nodes[i].currentAttribute;
                nodes[i].currentAttribute = null;
                // Set the class value to the most frequent class
                classDistribution = new double[data.numClasses()];
                Enumeration enumInstance = data.enumerateInstances();
                while (enumInstance.hasMoreElements()) {
                    Instance temp = (Instance) enumInstance.nextElement();
                    classDistribution[(int) temp.classValue()]++;
                }
                Utils.normalize(classDistribution);
                nodes[i].classValue = Utils.maxIndex(classDistribution);
                nodes[i].classAttribute = data.classAttribute();
                /*Weka weka = new Weka();
                weka.setTraining("weather.nominal.arff");
                String[] options_cl = {""};
                weka.setClassifier("myJ48.MyJ48", options_cl);
                weka.runCV(true);
                double currentAccuracy = weka.getM_Evaluation().correct();*/
                Random rand = new Random();
                double currentAccuracy = rand.nextDouble();
                System.out.println("accuracy used : " + currentAccuracy);
                double maxFalseAccuracy = 0.7; // trial value
                if (maxFalseAccuracy > currentAccuracy) {
                    nodes[i].currentAttribute = tempAttr;
                    //visited = true;
                } else {
                    //visited = false;
                }
            }
        } else {
            nodes[i] = nodes[i].pruneTree(data);
        }
    }
    return this;
}
From source file:NaiveBayes.NaiveBayes.java
public static int[] getNumEachClass(Instances ins) {
    int[] countEachClass = new int[ins.numClasses()];
    for (int i = 0; i < ins.numClasses(); i++) {
        int cnt = 0;
        for (int j = 0; j < ins.numInstances(); j++) {
            if (ins.attribute(ins.classIndex()).value(i)
                    .equals(ins.get(j).toString(ins.classIndex()).replaceAll("\\s+", "")))
                cnt++;
        }
        countEachClass[i] = cnt;
    }
    return countEachClass;
}
From source file:NaiveBayes.NaiveBayes13514004.java
@Override
public void buildClassifier(Instances i) {
    origin = new Instances(i);
    // Count the number of attributes and classes
    numAtt = i.numAttributes() - 1;
    numClass = i.numClasses();
    // Initialize the 3-dimensional matrices
    data = new int[numAtt][numClass][0];
    prob = new double[numAtt][numClass][0];
    kelasdata = new int[numClass];
    kelasprob = new double[numClass];
    Enumeration<Instance> enu1 = i.enumerateInstances();
    while (enu1.hasMoreElements()) {
        Instance ins = enu1.nextElement();
        Enumeration<Attribute> enu_t = i.enumerateAttributes();
        int x = 0;
        while (enu_t.hasMoreElements()) {
            Attribute att = enu_t.nextElement();
            numDis = att.numValues();
            data[x][(int) ins.classValue()] = new int[numDis];
            prob[x][(int) ins.classValue()] = new double[numDis];
            x++;
        }
    }
    // Fill the frequency matrix
    Enumeration<Instance> enu2 = i.enumerateInstances();
    while (enu2.hasMoreElements()) {
        Instance ins = enu2.nextElement();
        Enumeration<Attribute> enu_t = i.enumerateAttributes();
        int x = 0;
        while (enu_t.hasMoreElements()) {
            Attribute att = enu_t.nextElement();
            data[x][(int) ins.classValue()][(int) ins.value(att)]++;
            x++;
        }
        kelasdata[(int) ins.classValue()]++;
    }
    // Compute the class prior probabilities
    double numInstances = (double) i.numInstances();
    for (int y = 0; y < numClass; y++) {
        kelasprob[y] = (double) kelasdata[y] / numInstances;
    }
    // Fill the conditional probability matrix
    Enumeration<Instance> enu3 = i.enumerateInstances();
    while (enu3.hasMoreElements()) {
        Instance ins = enu3.nextElement();
        Enumeration<Attribute> enu_t = i.enumerateAttributes();
        int x = 0;
        while (enu_t.hasMoreElements()) {
            Attribute att = enu_t.nextElement();
            int sumDis = Utils.sum(data[x][(int) ins.classValue()]);
            numDis = att.numValues();
            for (int z = 0; z < numDis; z++) {
                int y = (int) ins.classValue();
                prob[x][y][z] = ((double) data[x][y][z] / (double) sumDis);
            }
            x++;
        }
    }
}
From source file:NaiveBayesPckge.NaiveBayesMain.java
public static void printCoba(Instances instance) {
    System.out.println("");
    System.out.println("1. first instance : " + instance.firstInstance());
    System.out.println("2. number of attributes : " + instance.numAttributes());
    System.out.println("3. " + instance.attribute(0).numValues());
    System.out.println("4. " + instance.attribute(0).weight());
    System.out.println("5. " + instance.attribute(instance.numAttributes() - 1).numValues());
    System.out.println("6. " + instance.get(0));
    System.out.println("7. " + instance.get(0).stringValue(4));
    System.out.println("8. " + instance.numInstances());
    System.out.println("9. " + instance.attribute(instance.numAttributes() - 1).numValues());
    System.out.println("10. " + instance.get(1).stringValue(instance.numAttributes() - 1));
    System.out.println("11. " + instance.attribute(instance.numAttributes() - 1).value(0));
    System.out.println("12. " + instance.attribute(instance.numAttributes() - 1).name());
    System.out.println("13. " + instance.numClasses());
    System.out.println("14. class index under test : " + instance.classIndex());
    // System.out.println("15. " + (String.valueOf(instance.attribute(0).value(34)).equals(String.valueOf(4.3))));
    // System.out.println("16. " + instance);
}
From source file:naive_bayes.Naive_bayes.java
@Override
public void buildClassifier(Instances newData) throws Exception {
    int countAttr = newData.numAttributes();
    int distinctClassValue = newData.attribute(classidx).numValues();
    /* Initialize the model */
    M = new ArrayList[countAttr][distinctClassValue];
    for (int i = 0; i < countAttr; i++) {
        for (int j = 0; j < distinctClassValue; j++) {
            M[i][j] = new ArrayList<ListElement>();
        }
    }
    boolean add;
    ListElement le = new ListElement();
    Attribute ab;
    for (int i = 0; i < countAttr; i++) {
        if (i != classidx) {
            for (int j = 0; j < distinctClassValue; j++) {
                for (int k = 0; k < newData.attribute(i).numValues(); k++) {
                    ab = newData.attribute(i);
                    String c = ab.value((int) newData.instance(149).value(i));
                    add = M[i][j].add(new ListElement());
                }
            }
        }
    }
    /* Build an array that counts the number of instances in each class */
    Attribute a;
    String c;
    arrayOfClass = new ListElement[newData.numClasses()];
    for (int idx = 0; idx < newData.numClasses(); idx++) {
        arrayOfClass[idx] = new ListElement();
        a = newData.classAttribute();
        c = a.value(idx);
        arrayOfClass[idx].setDisAttrName(c);
    }
    for (int i = 0; i < newData.numInstances(); i++) {
        double z = newData.instance(i).classValue();
        int zz = (int) z;
        arrayOfClass[zz].setCount(arrayOfClass[zz].getCount() + 1);
    }
    // Record the frequency of each attribute value
    for (int i = 0; i < newData.numInstances(); i++) {
        for (int j = 0; j < newData.numAttributes(); j++) {
            if (j != classidx) { // not the class attribute
                a = newData.attribute(classidx);
                c = a.value((int) newData.instance(i).value(classidx));
                // Get the class index
                double z = newData.instance(i).classValue();
                int zz = (int) z;
                le.setDisAttrName(c);
                // Get the distinct-value index
                double x = newData.instance(i).value(j);
                int xx = (int) x;
                // Increment the occurrence count of this value per class per attribute
                le.setCount(M[j][zz].get(xx).getCount() + 1);
                M[j][zz].set(xx, new ListElement(M[j][zz].get(xx).getDisAttrName(),
                        M[j][zz].get(xx).getCount() + 1));
            }
        }
    }
    /* Compute the probability of each distinct attribute value per class */
    for (int j = 0; j < newData.numAttributes(); j++) {
        if (j != classidx) {
            for (int zz = 0; zz < newData.numClasses(); zz++) {
                for (int xx = 0; xx < newData.attribute(j).numValues(); xx++) {
                    M[j][zz].set(xx, new ListElement(M[j][zz].get(xx).getDisAttrName(),
                            M[j][zz].get(xx).getCount() / arrayOfClass[zz].getCount()));
                }
            }
        }
    }
}
From source file:naive_bayes.Naive_bayes.java
public double classifyInstance(Instances newData) throws Exception {
    DenseInstance newInstance = new DenseInstance(newData.instance(2));
    float[] prob = new float[newData.numClasses()];
    for (int i = 0; i < newData.numClasses(); i++) {
        prob[i] = (float) arrayOfClass[i].getCount() / (float) newData.numInstances();
        System.out.println("ii = " + prob[i]);
    }
    for (int i = 0; i < newData.numClasses(); i++) {
        for (int j = 0; j < newData.numAttributes(); j++) {
            if (j != classidx) {
                System.out.println("j = " + j);
                double x = newInstance.value(j);
                System.out.println("x = " + x);
                int xx = (int) x;
                System.out.println("xx = " + xx);
                prob[i] *= M[j][i].get(xx).getCount();
                System.out.println("lala = " + prob[i]);
            }
        }
    }
    int indeksmaks = 0;
    double max = prob[indeksmaks];
    System.out.println("prob 0 = " + prob[0]);
    for (int i = 1; i < newData.numClasses(); i++) {
        if (max < prob[i]) {
            indeksmaks = i;
            max = prob[i];
            System.out.println("prob " + i + prob[i]);
        }
    }
    String tata = arrayOfClass[indeksmaks].getDisAttrName();
    System.out.println("instance class = " + tata);
    return indeksmaks;
}
From source file:net.sf.bddbddb.order.MyId3.java
License:LGPL
/**
 * Method for building an Id3 tree.
 *
 * @param data the training data
 * @exception Exception if decision tree can't be built successfully
 */
private void makeTree(Instances data) throws Exception {
    // Check if no instances have reached this node.
    if (data.numInstances() == 0) {
        m_Attribute = null;
        m_ClassValue = Instance.missingValue();
        m_Distribution = new double[data.numClasses()];
        double sum = 0;
        laplaceSmooth(m_Distribution, sum, data.numClasses());
        return;
    }
    // Compute attribute with maximum information gain.
    double[] infoGains = new double[data.numAttributes()];
    Enumeration attEnum = data.enumerateAttributes();
    while (attEnum.hasMoreElements()) {
        Attribute att = (Attribute) attEnum.nextElement();
        infoGains[att.index()] = computeInfoGain(data, att);
    }
    m_Attribute = data.attribute(Utils.maxIndex(infoGains));
    boolean makeLeaf;
    makeLeaf = Utils.eq(infoGains[m_Attribute.index()], 0);
    Instances[] splitData = null;
    if (!makeLeaf) {
        splitData = splitData(data, m_Attribute);
        for (int i = 0; i < splitData.length; ++i) {
            if (splitData[i].numInstances() == data.numInstances()) {
                //System.out.println("When splitting on attrib "+m_Attribute+", child "+i+" is same size as current, making into leaf.");
                makeLeaf = true;
                break;
            }
        }
    }
    // Make leaf if information gain is zero.
    // Otherwise create successors.
    if (makeLeaf) {
        m_Attribute = null;
        m_Distribution = new double[data.numClasses()];
        Enumeration instEnum = data.enumerateInstances();
        double sum = 0;
        while (instEnum.hasMoreElements()) {
            Instance inst = (Instance) instEnum.nextElement();
            m_Distribution[(int) inst.classValue()]++;
            sum += inst.weight();
        }
        // Laplace smooth the distribution instead
        laplaceSmooth(m_Distribution, sum, data.numClasses());
        //Utils.normalize(m_Distribution);
        m_ClassValue = Utils.maxIndex(m_Distribution);
        m_ClassAttribute = data.classAttribute();
    } else {
        m_Successors = new MyId3[m_Attribute.numValues()];
        for (int j = 0; j < m_Attribute.numValues(); j++) {
            m_Successors[j] = new MyId3();
            m_Successors[j].buildClassifier(splitData[j]);
        }
    }
}
From source file:net.sf.bddbddb.order.MyId3.java
License:LGPL
/**
 * Computes the entropy of a dataset.
 *
 * @param data the data for which entropy is to be computed
 * @return the entropy of the data's class distribution
 */
private double computeEntropy(Instances data, Attribute att) throws Exception {
    double[] classCounts = new double[data.numClasses()];
    Enumeration instEnum = data.enumerateInstances();
    int numInstances = 0;
    while (instEnum.hasMoreElements()) {
        Instance inst = (Instance) instEnum.nextElement();
        if (inst.isMissing(att))
            continue;
        classCounts[(int) inst.classValue()]++;
        ++numInstances;
    }
    double entropy = 0;
    for (int j = 0; j < data.numClasses(); j++) {
        if (classCounts[j] > 0) {
            entropy -= classCounts[j] * Utils.log2(classCounts[j]);
        }
    }
    entropy /= (double) numInstances;
    return entropy + Utils.log2(numInstances);
}
From source file:NeuralNetwork.Network.java
public Network(Instances instances) {
    // first layer, this is inputs only
    numOfNodesPerLayer.add(instances.numAttributes() - 1);
    // hidden layers
    numOfNodesPerLayer.add(20);
    // output layer
    numOfNodesPerLayer.add(instances.numClasses());
    // create the layers
    for (int i = 0; i < numOfNodesPerLayer.size(); i++) {
        Layer layer = new Layer(numOfNodesPerLayer.get(i), learningRate);
        layers.add(layer);
    }
    // connect the layers going forward
    for (int i = 0; i < layers.size() - 1; i++) {
        layers.get(i).connectNextLayer(layers.get(i + 1).getNueronsWithoutBios());
    }
    // connect the layers going backward
    for (int i = 1; i < layers.size(); i++) {
        layers.get(i).connectPrevLayer(layers.get(i - 1).getNuerons());
        layers.get(i).initializeWeights();
    }
    learnAll(instances);
}
From source file:newdtl.NewID3.java
/**
 * Creates an Id3 tree.
 *
 * @param data the training data
 * @exception Exception if tree failed to build
 */
private void makeTree(Instances data) throws Exception {
    // Check whether no instances have reached this node
    if (data.numInstances() == 0) {
        splitAttribute = null;
        label = DOUBLE_MISSING_VALUE;
        classDistributions = new double[data.numClasses()];
    } else {
        // Find the attribute with maximum information gain
        double[] infoGains = new double[data.numAttributes()];
        Enumeration attEnum = data.enumerateAttributes();
        while (attEnum.hasMoreElements()) {
            Attribute att = (Attribute) attEnum.nextElement();
            infoGains[att.index()] = computeInfoGain(data, att);
        }
        // Check the maximum information gain
        int maxIG = maxIndex(infoGains);
        if (maxIG != -1) {
            splitAttribute = data.attribute(maxIndex(infoGains));
        } else {
            throw new Exception("array null");
        }
        // Make a leaf if the information gain is 0
        if (Double.compare(infoGains[splitAttribute.index()], 0) == 0) {
            splitAttribute = null;
            classDistributions = new double[data.numClasses()];
            for (int i = 0; i < data.numInstances(); i++) {
                Instance inst = (Instance) data.instance(i);
                classDistributions[(int) inst.classValue()]++;
            }
            normalizeClassDistribution();
            label = maxIndex(classDistributions);
            classAttribute = data.classAttribute();
        } else {
            // Build new subtrees beneath this node
            Instances[] splitData = splitData(data, splitAttribute);
            children = new NewID3[splitAttribute.numValues()];
            for (int j = 0; j < splitAttribute.numValues(); j++) {
                children[j] = new NewID3();
                children[j].makeTree(splitData[j]);
            }
        }
    }
}