List of usage examples for weka.core.Instances.setClassIndex
public void setClassIndex(int classIndex)
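Before the full examples below, here is a minimal usage sketch (not taken from the source files listed here; the file path and class name are illustrative). It loads a dataset and tells Weka which attribute is the class, following the common convention that the class is the last attribute.

import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class SetClassIndexSketch {
    public static void main(String[] args) throws Exception {
        // Load a dataset; DataSource.read accepts ARFF, CSV and other supported formats.
        Instances data = DataSource.read("data/iris.arff"); // illustrative path
        // Instances does not know which attribute is the class, so set it explicitly.
        // Here the class is assumed to be the last attribute.
        data.setClassIndex(data.numAttributes() - 1);
        System.out.println("Class attribute: " + data.classAttribute().name());
    }
}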
From source file:ffnn.FFNNTubesAI.java
public static Instances filterNominalNumeric(Instances i) {
    NominalToBinary filter = new NominalToBinary();
    Instances temp_instances = new Instances(i);
    if (temp_instances.classIndex() > -1) { // if a class index is set
        temp_instances.setClassIndex(-1); // unset it
    }
    try {
        filter.setInputFormat(temp_instances);
        temp_instances = Filter.useFilter(temp_instances, filter);
    } catch (Exception ex) {
        Logger.getLogger(FFNN.class.getName()).log(Level.SEVERE, null, ex);
    }
    return temp_instances;
}
From source file:FFNN.MultiplePerceptron.java
public static void main(String args[]) throws Exception {
//        System.out.println("input jumlah layer 0/1 :");
//        Scanner input = new Scanner(System.in);
//        int layer = input.nextInt();
//        System.out.println("input learning rate");
//        double rate = input.nextDouble();
//        int hidden = 0;
//        if (layer == 1) {
//            System.out.println("input jumlah neuron di hidden layer");
//            hidden = input.nextInt();
//        }
//
//        System.out.print("Masukkan nama file : ");
//        String filename = input.next();

    ConverterUtils.DataSource source = new ConverterUtils.DataSource(
            "D:\\Program Files\\Weka-3-8\\data\\iris.arff");
    Instances train = source.getDataSet();
//        Normalize nm = new Normalize();
//        nm.setInputFormat(train);
//        train = Filter.useFilter(train, nm);

    for (int i = 0; i < train.numAttributes(); i++)
        System.out.println(i + ". " + train.attribute(i).name());
    System.out.print("Masukkan indeks kelas : ");
    //int classIdx = input.nextInt();
    train.setClassIndex(train.numAttributes() - 1);

    MultiplePerceptron mlp = new MultiplePerceptron(10000, 1, 13, train);
    mlp.buildClassifier(train);

    Evaluation eval = new Evaluation(train);
    eval.evaluateModel(mlp, train);
    System.out.println(eval.toSummaryString());
//        System.out.println(eval.toMatrixString());
}
From source file:ffnn.TucilWeka.java
public static Instances readDataSet(String filepath) {
    // Read the dataset from a file
    Instances data = null;
    try {
        data = DataSource.read(filepath);
    } catch (Exception ex) {
        Logger.getLogger(TucilWeka.class.getName()).log(Level.SEVERE, null, ex);
    }
    data.setClassIndex(data.numAttributes() - 1);
    return data;
}
From source file:ffnn.TucilWeka.java
public static Instances createInstances(int max) {
    // List of attributes and list of class values for the header
    // Number of attributes: 4 without the class, 5 with it
    ArrayList<Attribute> attrs = new ArrayList<Attribute>(5);
    ArrayList<String> classVal = new ArrayList<String>();

    // Add the possible class values to the list
    classVal.add("Iris-setosa");
    classVal.add("Iris-versicolor");
    classVal.add("Iris-virginica");

    // Add the attributes to the list
    Attribute sepallength = new Attribute("sepallength");
    attrs.add(sepallength); // numeric attribute
    Attribute sepalwidth = new Attribute("sepalwidth");
    attrs.add(sepalwidth); // numeric attribute
    Attribute petallength = new Attribute("petallength");
    attrs.add(petallength); // numeric attribute
    Attribute petalwidth = new Attribute("petalwidth");
    attrs.add(petalwidth); // numeric attribute
    Attribute classValue = new Attribute("@@class@@", classVal);
    attrs.add(classValue); // nominal class attribute

    // Construction: constructor takes the relation name, the attribute list, and an initial capacity
    Instances dataRaw = new Instances("irisNew", attrs, 0); // empty Instances
    dataRaw.setClassIndex(dataRaw.numAttributes() - 1);

    Scanner scan = new Scanner(System.in);
    for (int i = 0; i < max; i++) {
        // Weka stores instance values as doubles
        double temp;
        Instance inst = new DenseInstance(dataRaw.numAttributes());
        System.out.println("Sepallength:");
        temp = scan.nextDouble();
        inst.setValue(sepallength, temp);
        System.out.println("Sepalwidth:");
        temp = scan.nextDouble();
        inst.setValue(sepalwidth, temp);
        System.out.println("Petallength:");
        temp = scan.nextDouble();
        inst.setValue(petallength, temp);
        System.out.println("Petalwidth:");
        temp = scan.nextDouble();
        inst.setValue(petalwidth, temp);
        //System.out.println("Fifth input:");
        //temp = scan.nextDouble();
        //0 -> setosa, 1 -> versicolor, 2 -> virginica
        //inst.setValue(classValue, temp); // not actually needed

        // Add the instance to the dataset
        dataRaw.add(inst);
    }
    return dataRaw;
}
From source file:FinalMineria.Reconocimiento.java
/**
 * Processes requests for both HTTP <code>GET</code> and <code>POST</code>
 * methods.
 *
 * @param request servlet request
 * @param response servlet response
 * @throws ServletException if a servlet-specific error occurs
 * @throws IOException if an I/O error occurs
 */
protected void processRequest(HttpServletRequest request, HttpServletResponse response)
        throws ServletException, IOException, Exception {
    String accion = request.getParameter("accion");
    BufferedReader br = null;
    String ruta = request.getServletContext().getRealPath("/Recursos");
    br = new BufferedReader(new FileReader(ruta + "/nombres.txt"));
    linea = br.readLine();
    br.close();
    if ("Detener".equals(accion)) {
        grabar.finish();
        try {
            Thread.sleep(4000);
        } catch (InterruptedException ex) {
            Logger.getLogger(GrabarAudio.class.getName()).log(Level.SEVERE, null, ex);
        }
        String comando = "cmd /c " + request.getServletContext().getRealPath("/Recursos/OpenSmile")
                + "\\SMILExtract_Release.exe -C " + request.getServletContext().getRealPath("/Recursos/config")
                + "\\IS12_speaker_trait.conf -I " + request.getServletContext().getRealPath("/Recursos/audios")
                + "\\prueba.wav -O " + request.getServletContext().getRealPath("/Recursos/arff")
                + "\\prueba.arff -classes {" + linea + "} -classlabel ?";
        Process proceso = Runtime.getRuntime().exec(comando);
        proceso.waitFor();
        Instances prueba, conocimiento;
        try (BufferedReader archivoBase = new BufferedReader(new FileReader(
                request.getServletContext().getRealPath("/Recursos/arff") + "\\baseDatos.arff"))) {
            conocimiento = new Instances(archivoBase);
        }
        try (BufferedReader archivoPrueba = new BufferedReader(new FileReader(
                request.getServletContext().getRealPath("/Recursos/arff") + "\\prueba.arff"))) {
            prueba = new Instances(archivoPrueba);
        }
        conocimiento.deleteStringAttributes();
        conocimiento.setClassIndex(981);
        prueba.deleteStringAttributes();
        prueba.setClassIndex(981);
        Classifier clasificadorModelo = (Classifier) new NaiveBayes();
        clasificadorModelo.buildClassifier(conocimiento);
        double valorP = clasificadorModelo.classifyInstance(prueba.instance(prueba.numInstances() - 1));
        String prediccion = prueba.classAttribute().value((int) valorP);
        System.out.println(prediccion);
        request.setAttribute("prediccion", prediccion);
        RequestDispatcher dispatcher = request.getRequestDispatcher("./Hablante.jsp");
        dispatcher.forward(request, response);
    } else if ("Grabar".equals(accion)) {
        ruta = request.getServletContext().getRealPath("/Recursos/audios");
        grabar = new Grabador(ruta + "\\" + "prueba");
        Thread stopper = new Thread(new Runnable() {
            public void run() {
                try {
                    Thread.sleep(tiempo);
                } catch (InterruptedException ex) {
                    ex.printStackTrace();
                }
                grabar.finish();
            }
        });
        stopper.start();
        // start recording
        grabar.start();
        response.sendRedirect("./grabar.jsp");
    }
}
From source file:fk.stardust.localizer.machinelearn.WekaFaultLocalizer.java
License:Open Source License
@Override
public Ranking<T> localize(final ISpectra<T> spectra) {
    // == 1. Create Weka training instance

    final List<INode<T>> nodes = new ArrayList<>(spectra.getNodes());

    // nominal true/false values
    final List<String> tf = new ArrayList<String>();
    tf.add("t");
    tf.add("f");

    // create an attribute for each component
    final Map<INode<T>, Attribute> attributeMap = new HashMap<INode<T>, Attribute>();
    final ArrayList<Attribute> attributeList = new ArrayList<Attribute>(); // NOCS: Weka needs ArrayList..
    for (final INode<T> node : nodes) {
        final Attribute attribute = new Attribute(node.toString(), tf);
        attributeList.add(attribute);
        attributeMap.put(node, attribute);
    }

    // create class attribute (trace success)
    final Attribute successAttribute = new Attribute("success", tf);
    attributeList.add(successAttribute);

    // create weka training instance
    final Instances trainingSet = new Instances("TraceInfoInstances", attributeList, 1);
    trainingSet.setClassIndex(attributeList.size() - 1);

    // == 2. add traces to training set

    // add an instance for each trace
    for (final ITrace<T> trace : spectra.getTraces()) {
        final Instance instance = new DenseInstance(nodes.size() + 1);
        instance.setDataset(trainingSet);
        for (final INode<T> node : nodes) {
            instance.setValue(attributeMap.get(node), trace.isInvolved(node) ? "t" : "f");
        }
        instance.setValue(successAttribute, trace.isSuccessful() ? "t" : "f");
        trainingSet.add(instance);
    }

    // == 3. use prediction to localize faults

    // build classifier
    try {
        final Classifier classifier = this.buildClassifier(this.classifierName, this.classifierOptions,
                trainingSet);
        final Ranking<T> ranking = new Ranking<>();

        System.out.println("begin classifying");
        int classified = 0;

        final Instance instance = new DenseInstance(nodes.size() + 1);
        instance.setDataset(trainingSet);
        for (final INode<T> node : nodes) {
            instance.setValue(attributeMap.get(node), "f");
        }
        instance.setValue(successAttribute, "f");

        for (final INode<T> node : nodes) {
            classified++;
            if (classified % 1000 == 0) {
                System.out.println(String.format("Classified %d nodes.", classified));
            }

            // mark only the current node as involved
            instance.setValue(attributeMap.get(node), "t");

            // predict with which probability this setup leads to a failing network
            final double[] distribution = classifier.distributionForInstance(instance);
            ranking.rank(node, distribution[1]);

            // reset involvement for node
            instance.setValue(attributeMap.get(node), "f");
        }
        return ranking;
    } catch (final Exception e) { // NOCS: Weka throws only raw exceptions
        throw new RuntimeException(e);
    }
}
From source file:focusedCrawler.link.classifier.LinkClassifierFactoryImpl.java
License:Open Source License
public static LinkClassifier createLinkClassifierImpl(String[] attributes, String[] classValues,
        Classifier classifier, String className, int levels) throws IOException {
    LinkClassifier linkClassifier = null;
    LinkNeighborhoodWrapper wrapper = loadWrapper(attributes, stoplist);

    weka.core.FastVector vectorAtt = new weka.core.FastVector();
    for (int i = 0; i < attributes.length; i++) {
        vectorAtt.addElement(new weka.core.Attribute(attributes[i]));
    }
    weka.core.FastVector classAtt = new weka.core.FastVector();
    for (int i = 0; i < classValues.length; i++) {
        classAtt.addElement(classValues[i]);
    }
    vectorAtt.addElement(new weka.core.Attribute("class", classAtt));

    Instances insts = new Instances("link_classification", vectorAtt, 1);
    insts.setClassIndex(attributes.length);

    if (className.indexOf("LinkClassifierImpl") != -1) {
        LNClassifier lnClassifier = new LNClassifier(classifier, insts, wrapper, attributes);
        linkClassifier = new LinkClassifierImpl(lnClassifier);
    }
    if (className.indexOf("LinkClassifierAuthority") != -1) {
        linkClassifier = new LinkClassifierAuthority(classifier, insts, wrapper, attributes);
    }
    if (className.indexOf("LinkClassifierHub") != -1) {
        linkClassifier = new LinkClassifierHub(classifier, insts, wrapper, attributes);
    }
    return linkClassifier;
}
From source file:focusedCrawler.target.classifier.WekaTargetClassifier.java
License:Open Source License
public static TargetClassifier create(String modelFile, String featureFile, double relevanceThreshold,
        String stopwordsFile) throws IOException {
    try {
        ParameterFile featureConfig = new ParameterFile(featureFile);

        InputStream is = new FileInputStream(modelFile);
        ObjectInputStream objectInputStream = new ObjectInputStream(is);
        Classifier classifier = (Classifier) objectInputStream.readObject();
        is.close();

        String[] attributes = featureConfig.getParam("ATTRIBUTES", " ");
        weka.core.FastVector vectorAtt = new weka.core.FastVector();
        for (int i = 0; i < attributes.length; i++) {
            vectorAtt.addElement(new weka.core.Attribute(attributes[i]));
        }
        String[] classValues = featureConfig.getParam("CLASS_VALUES", " ");
        weka.core.FastVector classAtt = new weka.core.FastVector();
        for (int i = 0; i < classValues.length; i++) {
            classAtt.addElement(classValues[i]);
        }
        vectorAtt.addElement(new weka.core.Attribute("class", classAtt));

        Instances insts = new Instances("target_classification", vectorAtt, 1);
        insts.setClassIndex(attributes.length);

        StopList stoplist = new StopListArquivo(stopwordsFile);

        return new WekaTargetClassifier(classifier, relevanceThreshold, insts, attributes, stoplist);
    } catch (FileNotFoundException e) {
        throw new IllegalArgumentException("Could not find file: " + modelFile, e);
    } catch (ClassNotFoundException e) {
        throw new IllegalArgumentException("Could not deserialize classifier from file:" + modelFile, e);
    } catch (IOException e) {
        throw new IllegalArgumentException("Could not load classifier.", e);
    }
}
From source file:focusedCrawler.target.EMClassifier.java
License:Open Source License
private void runDevSet(String model, String developData, String outputDir)
        throws IOException, ClassNotFoundException, TargetClassifierException {
    InputStream is = new FileInputStream(model);
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    Classifier classifier = (Classifier) objectInputStream.readObject();

    weka.core.FastVector vectorAtt = new weka.core.FastVector();
    for (int i = 0; i < attributes.length; i++) {
        vectorAtt.addElement(new weka.core.Attribute(attributes[i]));
    }
    String[] classValues = new String[] { "S", "NS" };
    weka.core.FastVector classAtt = new weka.core.FastVector();
    for (int i = 0; i < classValues.length; i++) {
        classAtt.addElement(classValues[i]);
    }
    vectorAtt.addElement(new weka.core.Attribute("class", classAtt));

    Instances insts = new Instances("target_classification", vectorAtt, 1);
    insts.setClassIndex(attributes.length);

    TargetClassifier targetClassifier = new TargetClassifierImpl(classifier, insts, attributes, stoplist);

    File[] list = new File(developData).listFiles();
    int total = 0;
    System.out.println("TOTAL:" + list.length);
    Vector<VSMElement> posDistances = new Vector<VSMElement>();
    Vector<VSMElement> negDistances = new Vector<VSMElement>();
    for (int i = 0; i < list.length; i++) {
        StringBuffer content = new StringBuffer();
        BufferedReader input = new BufferedReader(new FileReader(list[i]));
        for (String line = input.readLine(); line != null; line = input.readLine()) {
            content.append(line);
            content.append("\n");
        }
        String src = content.toString();
        boolean isTarget = targetClassifier.classify(new Page(null, src));
        double[] prob = targetClassifier.distributionForInstance(new Page(null, src));
        if (isTarget) {
            posDistances.add(new VSMElement(list[i].toString(), prob[0]));
            FileOutputStream fout = new FileOutputStream(outputDir + File.separator + list[i].getName(), false);
            DataOutputStream dout = new DataOutputStream(fout);
            dout.writeBytes(src);
            dout.close();
        } else {
            negDistances.add(new VSMElement(list[i].toString(), prob[1]));
            FileOutputStream fout = new FileOutputStream(
                    outputDir + "_non" + File.separator + list[i].getName(), false);
            DataOutputStream dout = new DataOutputStream(fout);
            dout.writeBytes(src);
            dout.close();
            total++;
        }
    }
    Collections.sort(posDistances, new VSMElementComparator());
    Collections.sort(negDistances, new VSMElementComparator());
    if (posDistances.size() < 1 && negDistances.size() < 1) {
        System.exit(0);
    }
    for (int i = 0; i < posDistances.size() && i < 10; i++) {
        VSMElement elem = posDistances.elementAt(i);
        String fileName = elem.getWord();
        System.out.println(elem.getWeight() + " " + fileName);
        Runtime.getRuntime().exec("mv " + fileName + " " + rootDir + File.separator + "trainData_form"
                + File.separator + "positive" + File.separator);
        posCount++;
    }
    System.out.println("--------");
    for (int i = 0; i < negDistances.size() && i < 10; i++) {
        VSMElement elem = negDistances.elementAt(i);
        String fileName = elem.getWord();
        System.out.println(elem.getWeight() + " " + fileName);
        Runtime.getRuntime().exec("mv " + fileName + " " + rootDir + File.separator + "trainData_form"
                + File.separator + "negative" + File.separator);
        negCount++;
    }
    System.out.println("NON:" + total);
}
From source file:focusedCrawler.target.PEBL.java
License:Open Source License
private void runDevSet(String model) throws IOException, ClassNotFoundException, TargetClassifierException {
    InputStream is = new FileInputStream(model);
    ObjectInputStream objectInputStream = new ObjectInputStream(is);
    Classifier classifier = (Classifier) objectInputStream.readObject();

    weka.core.FastVector vectorAtt = new weka.core.FastVector();
    for (int i = 0; i < attributes.length; i++) {
        vectorAtt.addElement(new weka.core.Attribute(attributes[i]));
    }
    String[] classValues = new String[] { "S", "NS" };
    weka.core.FastVector classAtt = new weka.core.FastVector();
    for (int i = 0; i < classValues.length; i++) {
        classAtt.addElement(classValues[i]);
    }
    vectorAtt.addElement(new weka.core.Attribute("class", classAtt));

    Instances insts = new Instances("target_classification", vectorAtt, 1);
    insts.setClassIndex(attributes.length);

    TargetClassifier targetClassifier = new TargetClassifierImpl(classifier, insts, attributes, stoplist);

    File[] list = new File(unlabelDir).listFiles();
    int total = 0;
    System.out.println("TOTAL:" + list.length);
    // Vector<VSMElement> posDistances = new Vector<VSMElement>();
    // Vector<VSMElement> negDistances = new Vector<VSMElement>();
    for (int i = 0; i < list.length; i++) {
        StringBuffer content = new StringBuffer();
        BufferedReader input = new BufferedReader(new FileReader(list[i]));
        for (String line = input.readLine(); line != null; line = input.readLine()) {
            content.append(line);
            content.append("\n");
        }
        String src = content.toString();
        boolean isTarget = targetClassifier.classify(new Page(null, src));
        if (!isTarget) {
            Runtime.getRuntime().exec("mv " + list[i].toString() + " " + trainingData + File.separator
                    + "negative" + File.separator);
            total++;
        }
    }
    if (total == 0) {
        System.exit(0);
    }
}