List of usage examples for weka.core Instances add
@Override public boolean add(Instance instance)
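Before the project-sourced examples below, here is a minimal, self-contained sketch of the call, assuming Weka 3.7 or later (where DenseInstance is the concrete Instance implementation); the attribute names and values are invented for illustration:

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

public class AddExample {
    public static void main(String[] args) {
        // Define two numeric attributes and an empty dataset (capacity 0).
        ArrayList<Attribute> atts = new ArrayList<Attribute>();
        atts.add(new Attribute("x"));
        atts.add(new Attribute("y"));
        Instances data = new Instances("demo", atts, 0);

        // add(Instance) appends a row and returns true, following List semantics.
        data.add(new DenseInstance(1.0, new double[] { 1.5, 2.5 })); // weight 1.0
        System.out.println(data.numInstances()); // 1
    }
}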
From source file:adams.gui.visualization.debug.objectrenderer.WekaInstancesRenderer.java
License:Open Source License
/**
 * Performs the actual rendering.
 *
 * @param obj    the object to render
 * @param panel  the panel to render into
 * @return       null if successful, otherwise error message
 */
@Override
protected String doRender(Object obj, JPanel panel) {
    Instance inst;
    Instances data;
    InstancesTable table;
    InstancesTableModel model;
    BaseScrollPane scrollPane;
    PlainTextRenderer plain;
    SpreadSheet sheet;
    Row row;
    int i;
    SpreadSheetRenderer sprenderer;

    if (obj instanceof Instances) {
        data = (Instances) obj;
        if (data.numInstances() == 0) {
            sheet = new DefaultSpreadSheet();
            row = sheet.getHeaderRow();
            row.addCell("I").setContentAsString("Index");
            row.addCell("N").setContentAsString("Name");
            row.addCell("T").setContentAsString("Type");
            row.addCell("C").setContentAsString("Class");
            for (i = 0; i < data.numAttributes(); i++) {
                row = sheet.addRow();
                row.addCell("I").setContent(i + 1);
                row.addCell("N").setContentAsString(data.attribute(i).name());
                row.addCell("T").setContentAsString(Attribute.typeToString(data.attribute(i)));
                row.addCell("C").setContent((i == data.classIndex()) ? "true" : "");
            }
            sprenderer = new SpreadSheetRenderer();
            sprenderer.render(sheet, panel);
        }
        else {
            model = new InstancesTableModel(data);
            model.setShowAttributeIndex(true);
            table = new InstancesTable(model);
            scrollPane = new BaseScrollPane(table);
            panel.add(scrollPane, BorderLayout.CENTER);
        }
    }
    else {
        inst = (Instance) obj;
        if (inst.dataset() != null) {
            data = new Instances(inst.dataset(), 0);
            data.add((Instance) inst.copy());
            table = new InstancesTable(data);
            scrollPane = new BaseScrollPane(table);
            panel.add(scrollPane, BorderLayout.CENTER);
        }
        else {
            plain = new PlainTextRenderer();
            plain.render(obj, panel);
        }
    }

    return null;
}
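The interesting branch above is the one that handles a lone Instance: it builds an empty copy of the instance's dataset header and adds a copy of the instance, so the table never mutates the original row. Isolated as a hedged sketch (the class and helper names are hypothetical):

import weka.core.Instance;
import weka.core.Instances;

public final class InstanceWrapping {
    /** Hypothetical helper: wraps a single Instance in a one-row dataset sharing its header. */
    public static Instances wrapInstance(Instance inst) {
        Instances wrapper = new Instances(inst.dataset(), 1); // empty copy of the header, capacity 1
        wrapper.add((Instance) inst.copy());                  // add a copy so the original stays untouched
        return wrapper;
    }
}

As in the renderer, this assumes inst.dataset() is non-null; a dataset-less instance has no header to copy.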
From source file:adams.gui.visualization.instance.InstancePanel.java
License:Open Source License
/**
 * Returns the currently visible instances.
 *
 * @return the instances, null if none visible
 */
public Instances getInstances() {
    Instances result;
    Instance inst;
    int i;
    List<InstanceContainer> list;

    result = null;
    list = getContainerManager().getAllVisible();
    for (i = 0; i < list.size(); i++) {
        inst = list.get(i).getData();
        if (result == null)
            result = inst.getDatasetHeader();
        result.add(inst.toInstance());
    }

    return result;
}
From source file:adams.gui.visualization.instance.plotpopup.SaveVisible.java
License:Open Source License
/**
 * Returns a popup menu for the table of the container list.
 *
 * @param panel  the affected panel
 * @param e      the mouse event
 * @param menu   the popup menu to customize
 */
@Override
public void customize(
        DataContainerPanelWithContainerList<Instance, InstanceContainerManager, InstanceContainer> panel,
        MouseEvent e, JPopupMenu menu) {
    JMenuItem item;

    item = new JMenuItem("Save visible...", GUIHelper.getIcon("save.gif"));
    item.addActionListener((ActionEvent ae) -> {
        WekaFileChooser fc = new WekaFileChooser();
        int retval = fc.showSaveDialog(panel);
        if (retval != WekaFileChooser.APPROVE_OPTION)
            return;
        weka.core.Instances dataset = null;
        for (InstanceContainer c : panel.getTableModelContainers(true)) {
            if (dataset == null)
                dataset = new weka.core.Instances(c.getData().getDatasetHeader(), 0);
            dataset.add((weka.core.Instance) c.getData().toInstance().copy());
        }
        if (dataset == null)
            return;
        AbstractFileSaver saver = fc.getWriter();
        saver.setInstances(dataset);
        try {
            saver.setFile(fc.getSelectedFile().getAbsoluteFile());
            saver.writeBatch();
        }
        catch (Exception ex) {
            ex.printStackTrace();
            GUIHelper.showErrorMessage(panel, "Error saving instances:\n" + ex);
        }
    });
    menu.add(item);
}
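The saver obtained from the file chooser follows the generic AbstractFileSaver contract (setInstances, setFile, writeBatch). When the output format is fixed, the same pattern can be written directly against ArffSaver; a minimal sketch, assuming a populated Instances and a caller-supplied path:

import java.io.File;
import java.io.IOException;

import weka.core.Instances;
import weka.core.converters.ArffSaver;

public final class SaveToArff {
    /** Sketch only: writes the dataset to the given ARFF path in one batch. */
    public static void save(Instances dataset, String path) throws IOException {
        ArffSaver saver = new ArffSaver();
        saver.setInstances(dataset);
        saver.setFile(new File(path));
        saver.writeBatch();
    }
}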
From source file:adams.gui.visualization.instances.InstancesTable.java
License:Open Source License
/**
 * Exports the data.
 *
 * @param range  what data to export
 */
protected void saveAs(TableRowRange range) {
    int retVal;
    AbstractFileSaver saver;
    File file;
    Instances original;
    Instances data;
    int[] selRows;
    int i;

    retVal = m_FileChooser.showSaveDialog(InstancesTable.this);
    if (retVal != WekaFileChooser.APPROVE_OPTION)
        return;

    saver = m_FileChooser.getWriter();
    file = m_FileChooser.getSelectedFile();
    original = getInstances();
    switch (range) {
    case ALL:
        data = original;
        break;
    case SELECTED:
        data = new Instances(original, 0);
        selRows = getSelectedRows();
        for (i = 0; i < selRows.length; i++)
            data.add((Instance) original.instance(getActualRow(selRows[i])).copy());
        break;
    case VISIBLE:
        data = new Instances(original, 0);
        for (i = 0; i < getRowCount(); i++)
            data.add((Instance) original.instance(getActualRow(i)).copy());
        break;
    default:
        throw new IllegalStateException("Unhandled range type: " + range);
    }

    try {
        saver.setFile(file);
        saver.setInstances(data);
        saver.writeBatch();
    }
    catch (Exception ex) {
        GUIHelper.showErrorMessage(InstancesTable.this, "Failed to save data (" + range + ") to: " + file, ex);
    }
}
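Both the SELECTED and VISIBLE branches reduce to the same copy-rows-into-an-empty-header pattern; a hedged sketch, assuming the caller already resolved view indices to model indices:

import weka.core.Instance;
import weka.core.Instances;

public final class RowExport {
    /** Copies the given rows (model indices) of original into a fresh dataset with the same header. */
    public static Instances copyRows(Instances original, int[] rows) {
        Instances subset = new Instances(original, rows.length); // header only, preallocated capacity
        for (int row : rows)
            subset.add((Instance) original.instance(row).copy()); // deep copy of each selected row
        return subset;
    }
}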
From source file:adams.gui.visualization.instances.InstancesTableModel.java
License:Open Source License
/**
 * Sorts the instances via the given attribute.
 *
 * @param columnIndex  the index of the column
 * @param ascending    ascending if true, otherwise descending
 */
public void sortInstances(int columnIndex, boolean ascending) {
    int offset;

    offset = 1;
    if (m_ShowWeightsColumn)
        offset++;

    if ((columnIndex >= offset) && (columnIndex < getColumnCount())) {
        addUndoPoint();
        m_Data.stableSort(columnIndex - offset);
        if (!ascending) {
            Instances reversedData = new Instances(m_Data, m_Data.numInstances());
            int i = m_Data.numInstances();
            while (i > 0) {
                i--;
                int equalCount = 1;
                while ((i > 0) && (m_Data.instance(i).value(columnIndex - offset)
                        == m_Data.instance(i - 1).value(columnIndex - offset))) {
                    equalCount++;
                    i--;
                }
                int j = 0;
                while (j < equalCount) {
                    reversedData.add(m_Data.instance(i + j));
                    j++;
                }
            }
            m_Data = reversedData;
        }
        notifyListener(new TableModelEvent(this));
    }
}
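The reversal loop above walks the stably sorted data backwards while keeping runs of equal values in their original relative order. If that tie preservation is not needed, the descending copy collapses to a plain backwards pass; a sketch, where sorted stands for the already-sorted dataset:

// Sketch: simple descending copy; unlike the model code above, ties come out in reverse order too.
Instances reversed = new Instances(sorted, sorted.numInstances()); // empty header copy, preallocated
for (int i = sorted.numInstances() - 1; i >= 0; i--)
    reversed.add(sorted.instance(i));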
From source file:ann.ANN.java
public void percentageSplit(Classifier model, double percent, Instances data) {
    try {
        int trainSize = (int) Math.round(data.numInstances() * percent / 100);
        int testSize = data.numInstances() - trainSize;
        Instances train = new Instances(data, trainSize);
        Instances test = new Instances(data, testSize);
        for (int i = 0; i < trainSize; i++) {
            train.add(data.instance(i));
        }
        for (int i = trainSize; i < data.numInstances(); i++) {
            test.add(data.instance(i));
        }
        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(model, test);
        System.out.println("================================");
        System.out.println("========Percentage Split=======");
        System.out.println("================================");
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n"));
    } catch (Exception ex) {
        System.out.println("File could not be loaded");
    }
}
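The split above copies rows one at a time. Since Instances also offers a range-copy constructor, the same percentage split can be taken in two calls; a sketch, assuming data has already been shuffled (e.g. data.randomize(new java.util.Random(42))):

// Sketch: range-copy split using Instances(Instances source, int first, int toCopy).
int trainSize = (int) Math.round(data.numInstances() * percent / 100.0);
int testSize  = data.numInstances() - trainSize;
Instances train = new Instances(data, 0, trainSize);         // rows [0, trainSize)
Instances test  = new Instances(data, trainSize, testSize);  // the remaining rows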
From source file:aprendizadodemaquina.Transformador.java
License:Open Source License
public void paraDt(File arquivoSaida) {
    // Generate an Instances object (the "header" for Weka instances)
    Instances dadosTreinamento = featurizador.geraInstances();
    Set<Par> paresTermosUsados = new HashSet<Par>();

    // Generate the training instances for each sentence
    for (Sentenca s : listaSentencas) {
        s.removerTermosNaoUtilizados();

        // Positive instances (annotated relations)
        for (Relacao r : s.getRelacoes()) {
            Par p = new Par(s, r.getTermo1(), r.getTermo2());
            if (paresTermosUsados.contains(p))
                continue;
            dadosTreinamento.add(featurizador.paraInstancia(s, r, r.getTermo1(), r.getTermo2(), "treinamento"));
            paresTermosUsados.add(p);
        }

        // Negative instances (all pairs of terms not marked as relations)
        for (Termo t1 : s.getTermos()) {
            for (Termo t2 : s.getTermos()) {
                if (t1.equals(t2) || paresTermosUsados.contains(new Par(s, t1, t2)))
                    continue;
                dadosTreinamento.add(featurizador.paraInstancia(s, null, t1, t2, "negativa"));
            }
        }
    }

    // Save the dataset to the output file
    try {
        ArffSaver arffSaver = new ArffSaver();
        arffSaver.setInstances(dadosTreinamento);
        arffSaver.setFile(arquivoSaida);
        arffSaver.writeBatch();
        Logger.getLogger("ARS logger").log(Level.INFO,
                "Training dataset saved to file {0}", arquivoSaida.getAbsolutePath());
    } catch (IOException ex) {
        Logger.getLogger("ARS logger").log(Level.SEVERE, null, ex);
    }
}
From source file:arffcreator.arffFrame.java
private void createActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_createActionPerformed
    FastVector atts;
    FastVector attsRel;
    FastVector attVals;
    FastVector attValsRel;
    Instances data;
    Instances dataRel;
    double[] vals;
    double[] valsRel;
    int i;

    // 1. set up attributes
    atts = new FastVector();
    // - numeric
    atts.addElement(new Attribute("att1"));
    // - nominal
    attVals = new FastVector();
    for (i = 0; i < 5; i++)
        attVals.addElement("val" + (i + 1));
    atts.addElement(new Attribute("att2", attVals));
    // - string
    atts.addElement(new Attribute("att3", (FastVector) null));
    // - date
    atts.addElement(new Attribute("att4", "yyyy-MM-dd"));
    // - relational
    attsRel = new FastVector();
    // -- numeric
    attsRel.addElement(new Attribute("att5.1"));
    // -- nominal
    attValsRel = new FastVector();
    for (i = 0; i < 5; i++)
        attValsRel.addElement("val5." + (i + 1));
    attsRel.addElement(new Attribute("att5.2", attValsRel));
    dataRel = new Instances("att5", attsRel, 0);
    atts.addElement(new Attribute("att5", dataRel, 0));

    // 2. create Instances object
    data = new Instances("MyRelation", atts, 0);

    // 3. fill with data
    // first instance
    vals = new double[data.numAttributes()];
    // - numeric
    vals[0] = Math.PI;
    // - nominal
    vals[1] = attVals.indexOf("val3");
    // - string
    vals[2] = data.attribute(2).addStringValue("This is a string!");
    try {
        // - date
        vals[3] = data.attribute(3).parseDate("2015-07-30");
    } catch (ParseException ex) {
        Logger.getLogger(arffFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    // - relational
    dataRel = new Instances(data.attribute(4).relation(), 0);
    // -- first instance
    valsRel = new double[2];
    valsRel[0] = Math.PI + 1;
    valsRel[1] = attValsRel.indexOf("val5.3");
    dataRel.add(new Instance(1.0, valsRel));
    // -- second instance
    valsRel = new double[2];
    valsRel[0] = Math.PI + 2;
    valsRel[1] = attValsRel.indexOf("val5.2");
    dataRel.add(new Instance(1.0, valsRel));
    vals[4] = data.attribute(4).addRelation(dataRel);
    // add
    data.add(new Instance(1.0, vals));

    // second instance
    vals = new double[data.numAttributes()]; // important: needs NEW array!
    // - numeric
    vals[0] = Math.E;
    // - nominal
    vals[1] = attVals.indexOf("val1");
    // - string
    vals[2] = data.attribute(2).addStringValue("And another one!");
    try {
        // - date
        vals[3] = data.attribute(3).parseDate("2015-07-30");
    } catch (ParseException ex) {
        Logger.getLogger(arffFrame.class.getName()).log(Level.SEVERE, null, ex);
    }
    // - relational
    dataRel = new Instances(data.attribute(4).relation(), 0);
    // -- first instance
    valsRel = new double[2];
    valsRel[0] = Math.E + 1;
    valsRel[1] = attValsRel.indexOf("val5.4");
    dataRel.add(new Instance(1.0, valsRel));
    // -- second instance
    valsRel = new double[2];
    valsRel[0] = Math.E + 2;
    valsRel[1] = attValsRel.indexOf("val5.1");
    dataRel.add(new Instance(1.0, valsRel));
    vals[4] = data.attribute(4).addRelation(dataRel);
    // add
    data.add(new Instance(1.0, vals));

    // 4. output data
    textArea.append(data.toString());
    dataset = data.toString();
}
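The example above uses the Weka 3.6 API, in which Instance is a concrete class and attribute lists are FastVectors. Under Weka 3.7 and later, FastVector is deprecated and DenseInstance replaces the old Instance class; the numeric/nominal part of the setup would look roughly like this (a sketch, not a drop-in replacement for the whole method):

import java.util.ArrayList;
import java.util.Arrays;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instances;

public final class ModernApiSketch {
    public static Instances build() {
        ArrayList<Attribute> atts = new ArrayList<Attribute>();
        atts.add(new Attribute("att1"));                                         // numeric
        atts.add(new Attribute("att2", Arrays.asList("val1", "val2", "val3")));  // nominal
        Instances data = new Instances("MyRelation", atts, 0);

        double[] vals = new double[data.numAttributes()];
        vals[0] = Math.PI;
        vals[1] = data.attribute(1).indexOfValue("val3");
        data.add(new DenseInstance(1.0, vals)); // DenseInstance instead of the 3.6 Instance class
        return data;
    }
}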
From source file:arffGenerator.TextDirectoryToArff.java
License:Open Source License
/**
 * Creates instances from the plain text found in the files inside the folders of the specified path.
 * The instances have two attributes: a nominal one with the class (the names of the folders that
 * contain the files) and a String one with the text that appears in the files.
 *
 * @param clase          String with the possible values of the class in the arff file to be created.
 * @param directoryPath  String with the path of the directory holding the files/directories with the instances.
 * @param data           Set of instances providing the structure in which to store the new instances.
 */
private void cargarAtrribDeClase(String clase, String directoryPath, Instances data) {
    System.out.println("Creating the instances of class: " + clase);
    File dir = new File(directoryPath);
    String[] files = dir.list();
    for (int i = 0; i < files.length; i++) {
        if (files[i].endsWith(".txt")) {
            try {
                double[] newInst = new double[2];
                File txt = new File(directoryPath + File.separator + files[i]);
                InputStreamReader is;
                is = new InputStreamReader(new FileInputStream(txt));
                StringBuffer txtStr = new StringBuffer();
                int c;
                while ((c = is.read()) != -1) {
                    c = (char) c;
                    if (!isFakeChar((char) c))
                        txtStr.append((char) c);
                }
                newInst[0] = (double) data.attribute(0).addStringValue(txtStr.toString());
                if (clase == null) {
                    newInst[1] = Double.NaN;
                } else {
                    newInst[1] = (double) data.attribute(1).indexOfValue(clase);
                }
                data.add(new Instance(1.0, newInst));
                is.close();
            } catch (Exception e) {
                System.err.println("failed to convert file: " + directoryPath + File.separator + files[i]);
            }
        }
    }
    System.out.println("Created the instances of class: " + clase);
}
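The core of the loop above is how string and nominal values end up inside an instance: both are stored as doubles that index into the attribute's internal value table. Restated as a sketch with the 3.7+ DenseInstance (data is assumed to have a string attribute at index 0 and a nominal class attribute at index 1, as above; text stands for the file contents):

double[] newInst = new double[2];
newInst[0] = data.attribute(0).addStringValue(text);                                // interns the text, returns its index
newInst[1] = (clase == null) ? Double.NaN : data.attribute(1).indexOfValue(clase);  // NaN marks a missing class
data.add(new DenseInstance(1.0, newInst));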
From source file:at.aictopic1.sentimentanalysis.machinelearning.impl.TwitterClassifer.java
public Integer classify(Tweet[] tweets) {
    // TEST
    // Generate example tweets
    Tweet exOne = new Tweet("This is good and fantastic");
    exOne.setPreprocessedText("This is good and fantastic");
    Tweet exTwo = new Tweet("Horribly, terribly bad and more");
    exTwo.setPreprocessedText("Horribly, terribly bad and more");
    Tweet exThree = new Tweet(
            "I want to update lj and read my friends list, but I\\'m groggy and sick and blargh.");
    exThree.setPreprocessedText(
            "I want to update lj and read my friends list, but I\\'m groggy and sick and blargh.");
    Tweet exFour = new Tweet("bad hate worst sick");
    exFour.setPreprocessedText("bad hate worst sick");
    tweets = new Tweet[] { exOne, exTwo, exThree, exFour };
    // TEST

    // Load model
    // loadModel();

    // Convert Tweet to Instance type
    // Get String Data

    // Create attributes for the Instances set
    Attribute twitter_id = new Attribute("twitter_id");
    // Attribute body = new Attribute("body");
    FastVector classVal = new FastVector(2);
    classVal.addElement("pos");
    classVal.addElement("neg");
    Attribute class_attr = new Attribute("class_attr", classVal);

    // Add them to a list
    FastVector attrVector = new FastVector(3);
    // attrVector.addElement(twitter_id);
    // attrVector.addElement(new Attribute("body", (FastVector) null));
    // attrVector.addElement(class_attr);

    // Get the number of tweets and then create predictSet
    int numTweets = tweets.length;
    Enumeration structAttrs = dataStructure.enumerateAttributes();
    // ArrayList<Attribute> attrList = new ArrayList<Attribute>(dataStructure.numAttributes());
    while (structAttrs.hasMoreElements()) {
        attrVector.addElement((Attribute) structAttrs.nextElement());
    }

    Instances predictSet = new Instances("predictInstances", attrVector, numTweets);
    // Instances predictSet = new Instances(dataStructure);
    predictSet.setClassIndex(2);

    // init prediction
    double prediction = -1;

    System.out.println("PredictSet matches source structure: " + predictSet.equalHeaders(dataStructure));
    System.out.println("PredSet struct: " + predictSet.attribute(0));
    System.out.println("PredSet struct: " + predictSet.attribute(1));
    System.out.println("PredSet struct: " + predictSet.attribute(2));

    // Array to return predictions
    // double[] tweetsClassified = new double[2][numTweets];
    // List<Integer, Double> tweetsClass = new ArrayList<Integer, Double>(numTweets);
    for (int i = 0; i < numTweets; i++) {
        String content = (String) tweets[i].getPreprocessedText();
        System.out.println("Tweet content: " + content);

        // attrList
        Instance tweetInstance = new Instance(predictSet.numAttributes());
        tweetInstance.setDataset(predictSet);
        tweetInstance.setValue(predictSet.attribute(0), i);
        tweetInstance.setValue(predictSet.attribute(1), content);
        tweetInstance.setClassMissing();
        predictSet.add(tweetInstance);

        try {
            // Apply string filter
            StringToWordVector filter = new StringToWordVector();
            filter.setInputFormat(predictSet);
            Instances filteredPredictSet = Filter.useFilter(predictSet, filter);

            // Apply model
            prediction = trainedModel.classifyInstance(filteredPredictSet.instance(i));
            filteredPredictSet.instance(i).setClassValue(prediction);
            System.out.println("Classification: " + filteredPredictSet.instance(i).toString());
            System.out.println("Prediction: " + prediction);
        } catch (Exception ex) {
            Logger.getLogger(TwitterClassifer.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    return 0;
}
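One caveat with the block above: StringToWordVector builds its dictionary from whatever it sees in setInputFormat, so filtering the prediction set from scratch can yield attributes that do not line up with the ones the model was trained on. A common way to keep training and prediction consistent is to wrap the filter in a FilteredClassifier, which applies the same fitted filter at prediction time; a sketch with NaiveBayes as a stand-in base learner:

import weka.classifiers.bayes.NaiveBayes;
import weka.classifiers.meta.FilteredClassifier;
import weka.core.Instances;
import weka.filters.unsupervised.attribute.StringToWordVector;

public final class ConsistentTextClassification {
    // Sketch: trainingData and predictSet are assumed to share the same raw (pre-filter) structure.
    public static double classifyFirst(Instances trainingData, Instances predictSet) throws Exception {
        FilteredClassifier fc = new FilteredClassifier();
        fc.setFilter(new StringToWordVector());   // dictionary is learned once, from the training data
        fc.setClassifier(new NaiveBayes());       // any base classifier would do; NaiveBayes is just an example
        fc.buildClassifier(trainingData);
        return fc.classifyInstance(predictSet.instance(0)); // the fitted filter is applied internally
    }
}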