Example usage for weka.core.converters ArffLoader ArffLoader

List of usage examples for weka.core.converters ArffLoader ArffLoader

Introduction

In this page you can find the example usage for weka.core.converters ArffLoader ArffLoader.

Prototype

ArffLoader

Source Link

Usage

From source file:adams.flow.transformer.WekaReorderAttributesToReference.java

License: Open Source License

/**
 * Adds options to the internal list of options.
 * <p>
 * Each call registers a command-line flag, its bean-property name and its
 * default value; registration order determines the listing order.
 * NOTE(review): the per-option purposes below are inferred from the option
 * names — confirm against the corresponding getters/setters.
 */
@Override
public void defineOptions() {
    super.defineOptions();

    // "reference-file": the dataset file providing the reference attribute order
    m_OptionManager.add("reference-file", "referenceFile", new PlaceholderFile("."));

    // "use-custom": whether the custom loader below is used (default: false)
    m_OptionManager.add("use-custom", "useCustomLoader", false);

    // "loader": the file loader used when use-custom is enabled (default: ARFF)
    m_OptionManager.add("loader", "customLoader", new ArffLoader());

    // "reference-actor": callable actor that can supply the reference data
    m_OptionManager.add("reference-actor", "referenceActor",
            new CallableActorReference(CallableActorReference.UNKNOWN));

    // "on-the-fly": presumably defers initialization until data arrives
    m_OptionManager.add("on-the-fly", "onTheFly", false);

    // "init-once": presumably initializes only for the first dataset
    m_OptionManager.add("init-once", "initializeOnce", false);

    // "lenient": presumably relaxes compatibility checks
    m_OptionManager.add("lenient", "lenient", false);

    // "keep": presumably keeps the incoming relation name
    m_OptionManager.add("keep", "keepRelationName", false);
}

From source file:adams.gui.menu.MakeCompatibleDatasets.java

License: Open Source License

/**
 * Launches the functionality of the menu item.
 * <p>
 * Assembles a wizard with three pages (dataset selection, load-format
 * configuration, output directory), shows it in a child frame and, once the
 * user finishes with "Generate", forwards the collected properties to
 * {@code doGenerate}.
 */
@Override
public void launch() {
    final WizardPane wizard;
    WekaSelectMultipleDatasetsPage infiles;
    ParameterPanelPage format;
    Properties props;
    SelectDirectoryPage outdir;
    FinalPage finalpage;
    WekaInstancesMerge merge; // NOTE(review): never used in this method — candidate for removal
    final ChildFrame frame;

    // wizard shell; the finish button is relabeled "Generate"
    wizard = new WizardPane();
    wizard.setCustomFinishText("Generate");
    infiles = new WekaSelectMultipleDatasetsPage("Input");
    infiles.setDescription(
            "Select the datasets that you want to make compatible.\n" + "You have to choose at least two.");
    // page 1 is only valid once at least two dataset files have been chosen
    infiles.setPageCheck(new PageCheck() {
        @Override
        public boolean checkPage(AbstractWizardPage page) {
            Properties props = page.getProperties();
            try {
                String[] files = OptionUtils
                        .splitOptions(props.getProperty(WekaSelectMultipleDatasetsPage.KEY_FILES));
                return (files.length >= 2);
            } catch (Exception e) {
                getLogger().log(Level.SEVERE, "Failed to obtain files:", e);
            }
            return false;
        }
    });
    wizard.addPage(infiles);

    // page 2: how the datasets get loaded (custom Weka loader vs ADAMS reader)
    format = new ParameterPanelPage("Format");
    format.setDescription("Here you can specify how to load the datasets.\n"
            + "If you need to use custom parameters for the Weka loader, then please tick the checkbox "
            + "for using a custom Weka loader and configure the loader accordingly.\n"
            + "The Weka CSV loader does not handle all CSV files correctly. In that case, you can try using "
            + "the ADAMS one. In that case, tick the checkbox for using a custom ADAMS reader and configure "
            + "the reader accordingly. If necessary, you can also modify the file extension that the ADAMS "
            + "reader reacts to.");
    format.getParameterPanel().addPropertyType(CUSTOM_WEKA_FILE_LOADER,
            PropertiesParameterPanel.PropertyType.BOOLEAN);
    format.getParameterPanel().setLabel(CUSTOM_WEKA_FILE_LOADER, "Use custom Weka loader");
    format.getParameterPanel().addPropertyType(WEKA_FILE_LOADER,
            PropertiesParameterPanel.PropertyType.OBJECT_EDITOR);
    format.getParameterPanel().setLabel(WEKA_FILE_LOADER, "Weka loader");
    format.getParameterPanel().setChooser(WEKA_FILE_LOADER, new GenericObjectEditorPanel(
            weka.core.converters.AbstractFileLoader.class, new ArffLoader(), true));
    format.getParameterPanel().addPropertyType(CUSTOM_ADAMS_READER,
            PropertiesParameterPanel.PropertyType.BOOLEAN);
    format.getParameterPanel().setLabel(CUSTOM_ADAMS_READER, "Use custom ADAMS reader");
    format.getParameterPanel().addPropertyType(ADAMS_READER,
            PropertiesParameterPanel.PropertyType.OBJECT_EDITOR);
    format.getParameterPanel().setLabel(ADAMS_READER, "ADAMS reader");
    format.getParameterPanel().setChooser(ADAMS_READER, new GenericObjectEditorPanel(
            adams.data.io.input.SpreadSheetReader.class, new CsvSpreadSheetReader(), true));
    format.getParameterPanel().addPropertyType(ADAMS_FILE_EXTENSION,
            PropertiesParameterPanel.PropertyType.STRING);
    format.getParameterPanel().setLabel(ADAMS_FILE_EXTENSION, "File extension for ADAMS reader");
    format.getParameterPanel().setPropertyOrder(new String[] { CUSTOM_WEKA_FILE_LOADER, WEKA_FILE_LOADER,
            CUSTOM_ADAMS_READER, ADAMS_READER, ADAMS_FILE_EXTENSION });
    // defaults: plain ARFF loader, custom Weka loader and ADAMS reader disabled
    props = new Properties();
    props.setBoolean(CUSTOM_WEKA_FILE_LOADER, false);
    props.setProperty(WEKA_FILE_LOADER, OptionUtils.getCommandLine(new ArffLoader()));
    props.setBoolean(CUSTOM_ADAMS_READER, false);
    props.setProperty(ADAMS_READER, OptionUtils.getCommandLine(new CsvSpreadSheetReader()));
    props.setProperty(ADAMS_FILE_EXTENSION, "csv");
    format.getParameterPanel().setProperties(props);
    wizard.addPage(format);

    // page 3: output directory, then a static confirmation page
    outdir = new SelectDirectoryPage("Output");
    outdir.setDescription("Select the directory to save the generated data to (in ARFF format).");
    wizard.addPage(outdir);
    finalpage = new FinalPage();
    finalpage.setLogo(null);
    finalpage
            .setDescription("<html><h2>Ready</h2>Please click on <b>Generate</b> to start the process.</html>");
    wizard.addPage(finalpage);
    frame = createChildFrame(wizard, GUIHelper.getDefaultDialogDimension());
    // generation only runs when the wizard was finished, not cancelled
    wizard.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent e) {
            if (!e.getActionCommand().equals(WizardPane.ACTION_FINISH)) {
                frame.dispose();
                return;
            }
            Properties props = wizard.getProperties(false);
            doGenerate(frame, props);
        }
    });
}

From source file:ap.mavenproject1.HelloWeka.java

/**
 * Loads an ARFF file, converts its numeric attributes to nominal, runs the
 * Apriori association-rule learner and prints every rule found.
 *
 * @param args unused
 */
public static void main(String args[]) {
    Instances data = null;
    ArffLoader loader = new ArffLoader();
    try {
        loader.setFile(new File("C:\\Users\\USER\\Desktop\\data.arff"));
        data = loader.getDataSet();
        // convention: class attribute is the last one
        data.setClassIndex(data.numAttributes() - 1);
    } catch (IOException ex) {
        Logger.getLogger(HelloWeka.class.getName()).log(Level.SEVERE, null, ex);
        // BUG FIX: without this return, 'data' stays null and the code below
        // would fail with a NullPointerException in setInputFormat().
        return;
    }

    Apriori apriori = new Apriori();
    try {
        // Apriori requires nominal attributes
        NumericToNominal numericToNominal = new NumericToNominal();
        numericToNominal.setInputFormat(data);

        Instances nominalData = Filter.useFilter(data, numericToNominal);
        apriori.buildAssociations(nominalData);
        FastVector[] allTheRules = apriori.getAllTheRules();
        for (FastVector rules : allTheRules) {
            System.out.println(rules);
        }
    } catch (Exception ex) {
        Logger.getLogger(HelloWeka.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:boostingPL.boosting.AdaBoost.java

License: Open Source License

/**
 * Trains an AdaBoost ensemble on a training ARFF file, evaluates it on a
 * test ARFF file and prints summary, per-class and confusion-matrix
 * statistics.
 * <p>
 * The file locations may be supplied as {@code args[0]} (training) and
 * {@code args[1]} (test); when omitted, the original hard-coded paths are
 * used, keeping the previous behavior.
 *
 * @param args optional: training ARFF path, test ARFF path
 * @throws Exception if loading, training or evaluation fails
 */
public static void main(String[] args) throws Exception {
    // fall back to the historical hard-coded locations when no args are given
    String trainPath = args.length > 0 ? args[0]
            : "/home/aax/xpShareSpace/dataset/single-class/+winered/winequality-red.datatrain1.arff";
    String testPath = args.length > 1 ? args[1]
            : "/home/aax/xpShareSpace/dataset/single-class/+winered/winequality-red.datatest1.arff";

    ArffLoader trainLoader = new ArffLoader();
    trainLoader.setFile(new java.io.File(trainPath));
    Instances training = trainLoader.getDataSet();
    // convention: class attribute is the last one
    training.setClassIndex(training.numAttributes() - 1);

    AdaBoost adaBoost = new AdaBoost(training, 100);
    for (int t = 0; t < 100; t++) {
        adaBoost.run(t);
    }

    ArffLoader testLoader = new ArffLoader();
    testLoader.setFile(new java.io.File(testPath));
    Instances testing = testLoader.getDataSet();
    testing.setClassIndex(testing.numAttributes() - 1);

    Evaluation eval = new Evaluation(testing);
    for (Instance inst : testing) {
        eval.evaluateModelOnceAndRecordPrediction(adaBoost, inst);
    }
    System.out.println(eval.toSummaryString());
    System.out.println(eval.toClassDetailsString());
    System.out.println(eval.toMatrixString());
}

From source file:boostingPL.boosting.SAMME.java

License: Open Source License

/**
 * Command-line entry point: trains a SAMME ensemble on the ARFF file given
 * as the first argument and evaluates it on the ARFF file given as the
 * second, printing summary, per-class and confusion-matrix statistics.
 *
 * @param args args[0] = training ARFF file, args[1] = test ARFF file
 * @throws Exception if loading, training or evaluation fails
 */
public static void main(String[] args) throws Exception {
    ArffLoader trainLoader = new ArffLoader();
    trainLoader.setFile(new java.io.File(args[0]));
    Instances trainData = trainLoader.getDataSet();
    // convention: class attribute is the last one
    trainData.setClassIndex(trainData.numAttributes() - 1);

    final int rounds = 100;
    SAMME booster = new SAMME(trainData, rounds);
    for (int round = 0; round < rounds; round++) {
        booster.run(round);
    }

    ArffLoader testLoader = new ArffLoader();
    testLoader.setFile(new java.io.File(args[1]));
    Instances testData = testLoader.getDataSet();
    testData.setClassIndex(testData.numAttributes() - 1);

    Evaluation evaluation = new Evaluation(testData);
    for (Instance instance : testData) {
        evaluation.evaluateModelOnceAndRecordPrediction(booster, instance);
    }
    System.out.println(evaluation.toSummaryString());
    System.out.println(evaluation.toClassDetailsString());
    System.out.println(evaluation.toMatrixString());
}

From source file:br.com.edu.partition.Tranning.java

/**
 * Trains a JRip rule learner on the training ARFF file and applies it to
 * every instance of the test ARFF file.
 *
 * @param test     path of the ARFF file holding the test instances
 * @param tranning path of the ARFF file holding the training instances
 * @return the class-distribution value at index 1 for the LAST test
 *         instance (each loop iteration overwrites the previous value), or
 *         {@code null} when the test set is empty
 * @throws IOException if either file cannot be read
 * @throws Exception   if building the classifier or classification fails
 */
public static Double Tranning_JRIP(String test, String tranning) throws IOException, Exception {
    Double result_ = null;
    ArffLoader loader;
    loader = new ArffLoader();
    loader.setFile(new File(tranning));
    loader.getStructure();

    Instances trainingset = loader.getDataSet();
    // convention: class attribute is the last one
    int classIndex = trainingset.numAttributes() - 1;
    trainingset.setClassIndex(classIndex);

    //J48 j48 = new J48();
    JRip jRip = new JRip();
    //String[] options2 = {"-F", "3", "-N", "2.0", "-O", "2", "-S", "1"};
    //jRip.setOptions(options2);
    //jRip.buildClassifier(trainingset);
    jRip.buildClassifier(trainingset);

    // fresh loader for the test file
    loader = new ArffLoader();
    loader.setFile(new File(test));
    loader.getStructure();

    Instances testset = loader.getDataSet();
    testset.setClassIndex(testset.numAttributes() - 1);
    for (Instance instance : testset) {
        //double[] result = jRip.distributionForInstance(instance);
        // NOTE(review): result[1] assumes the class attribute has at least
        // two labels; with fewer this throws ArrayIndexOutOfBoundsException.
        double[] result = jRip.distributionForInstance(instance);
        result_ = result[1];
        //System.out.println(test + " " + result[1] + " " + tranning);
    }
    return result_;

}

From source file:c4.pkg5crossv.DataLoad.java

/**
 * Loads the full dataset from the given ARFF file.
 *
 * @param fileName path of the ARFF file to read
 * @return the instances read from the file
 * @throws IOException if the file cannot be read
 */
public static Instances loadData(String fileName) throws IOException {
    final ArffLoader arffLoader = new ArffLoader();
    arffLoader.setFile(new File(fileName));
    return arffLoader.getDataSet();
}

From source file:CEP.GenerateStream.java

/**
 * Streams the instances of the w4ndata ARFF file into the CEP runtime one
 * by one, pacing consecutive events via {@code WaitTime} using the time
 * stamp stored in attribute 0 of each instance.
 */
public void MakeStream() {
    File file = new File("C:\\Users\\Weary\\Documents\\w4ndata\\w4ndata.arff");
    String pc = System.getProperty("user.dir").toString();
    if (pc.contains("gs023850")) {
        // alternate data location on the gs023850 machine
        file = new File("C:\\Users\\gs023850\\Documents\\w4ndata\\w4ndata.arff");
    }
    try {
        ArffLoader loader = new ArffLoader();
        loader.setFile(file);
        // read only the header first so instances can be streamed lazily
        Instances structure = loader.getStructure();

        HeaderManager.SetStructure(new Instances(structure));
        Instance current;
        long previousTimeStamp = 0;
        String timeStamp = "0";
        long wait = 0;

        while ((current = loader.getNextInstance(structure)) != null) {
            timeStamp = current.stringValue(0);
            cepRT.sendEvent(current);
            System.out.println("Sending event");
            previousTimeStamp = WaitTime(timeStamp, previousTimeStamp, wait);
        }
    } catch (FileNotFoundException e) {
        // BUG FIX: the original compared the caught exception with equals()
        // against freshly constructed instances, which is never true, so the
        // specific messages below were unreachable; proper catch clauses
        // restore the intended dispatch.
        System.out.println("File not found - could not generate stream");
    } catch (IOException e) {
        System.out.println("Unable to read file");
    } catch (NumberFormatException e) {
        System.out.println("Unable to convert to time to number - bad time");
    } catch (Exception e) {
        System.out.println(e.toString());
    }
}

From source file:cn.ict.zyq.bestConf.util.DataIOFile.java

License: Open Source License

/**
 * Loads the dataset stored in the ARFF file at the given path and prints
 * its header for inspection.
 *
 * @param path location of the ARFF file on disk
 * @return the full data set read from the file
 * @throws IOException if the file cannot be read
 */
public static Instances loadDataFromArffFile(String path) throws IOException {
    final ArffLoader arffLoader = new ArffLoader();
    arffLoader.setSource(new File(path));
    final Instances dataset = arffLoader.getDataSet();

    System.out.println("\nHeader of dataset:\n");
    System.out.println(new Instances(dataset, 0));
    return dataset;
}

From source file:com.ctrl.DataSource.java

License: Open Source License

/**
 * Reads an ARFF file from disk.
 *
 * @param filename the ARFF file to read
 * @return the data contained in the file
 * @throws Exception if reading fails
 */
public static Instances read(String filename) throws Exception {
    final ArffLoader arffLoader = new ArffLoader();
    arffLoader.setSource(new File(filename));
    return arffLoader.getDataSet();
}