Example usage for weka.core.converters ArffLoader setSource

List of usage examples for weka.core.converters ArffLoader setSource

Introduction

In this page you can find the example usage for weka.core.converters ArffLoader setSource.

Prototype

@Override
public void setSource(InputStream in) throws IOException 

Source Link

Document

Resets the Loader object and sets the source of the data set to be the supplied InputStream.

Usage

From source file:cn.ict.zyq.bestConf.util.DataIOFile.java

License:Open Source License

/**
 * Loads the data set stored in the ARFF file at the given path and echoes
 * its header (an empty copy of the instances) to standard output.
 *
 * @param path location of the ARFF file to load
 * @return the fully loaded instances
 * @throws IOException if the file cannot be read
 */
public static Instances loadDataFromArffFile(String path) throws IOException {
    ArffLoader arffLoader = new ArffLoader();
    arffLoader.setSource(new File(path));
    Instances dataSet = arffLoader.getDataSet();

    System.out.println("\nHeader of dataset:\n");
    System.out.println(new Instances(dataSet, 0));
    return dataSet;
}

From source file:com.ctrl.DataSource.java

License:Open Source License

/**
 * Reads a complete data set from the named ARFF file.
 *
 * @param filename the ARFF file to read
 * @return the loaded data
 * @throws Exception if reading fails
 */
public static Instances read(String filename) throws Exception {
    ArffLoader arffLoader = new ArffLoader();
    arffLoader.setSource(new File(filename));
    return arffLoader.getDataSet();
}

From source file:core.DatabaseSaverEx.java

License:Open Source License

/** 
 * Sets the options. <p/>/*w  ww .j  a  va  2 s .  co m*/
 *
 <!-- options-start -->
 * Valid options are: <p/>
 * 
 * <pre> -url &lt;JDBC URL&gt;
 *  The JDBC URL to connect to.
 *  (default: from DatabaseUtils.props file)</pre>
 * 
 * <pre> -user &lt;name&gt;
 *  The user to connect with to the database.
 *  (default: none)</pre>
 * 
 * <pre> -password &lt;password&gt;
 *  The password to connect with to the database.
 *  (default: none)</pre>
 * 
 * <pre> -T &lt;table name&gt;
 *  The name of the table.
 *  (default: the relation name)</pre>
 * 
 * <pre> -P
 *  Add an ID column as primary key. The name is specified
 *  in the DatabaseUtils file ('idColumn'). The DatabaseLoader
 *  won't load this column.</pre>
 * 
 * <pre> -i &lt;input file name&gt;
 *  Input file in arff format that should be saved in database.</pre>
 * 
 <!-- options-end -->
 *
 * @param options the options
 * @throws Exception if options cannot be set
 */
public void setOptions(String[] options) throws Exception {

    String tableString, inputString, tmpStr;

    resetOptions();

    tmpStr = Utils.getOption("url", options);
    if (tmpStr.length() != 0)
        setUrl(tmpStr);

    tmpStr = Utils.getOption("user", options);
    if (tmpStr.length() != 0)
        setUser(tmpStr);

    tmpStr = Utils.getOption("password", options);
    if (tmpStr.length() != 0)
        setPassword(tmpStr);

    tableString = Utils.getOption('T', options);

    inputString = Utils.getOption('i', options);

    if (tableString.length() != 0) {
        m_tableName = tableString;
        m_tabName = false;
    }

    m_id = Utils.getFlag('P', options);

    if (inputString.length() != 0) {
        try {
            m_inputFile = inputString;
            ArffLoader al = new ArffLoader();
            File inputFile = new File(inputString);
            al.setSource(inputFile);
            setInstances(al.getDataSet());
            //System.out.println(getInstances());
            if (tableString.length() == 0)
                m_tableName = getInstances().relationName();
        } catch (Exception ex) {
            printException(ex);
            ex.printStackTrace();
        }
    }
}

From source file:edu.teco.context.recognition.WekaManager.java

License:Apache License

/**
 * Loads training data from the given ARFF file and immediately builds the
 * classifier from it. Failures are only reported via a stack trace.
 *
 * @param file the ARFF file holding the training data
 */
public void configureWithArffFile(File file) {

    if (FrameworkContext.INFO) {
        Log.i(TAG, "Loading from path: " + file.getPath());
    }

    try {
        ArffLoader arffLoader = new ArffLoader();
        arffLoader.setSource(file);
        trainingData = arffLoader.getDataSet();

        if (FrameworkContext.INFO) {
            Log.i(TAG, "WEKA configuration from ARFF File: " + file.getPath() + "Training Data Information:\n"
                    + trainingData.toSummaryString());
        }
        buildClassifier();
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:fantail.core.Tools.java

License:Open Source License

/**
 * Loads a Fantail ARFF data set from the given path and designates the last
 * attribute as the class attribute.
 *
 * @param arffPath path to the ARFF file
 * @return the loaded instances with the class index set to the last attribute
 * @throws Exception if the file cannot be read, or if the last attribute is
 *                   not relation-valued (required by the Fantail format)
 */
public static Instances loadFantailARFFInstances(String arffPath) throws Exception {
    ArffLoader loader = new ArffLoader();
    loader.setSource(new File(arffPath));
    Instances data = loader.getDataSet();
    data.setClassIndex(data.numAttributes() - 1);
    // Fantail encodes its targets in a relation-valued class attribute;
    // (was `isRelationValued() != true` — compare booleans with `!`).
    if (!data.classAttribute().isRelationValued()) {
        throw new Exception("The last attribute needs to be 'RelationValued'");
    }
    return data;
}

From source file:liac.igmn.loader.DataLoader.java

License:Open Source License

/**
 * Carrega dataset a partir de arquivo ARFF e binariza os atributos nominais.
 * Assume que a classe seja o ultimo atributo.
 * /*from ww  w  .  j  a v  a  2 s .co  m*/
 * @param filename path do arquivo
 * @return dataset
 * @throws DataLoaderException lancado quando o arquivo nao e encontrado
 * ou quando ocorre algum erro de IO
 */
public static Dataset loadARFF(String filename) throws DataLoaderException {
    Dataset dataset = new Dataset();
    try {
        ArffLoader loader = new ArffLoader();

        loader.setSource(new File(filename));
        Instances data = loader.getDataSet();
        Instances m_Intances = new Instances(data);

        data.setClassIndex(data.numAttributes() - 1);

        String[] classes = new String[data.numClasses()];
        for (int i = 0; i < data.numClasses(); i++)
            classes[i] = data.classAttribute().value(i);
        dataset.setClassesNames(classes);

        NominalToBinary filter = new NominalToBinary();
        filter.setInputFormat(m_Intances);
        filter.setOptions(new String[] { "-A" });
        m_Intances = Filter.useFilter(m_Intances, filter);

        int inputSize = m_Intances.numAttributes() - data.numClasses();

        dataset.setInputSize(inputSize);
        dataset.setNumClasses(data.numClasses());

        dataset.setWekaDataset(m_Intances);
    } catch (IOException e) {
        throw new DataLoaderException("Arquivo no encontrado", e.getCause());
    } catch (Exception e) {
        throw new DataLoaderException("Falha na converso do arquivo", e.getCause());
    }

    return dataset;
}

From source file:org.tigr.microarray.mev.cluster.gui.impl.bn.WekaUtil.java

License:Open Source License

/**
 * Reads a WEKA {@code Instances} object from the named ARFF file
 * (see the WEKA documentation for details on the ARFF format).
 * On an I/O failure the exception is printed and {@code null} is returned
 * instead of being propagated.
 *
 * @param inFileName name of the input file in ARFF format
 * @return the instances read from the file, or {@code null} if reading failed
 */
public static Instances readInstancesArff(String inFileName) {
    try {
        Useful.checkFile(inFileName);
        ArffLoader loader = new ArffLoader();
        loader.setSource(new File(inFileName));
        return loader.getDataSet();
    } catch (IOException ioe) {
        System.out.println(ioe);
        ioe.printStackTrace();
        return null;
    }
}

From source file:predictforex.ForexPredictor.java

/**
 * Converts a labeled ARFF file to CSV. The {@code option} argument selects
 * which variant is converted: {@code "ANN"} picks the plain labeled file,
 * anything else picks the MACD+ANN recommendation file.
 *
 * @param option selects the input/output file pair ("ANN" or other)
 * @throws IOException if reading the ARFF or writing the CSV fails
 */
public void ARFFtoCSVLoader(String option) throws IOException {
    // Both the input and output file names share the same stem, so
    // compute it once instead of branching twice.
    String stem = "ANN".equals(option)
            ? "labeled_" + filename
            : "labeled_" + filename + "_MACDANNRecommendation";

    // Load the ARFF data.
    ArffLoader loader = new ArffLoader();
    loader.setSource(new File("arff_files/" + stem + ".arff"));
    Instances data = loader.getDataSet();

    // Write it back out as CSV.
    CSVSaver saver = new CSVSaver();
    saver.setInstances(data);
    saver.setFile(new File("csv_files/" + stem + ".csv"));
    saver.writeBatch();
}

From source file:wekimini.kadenze.LoadableInstanceMaker.java

/**
 * Deserializes a {@code LoadableInstanceMaker} from the given file.
 * The file is read through an {@code ObjectInputStream} that is expected to
 * contain, in order: an XStream-encoded XML string describing the maker,
 * then (optionally) WEKA ARFF data with the dummy instances. The read order
 * here must match the order used when the file was written.
 *
 * @param filename path of the serialized maker
 * @return the deserialized maker, never {@code null}
 * @throws Exception if the maker or its dummy instances cannot be loaded
 */
public static LoadableInstanceMaker readFromFile(String filename) throws IOException, Exception {
    LoadableInstanceMaker m = null;
    FileInputStream instream = null;
    ObjectInputStream objin = null;
    Object o = null; // unused; leftover from an earlier readObject() approach
    boolean err = false;
    // NOTE(review): myEx is assigned on failure but never thrown here — the
    // generic Exception at the bottom is thrown instead.
    Exception myEx = new Exception();
    try {
        instream = new FileInputStream(filename);
        objin = new ObjectInputStream(instream);

        // First entry: the maker itself, stored as an XStream XML string.
        String xml = (String) objin.readObject();
        XStream xstream = new XStream();
        xstream.alias("LoadableInstanceMaker", LoadableInstanceMaker.class);
        m = (LoadableInstanceMaker) xstream.fromXML(xml);

        try {
            // Second entry: the dummy instances as ARFF data on the same stream.
            ArffLoader al = new ArffLoader();
            al.setSource(objin);
            m.dummyInstances = al.getDataSet();
        } catch (Exception ex) {
            // Could not load instances: not necessarily a problem
            logger.log(Level.WARNING, "No instances found in path file; not loading them");
            m.dummyInstances = null;
        }

        if (m.dummyInstances != null) {
            // Rebuild the (transient) output filter from its saved index string,
            // using the dummy instances as the input format.
            m.outputFilter = new Reorder();
            m.outputFilter.setAttributeIndices(m.outputFilterString);
            m.outputFilter.setInputFormat(m.dummyInstances);
        }

    } catch (Exception ex) {
        myEx = ex;
        err = true;
        logger.log(Level.WARNING, "Error encountered in reading from file: {0}", ex.getMessage());
    } finally {
        // Best-effort close of both streams; a close failure is only logged.
        try {
            if (objin != null) {
                objin.close();
            }
            if (instream != null) {
                instream.close();
            }
        } catch (IOException ex) {
            logger.log(Level.WARNING, "Encountered error closing file objects");
        }

    }
    // Unlike tryLoadFromFile, missing dummy instances are fatal here.
    if (err || m == null || m.dummyInstances == null) {
        throw new Exception("Could not load LoadableInstanceMaker from file");
    }
    return m;
}

From source file:wekimini.PathAndDataLoader.java

/**
 * Loads a serialized {@code Path} (plus its model and, optionally, its
 * training instances) from the given file, populating {@code loadedPath},
 * {@code loadedInstances} and {@code isLoaded} on success. The stream is
 * expected to contain, in order: an XStream XML string for the Path, the
 * model class name (or "null"), the model data, then optionally an
 * instances marker followed by ARFF data. The read order must match the
 * order used when the file was written.
 *
 * @param filename path of the serialized Path file
 * @throws Exception rethrows whatever failed during loading
 */
public void tryLoadFromFile(String filename) throws Exception {
    //Danger: Will not have any transient fields initialised!
    Path p = null;
    FileInputStream instream = null;
    ObjectInputStream objin = null;
    Object o = null; // unused; leftover from an earlier readObject() approach
    boolean err = false;
    Exception myEx = new Exception();
    try {
        instream = new FileInputStream(filename);
        objin = new ObjectInputStream(instream);

        // First entry: the Path itself as an XStream XML string.
        String xml = (String) objin.readObject();
        XStream xstream = new XStream();
        xstream.alias("Path", Path.class);
        p = (Path) xstream.fromXML(xml);
        //Model builder contains transients and will not be initialised properly by xstream,
        //so initialise it
        p.initialiseModelBuilder();

        // Second entry: the model's class name; "null" means no model was saved.
        String modelClassName = (String) objin.readObject();
        Model m = null;
        if (!modelClassName.equals("null")) {
            Class c = Class.forName(modelClassName);
            m = ModelLoader.loadModel(c, objin);
        }
        p.setModel((SupervisedLearningModel) m);

        try {
            // Optional third entry: a marker string, then ARFF instances.
            String instancesString = (String) objin.readObject();
            if (!instancesString.equals("null")) {
                ArffLoader al = new ArffLoader();
                al.setSource(objin);
                loadedInstances = al.getDataSet();
            } else {
                loadedInstances = null;
            }
        } catch (Exception ex) {
            //Could not load instances: not necessarily a problem
            logger.log(Level.WARNING, "No instances found in path file; not loading them");
            loadedInstances = null;
        }
    } catch (Exception ex) {
        // Remember the failure so it can be rethrown after cleanup.
        myEx = ex;
        err = true;

        logger.log(Level.WARNING, "Error encountered in reading from file: {0}", ex.getMessage());
    } finally {
        // Best-effort close of both streams; a close failure is only logged.
        try {
            if (objin != null) {
                objin.close();
            }
            if (instream != null) {
                instream.close();
            }
        } catch (IOException ex) {
            logger.log(Level.WARNING, "Encountered error closing file objects");
        }
    }
    if (err) {
        throw myEx;
    }
    // Only publish results once everything above succeeded.
    loadedPath = p;
    isLoaded = true;
}