Example usage for weka.core Instances Instances

List of usage examples for weka.core Instances Instances

Introduction

On this page you can find example usage for the weka.core Instances constructor Instances(Instances dataset, int capacity).

Prototype

public Instances(Instances dataset, int capacity) 

Document

Constructor creating an empty set of instances.
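
Before the collected examples, here is a minimal, self-contained sketch of the pattern most of them follow: copy the header (attribute definitions) of an existing dataset into an empty set, then add instances to it. The file name "data.arff" and the class name InstancesHeaderCopyExample are assumptions chosen for illustration; they do not come from the examples below.

import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class InstancesHeaderCopyExample {

    public static void main(String[] args) throws Exception {
        // hypothetical input file; replace with a real ARFF/CSV path
        Instances source = DataSource.read("data.arff");

        // new Instances(source, capacity) copies the attribute definitions only,
        // no instances; capacity is merely the initial size of the backing list
        Instances subset = new Instances(source, source.numInstances());

        // copy every second instance into the (initially empty) set
        for (int i = 0; i < source.numInstances(); i += 2)
            subset.add((Instance) source.instance(i).copy());

        System.out.println(subset.numAttributes() + " attributes, "
                + subset.numInstances() + " instances copied");
    }
}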

Usage

From source file:adams.flow.transformer.WekaStoreInstance.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instance inst;
    Instances data;
    Storage storage;

    result = null;

    inst = (Instance) m_InputToken.getPayload();
    storage = getStorageHandler().getStorage();

    // dataset present?
    if (!storage.has(m_Dataset)) {
        data = new Instances(inst.dataset(), 0);
        storage.put(m_Dataset, data);
        if (isLoggingEnabled())
            getLogger().info("Adding dataset to storage: " + m_Dataset);
    } else {
        data = (Instances) storage.get(m_Dataset);
        if (isLoggingEnabled())
            getLogger().info("Dataset present in storage: " + m_Dataset);
    }

    data.add(inst);
    storage.put(m_Dataset, data);
    if (isLoggingEnabled())
        getLogger().info("Added instance to storage: " + m_Dataset);

    // broadcast data
    m_OutputToken = new Token(data);

    return result;
}

From source file:adams.flow.transformer.WekaTrainAssociator.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances data;
    Instance inst;
    weka.associations.Associator cls;

    result = null;

    try {
        if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instances)) {
            cls = getAssociatorInstance();
            data = (Instances) m_InputToken.getPayload();
            cls.buildAssociations(data);
            if ((cls instanceof AssociationRulesProducer) && ((AssociationRulesProducer) cls).canProduceRules())
                m_OutputToken = new Token(new WekaAssociatorContainer(cls, new Instances(data, 0), data,
                        ((AssociationRulesProducer) cls).getAssociationRules().getRules()));
            else
                m_OutputToken = new Token(new WekaAssociatorContainer(cls, new Instances(data, 0), data));
        }
    } catch (Exception e) {
        m_OutputToken = null;
        result = handleException("Failed to process data:", e);
    }

    if (m_OutputToken != null)
        updateProvenance(m_OutputToken);

    return result;
}

From source file:adams.flow.transformer.WekaTrainClassifier.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances data;
    Instance inst;
    weka.classifiers.Classifier cls;

    result = null;

    try {
        cls = null;
        if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instances)) {
            cls = getClassifierInstance();
            data = (Instances) m_InputToken.getPayload();
            cls.buildClassifier(data);
            m_OutputToken = new Token(new WekaModelContainer(cls, new Instances(data, 0), data));
        } else if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instance)) {
            if (m_IncrementalClassifier == null) {
                cls = getClassifierInstance();
                if (!(cls instanceof UpdateableClassifier))
                    result = m_Classifier + "/" + cls.getClass().getName()
                            + " is not an incremental classifier!";
            }
            if (result == null) {
                inst = (Instance) m_InputToken.getPayload();
                if (m_IncrementalClassifier == null) {
                    m_IncrementalClassifier = cls;
                    if (m_SkipBuild) {
                        ((UpdateableClassifier) m_IncrementalClassifier).updateClassifier(inst);
                    } else {
                        data = new Instances(inst.dataset(), 1);
                        data.add((Instance) inst.copy());
                        m_IncrementalClassifier.buildClassifier(data);
                    }
                } else {
                    ((UpdateableClassifier) m_IncrementalClassifier).updateClassifier(inst);
                }
                m_OutputToken = new Token(
                        new WekaModelContainer(m_IncrementalClassifier, new Instances(inst.dataset(), 0)));
            }
        }
    } catch (Exception e) {
        m_OutputToken = null;
        result = handleException("Failed to process data:", e);
    }

    if (m_OutputToken != null)
        updateProvenance(m_OutputToken);

    return result;
}

From source file:adams.flow.transformer.WekaTrainClusterer.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances data;
    Instance inst;
    weka.clusterers.Clusterer cls;
    WekaModelContainer cont;

    result = null;

    try {
        cls = null;
        if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instances)) {
            cls = getClustererInstance();
            data = (Instances) m_InputToken.getPayload();
            cls.buildClusterer(data);
            cont = new WekaModelContainer(cls, new Instances(data, 0), data);
            cont = m_PostProcessor.postProcess(cont);
            m_OutputToken = new Token(cont);
        } else if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instance)) {
            if (m_IncrementalClusterer == null) {
                cls = getClustererInstance();
                if (!(cls instanceof UpdateableClusterer))
                    result = m_Clusterer + "/" + cls.getClass().getName() + " is not an incremental clusterer!";
            }
            if (result == null) {
                inst = (Instance) m_InputToken.getPayload();
                if (m_IncrementalClusterer == null) {
                    m_IncrementalClusterer = cls;
                    data = new Instances(inst.dataset(), 1);
                    data.add((Instance) inst.copy());
                    m_IncrementalClusterer.buildClusterer(data);
                } else {
                    ((UpdateableClusterer) m_IncrementalClusterer).updateClusterer(inst);
                    ((UpdateableClusterer) m_IncrementalClusterer).updateFinished();
                }
                m_OutputToken = new Token(
                        new WekaModelContainer(m_IncrementalClusterer, new Instances(inst.dataset(), 0)));
            }
        }
    } catch (Exception e) {
        m_OutputToken = null;
        result = handleException("Failed to process input: " + m_InputToken.getPayload(), e);
    }

    if (m_OutputToken != null)
        updateProvenance(m_OutputToken);

    return result;
}

From source file:adams.flow.transformer.WekaTrainForecaster.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instances data;
    AbstractForecaster cls;
    WekaForecastModelContainer cont;

    result = null;

    try {
        cls = getForecasterInstance();
        if (cls == null) {
            result = "Failed to obtain forecaster!";
            return result;
        }
        if ((m_InputToken != null) && (m_InputToken.getPayload() instanceof Instances)) {
            data = (Instances) m_InputToken.getPayload();
            cls.buildForecaster(data);
            cont = new WekaForecastModelContainer(cls, new Instances(data, 0), data);
            if (m_StoreTransformed) {
                if (cls instanceof TSLagUser)
                    cont.setValue(WekaForecastModelContainer.VALUE_TRANSFORMED,
                            ((TSLagUser) cls).getTSLagMaker().getTransformedData(data));
            }
            m_OutputToken = new Token(cont);
        }
    } catch (Exception e) {
        m_OutputToken = null;
        result = handleException("Failed to process data:", e);
    }

    if (m_OutputToken != null)
        updateProvenance(m_OutputToken);

    return result;
}

From source file:adams.gui.menu.AppendDatasets.java

License:Open Source License

/**
 * Performs the append.
 *
 * @param frame       the frame to close
 * @param input       the files to merge
 * @param output      the output file
 */
protected void doAppend(ChildFrame frame, File[] input, File output) {
    Instances[] data;
    Instances full;
    int i;
    int n;
    AbstractFileLoader loader;
    DataSink sink;
    int count;
    TIntArrayList transferAtt;
    int index;

    if (input.length < 2) {
        GUIHelper.showErrorMessage(getOwner(), "At least two files are required!");
        return;
    }

    // load and check compatibility
    loader = ConverterUtils.getLoaderForFile(input[0]);
    data = new Instances[input.length];
    count = 0;
    transferAtt = new TIntArrayList();
    for (i = 0; i < input.length; i++) {
        try {
            loader.setFile(input[i]);
            data[i] = DataSource.read(loader);
            if (i > 0) {
                if (!data[0].equalHeaders(data[i])) {
                    GUIHelper.showErrorMessage(getOwner(), "Datasets '" + input[0] + "' and '" + input[i]
                            + "' are not compatible!\n" + data[0].equalHeadersMsg(data[i]));
                    return;
                }
            } else {
                for (n = 0; n < data[0].numAttributes(); n++) {
                    if (data[0].attribute(n).isString() || data[0].attribute(n).isRelationValued())
                        transferAtt.add(n);
                }
            }
            count += data[i].numInstances();
        } catch (Exception e) {
            GUIHelper.showErrorMessage(getOwner(),
                    "Failed to read '" + input[i] + "'!\n" + Utils.throwableToString(e));
            return;
        }
    }

    // combine
    full = new Instances(data[0], count);
    for (i = 0; i < data.length; i++) {
        for (Instance inst : data[i]) {
            if (transferAtt.size() > 0) {
                for (n = 0; n < transferAtt.size(); n++) {
                    index = transferAtt.get(n);
                    if (inst.attribute(index).isString())
                        full.attribute(index).addStringValue(inst.stringValue(index));
                    else if (inst.attribute(index).isRelationValued())
                        full.attribute(index).addRelation(inst.relationalValue(index));
                    else
                        throw new IllegalStateException(
                                "Unhandled attribute type: " + Attribute.typeToString(inst.attribute(index)));
                }
            }
            full.add(inst);
        }
    }

    // save
    try {
        sink = new DataSink(output.getAbsolutePath());
        sink.write(full);
    } catch (Exception e) {
        GUIHelper.showErrorMessage(getOwner(),
                "Failed to save data to '" + output + "'!\n" + Utils.throwableToString(e));
        return;
    }

    GUIHelper.showInformationMessage(null, "Successfully appended!\n" + output);
    frame.dispose();
}

From source file:adams.gui.visualization.debug.objectrenderer.WekaInstancesRenderer.java

License:Open Source License

/**
 * Performs the actual rendering.
 *
 * @param obj      the object to render
 * @param panel   the panel to render into
 * @return      null if successful, otherwise error message
 */
@Override
protected String doRender(Object obj, JPanel panel) {
    Instance inst;
    Instances data;
    InstancesTable table;
    InstancesTableModel model;
    BaseScrollPane scrollPane;
    PlainTextRenderer plain;
    SpreadSheet sheet;
    Row row;
    int i;
    SpreadSheetRenderer sprenderer;

    if (obj instanceof Instances) {
        data = (Instances) obj;
        if (data.numInstances() == 0) {
            sheet = new DefaultSpreadSheet();
            row = sheet.getHeaderRow();
            row.addCell("I").setContentAsString("Index");
            row.addCell("N").setContentAsString("Name");
            row.addCell("T").setContentAsString("Type");
            row.addCell("C").setContentAsString("Class");
            for (i = 0; i < data.numAttributes(); i++) {
                row = sheet.addRow();
                row.addCell("I").setContent(i + 1);
                row.addCell("N").setContentAsString(data.attribute(i).name());
                row.addCell("T").setContentAsString(Attribute.typeToString(data.attribute(i)));
                row.addCell("C").setContent((i == data.classIndex()) ? "true" : "");
            }
            sprenderer = new SpreadSheetRenderer();
            sprenderer.render(sheet, panel);
        } else {
            model = new InstancesTableModel(data);
            model.setShowAttributeIndex(true);
            table = new InstancesTable(model);
            scrollPane = new BaseScrollPane(table);
            panel.add(scrollPane, BorderLayout.CENTER);
        }
    } else {
        inst = (Instance) obj;
        if (inst.dataset() != null) {
            data = new Instances(inst.dataset(), 0);
            data.add((Instance) inst.copy());
            table = new InstancesTable(data);
            scrollPane = new BaseScrollPane(table);
            panel.add(scrollPane, BorderLayout.CENTER);
        } else {
            plain = new PlainTextRenderer();
            plain.render(obj, panel);
        }
    }

    return null;
}

From source file:adams.gui.visualization.instances.InstancesTable.java

License:Open Source License

/**
 * Exports the data.
 *
 * @param range   what data to export
 */
protected void saveAs(TableRowRange range) {
    int retVal;
    AbstractFileSaver saver;
    File file;
    Instances original;
    Instances data;
    int[] selRows;
    int i;

    retVal = m_FileChooser.showSaveDialog(InstancesTable.this);
    if (retVal != WekaFileChooser.APPROVE_OPTION)
        return;

    saver = m_FileChooser.getWriter();
    file = m_FileChooser.getSelectedFile();
    original = getInstances();
    switch (range) {
    case ALL:
        data = original;
        break;

    case SELECTED:
        data = new Instances(original, 0);
        selRows = getSelectedRows();
        for (i = 0; i < selRows.length; i++)
            data.add((Instance) original.instance(getActualRow(selRows[i])).copy());
        break;

    case VISIBLE:
        data = new Instances(original, 0);
        for (i = 0; i < getRowCount(); i++)
            data.add((Instance) original.instance(getActualRow(i)).copy());
        break;

    default:
        throw new IllegalStateException("Unhandled range type: " + range);
    }

    try {
        saver.setFile(file);
        saver.setInstances(data);
        saver.writeBatch();
    } catch (Exception ex) {
        GUIHelper.showErrorMessage(InstancesTable.this, "Failed to save data (" + range + ") to: " + file, ex);
    }
}

From source file:adams.gui.visualization.instances.InstancesTableModel.java

License:Open Source License

/**
 * sorts the instances via the given attribute
 *
 * @param columnIndex the index of the column
 * @param ascending ascending if true, otherwise descending
 */
public void sortInstances(int columnIndex, boolean ascending) {
    int offset;

    offset = 1;
    if (m_ShowWeightsColumn)
        offset++;

    if ((columnIndex >= offset) && (columnIndex < getColumnCount())) {
        addUndoPoint();
        m_Data.stableSort(columnIndex - offset);
        if (!ascending) {
            Instances reversedData = new Instances(m_Data, m_Data.numInstances());
            int i = m_Data.numInstances();
            while (i > 0) {
                i--;
                int equalCount = 1;
                while ((i > 0) && (m_Data.instance(i).value(columnIndex - offset) == m_Data.instance(i - 1)
                        .value(columnIndex - offset))) {
                    equalCount++;
                    i--;
                }
                int j = 0;
                while (j < equalCount) {
                    reversedData.add(m_Data.instance(i + j));
                    j++;
                }
            }
            m_Data = reversedData;
        }
        notifyListener(new TableModelEvent(this));
    }
}

From source file:adams.ml.model.classification.WekaClassificationModel.java

License:Open Source License

/**
 * Initializes the model.
 *
 * @param model   the built Weka classifier
 * @param data   the training data
 * @param inst   the Weka training data
 */
public WekaClassificationModel(weka.classifiers.Classifier model, Dataset data, Instances inst) {
    m_Model = model;
    m_DatasetInfo = new DatasetInfo(data);
    m_InstancesHeader = new Instances(inst, 0);
}