Example usage for weka.core Instance stringValue

List of usage examples for weka.core Instance stringValue

Introduction

On this page you can find example usages of weka.core Instance stringValue.

Prototype

public String stringValue(Attribute att);

Document

Returns the value of a nominal, string, date, or relational attribute for the instance as a string.
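
Before the project examples below, here is a minimal, self-contained sketch of the call itself; the ARFF file name and the attribute name "outlook" are placeholders chosen for illustration, not part of the Weka API.

import weka.core.Attribute;
import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class StringValueExample {

    public static void main(String[] args) throws Exception {
        // load any dataset that contains a nominal, string, or date attribute
        // ("weather.nominal.arff" is a placeholder path)
        Instances data = DataSource.read("weather.nominal.arff");
        Instance inst = data.instance(0);

        // look up an attribute by name ("outlook" is a placeholder name)
        Attribute att = data.attribute("outlook");
        if (att != null && (att.isNominal() || att.isString() || att.isDate())) {
            // stringValue returns the nominal/string/date/relational value as a String;
            // it is not applicable to purely numeric attributes
            System.out.println(att.name() + " = " + inst.stringValue(att));
        }
    }
}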

Usage

From source file:adams.data.conversion.AbstractMatchWekaInstanceAgainstHeader.java

License:Open Source License

/**
 * Matches the input instance against the header.
 *
 * @param input   the Instance to align to the header
 * @return      the aligned Instance
 */
protected Instance match(Instance input) {
    Instance result;
    double[] values;
    int i;

    values = new double[m_Dataset.numAttributes()];
    for (i = 0; i < m_Dataset.numAttributes(); i++) {
        values[i] = Utils.missingValue();
        switch (m_Dataset.attribute(i).type()) {
        case Attribute.NUMERIC:
        case Attribute.DATE:
            values[i] = input.value(i);
            break;
        case Attribute.NOMINAL:
            if (m_Dataset.attribute(i).indexOfValue(input.stringValue(i)) != -1)
                values[i] = m_Dataset.attribute(i).indexOfValue(input.stringValue(i));
            break;
        case Attribute.STRING:
            values[i] = m_Dataset.attribute(i).addStringValue(input.stringValue(i));
            break;
        case Attribute.RELATIONAL:
            values[i] = m_Dataset.attribute(i).addRelation(input.relationalValue(i));
            break;
        default:
            throw new IllegalStateException(
                    "Unhandled attribute type: " + Attribute.typeToString(m_Dataset.attribute(i).type()));
        }
    }

    if (input instanceof SparseInstance)
        result = new SparseInstance(input.weight(), values);
    else
        result = new DenseInstance(input.weight(), values);
    result.setDataset(m_Dataset);

    // fix class index, if necessary
    if ((input.classIndex() != m_Dataset.classIndex()) && (m_Dataset.classIndex() < 0))
        m_Dataset.setClassIndex(input.classIndex());

    return result;
}

From source file:adams.data.instances.InstanceComparator.java

License:Open Source License

/**
 * Compares its two arguments for order.  Returns a negative integer,
 * zero, or a positive integer as the first argument is less than, equal
 * to, or greater than the second.
 *
 * @param o1 the first object to be compared.
 * @param o2 the second object to be compared.
 * @return a negative integer, zero, or a positive integer as the
 *           first argument is less than, equal to, or greater than the
 *          second.
 */
@Override
public int compare(Instance o1, Instance o2) {
    int result;
    Instances header;
    int i;
    int weight;
    double d1;
    double d2;

    result = 0;
    header = o1.dataset();
    i = 0;
    while ((result == 0) && (i < m_Indices.length)) {
        if (o1.isMissing(m_Indices[i]) && o2.isMissing(m_Indices[i]))
            result = 0;
        else if (o1.isMissing(m_Indices[i]))
            result = -1;
        else if (o2.isMissing(m_Indices[i]))
            result = +1;
        else if (header.attribute(m_Indices[i]).isNumeric()) {
            d1 = o1.value(m_Indices[i]);
            d2 = o2.value(m_Indices[i]);
            if (d1 < d2)
                result = -1;
            else if (d1 == d2)
                result = 0;
            else
                result = +1;
        } else {
            result = o1.stringValue(m_Indices[i]).compareTo(o2.stringValue(m_Indices[i]));
        }

        if (!m_Ascending[i])
            result = -result;

        // add weight to index
        weight = (int) Math.pow(10, (m_Indices.length - i));
        result *= weight;

        i++;
    }

    return result;
}

From source file:adams.flow.sink.WekaInstanceViewer.java

License:Open Source License

/**
 * Displays the token (the panel and dialog have already been created at
 * this stage).
 *
 * @param token   the token to display
 */
@Override
protected void display(Token token) {
    InstanceContainerManager manager;
    InstanceContainer cont;
    weka.core.Instance winst;
    weka.core.Attribute att;
    String id;
    adams.data.instance.Instance inst;

    if (token.getPayload() instanceof weka.core.Instance) {
        winst = (weka.core.Instance) token.getPayload();
        inst = new adams.data.instance.Instance();
        inst.set(winst);
        if (!m_ID.isEmpty()) {
            att = winst.dataset().attribute(m_ID);
            if (att != null) {
                if (att.isNominal() || att.isString())
                    id = winst.stringValue(att.index());
                else
                    id = "" + winst.value(att.index());
                inst.setID(id);
            }
        }
    } else {
        inst = (adams.data.instance.Instance) token.getPayload();
        if (inst.hasReport() && inst.getReport().hasValue(m_ID))
            inst.setID("" + inst.getReport().getValue(new Field(m_ID, DataType.UNKNOWN)));
    }

    manager = m_InstancePanel.getContainerManager();
    cont = manager.newContainer(inst);
    manager.startUpdate();
    manager.add(cont);

    m_Updater.update(m_InstancePanel, cont);
}

From source file:adams.flow.transformer.WekaGetInstanceValue.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instance inst;
    int index;

    result = null;

    inst = (Instance) m_InputToken.getPayload();

    try {
        if (m_AttributeName.length() > 0) {
            index = inst.dataset().attribute(m_AttributeName).index();
        } else {
            m_Index.setMax(inst.numAttributes());
            index = m_Index.getIntIndex();
        }
        if (inst.isMissing(index)) {
            m_OutputToken = new Token("?");
        } else {
            switch (inst.attribute(index).type()) {
            case Attribute.NUMERIC:
                m_OutputToken = new Token(inst.value(index));
                break;

            case Attribute.DATE:
            case Attribute.NOMINAL:
            case Attribute.STRING:
            case Attribute.RELATIONAL:
                m_OutputToken = new Token(inst.stringValue(index));
                break;

            default:
                result = "Unhandled attribute type: " + inst.attribute(index).type();
            }
        }
    } catch (Exception e) {
        result = handleException("Failed to obtain value from instance:\n" + inst, e);
    }

    return result;
}

From source file:adams.flow.transformer.WekaInstanceBuffer.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instance[] insts;
    Instance inst;
    double[] values;
    int i;
    int n;
    boolean updated;

    result = null;

    if (m_Operation == Operation.INSTANCE_TO_INSTANCES) {
        if (m_InputToken.getPayload() instanceof Instance) {
            insts = new Instance[] { (Instance) m_InputToken.getPayload() };
        } else {
            insts = (Instance[]) m_InputToken.getPayload();
        }

        for (n = 0; n < insts.length; n++) {
            inst = insts[n];

            if ((m_Buffer != null) && m_CheckHeader) {
                if (!m_Buffer.equalHeaders(inst.dataset())) {
                    getLogger().info("Header changed, resetting buffer");
                    m_Buffer = null;
                }
            }

            // buffer instance
            if (m_Buffer == null)
                m_Buffer = new Instances(inst.dataset(), 0);

            // we need to make sure that string and relational values are in our
            // buffer header and update the current Instance accordingly before
            // buffering it
            values = inst.toDoubleArray();
            updated = false;
            for (i = 0; i < values.length; i++) {
                if (inst.isMissing(i))
                    continue;
                if (inst.attribute(i).isString()) {
                    values[i] = m_Buffer.attribute(i).addStringValue(inst.stringValue(i));
                    updated = true;
                } else if (inst.attribute(i).isRelationValued()) {
                    values[i] = m_Buffer.attribute(i).addRelation(inst.relationalValue(i));
                    updated = true;
                }
            }

            if (updated) {
                if (inst instanceof SparseInstance) {
                    inst = new SparseInstance(inst.weight(), values);
                } else if (inst instanceof BinarySparseInstance) {
                    inst = new BinarySparseInstance(inst.weight(), values);
                } else {
                    if (!(inst instanceof DenseInstance)) {
                        getLogger().severe("Unhandled instance class (" + inst.getClass().getName() + "), "
                                + "defaulting to " + DenseInstance.class.getName());
                    }
                    inst = new DenseInstance(inst.weight(), values);
                }
            } else {
                inst = (Instance) inst.copy();
            }

            m_Buffer.add(inst);
        }

        if (m_Buffer.numInstances() % m_Interval == 0) {
            m_OutputToken = new Token(m_Buffer);
            if (m_ClearBuffer)
                m_Buffer = null;
        }
    } else if (m_Operation == Operation.INSTANCES_TO_INSTANCE) {
        m_Buffer = (Instances) m_InputToken.getPayload();
        m_Iterator = m_Buffer.iterator();
    } else {
        throw new IllegalStateException("Unhandled operation: " + m_Operation);
    }

    return result;
}

From source file:adams.flow.transformer.WekaInstancesAppend.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    String[] filesStr;
    File[] files;
    int i;
    int n;
    Instances[] inst;
    Instances full;
    String msg;
    StringBuilder relation;
    double[] values;

    result = null;

    // get filenames
    files = null;
    inst = null;
    if (m_InputToken.getPayload() instanceof String[]) {
        filesStr = (String[]) m_InputToken.getPayload();
        files = new File[filesStr.length];
        for (i = 0; i < filesStr.length; i++)
            files[i] = new PlaceholderFile(filesStr[i]);
    } else if (m_InputToken.getPayload() instanceof File[]) {
        files = (File[]) m_InputToken.getPayload();
    } else if (m_InputToken.getPayload() instanceof Instances[]) {
        inst = (Instances[]) m_InputToken.getPayload();
    } else {
        throw new IllegalStateException("Unhandled input type: " + m_InputToken.getPayload().getClass());
    }

    // load data?
    if (files != null) {
        inst = new Instances[files.length];
        for (i = 0; i < files.length; i++) {
            try {
                inst[i] = DataSource.read(files[i].getAbsolutePath());
            } catch (Exception e) {
                result = handleException("Failed to load dataset: " + files[i], e);
                break;
            }
        }
    }

    // test compatibility
    if (result == null) {
        for (i = 0; i < inst.length - 1; i++) {
            for (n = i + 1; n < inst.length; n++) {
                if ((msg = inst[i].equalHeadersMsg(inst[n])) != null) {
                    result = "Dataset #" + (i + 1) + " and #" + (n + 1) + " are not compatible:\n" + msg;
                    break;
                }
            }
            if (result != null)
                break;
        }
    }

    // append
    if (result == null) {
        full = new Instances(inst[0]);
        relation = new StringBuilder(inst[0].relationName());
        for (i = 1; i < inst.length; i++) {
            relation.append("+" + inst[i].relationName());
            for (Instance row : inst[i]) {
                values = row.toDoubleArray();
                for (n = 0; n < values.length; n++) {
                    if (row.attribute(n).isString())
                        values[n] = full.attribute(n).addStringValue(row.stringValue(n));
                    else if (row.attribute(n).isRelationValued())
                        values[n] = full.attribute(n).addRelation(row.relationalValue(n));
                }
                if (row instanceof SparseInstance)
                    row = new SparseInstance(row.weight(), values);
                else
                    row = new DenseInstance(row.weight(), values);
                full.add(row);
            }
        }
        full.setRelationName(relation.toString());
        m_OutputToken = new Token(full);
    }

    return result;
}

From source file:adams.flow.transformer.WekaInstanceStreamPlotGenerator.java

License:Open Source License

/**
 * Executes the flow item.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    Instance inst;
    SequencePlotterContainer cont;
    int[] indices;
    int i;

    result = null;

    inst = (Instance) m_InputToken.getPayload();

    m_Counter++;
    m_Containers.clear();
    m_Attributes.setMax(inst.numAttributes());
    indices = m_Attributes.getIntIndices();

    for (i = 0; i < indices.length; i++) {
        if (inst.attribute(indices[i]).isNominal())
            cont = new SequencePlotterContainer(inst.dataset().attribute(indices[i]).name(),
                    new Double(m_Counter), inst.stringValue(indices[i]));
        else
            cont = new SequencePlotterContainer(inst.dataset().attribute(indices[i]).name(),
                    new Double(m_Counter), inst.value(indices[i]));
        m_Containers.add(cont);
    }

    return result;
}

From source file:adams.gui.menu.AppendDatasets.java

License:Open Source License

/**
 * Performs the append.
 *
 * @param frame       the frame to close
 * @param input       the files to merge
 * @param output      the output file
 */
protected void doAppend(ChildFrame frame, File[] input, File output) {
    Instances[] data;
    Instances full;
    int i;
    int n;
    AbstractFileLoader loader;
    DataSink sink;
    int count;
    TIntArrayList transferAtt;
    int index;

    if (input.length < 2) {
        GUIHelper.showErrorMessage(getOwner(), "At least two files are required!");
        return;
    }

    // load and check compatibility
    loader = ConverterUtils.getLoaderForFile(input[0]);
    data = new Instances[input.length];
    count = 0;
    transferAtt = new TIntArrayList();
    for (i = 0; i < input.length; i++) {
        try {
            loader.setFile(input[i]);
            data[i] = DataSource.read(loader);
            if (i > 0) {
                if (!data[0].equalHeaders(data[i])) {
                    GUIHelper.showErrorMessage(getOwner(), "Datasets '" + input[0] + "' and '" + input[i]
                            + "' are not compatible!\n" + data[0].equalHeadersMsg(data[i]));
                    return;
                }
            } else {
                for (n = 0; n < data[0].numAttributes(); n++) {
                    if (data[0].attribute(n).isString() || data[0].attribute(n).isRelationValued())
                        transferAtt.add(n);
                }
            }
            count += data[i].numInstances();
        } catch (Exception e) {
            GUIHelper.showErrorMessage(getOwner(),
                    "Failed to read '" + input[i] + "'!\n" + Utils.throwableToString(e));
            return;
        }
    }

    // combine
    full = new Instances(data[0], count);
    for (i = 0; i < data.length; i++) {
        for (Instance inst : data[i]) {
            if (transferAtt.size() > 0) {
                for (n = 0; n < transferAtt.size(); n++) {
                    index = transferAtt.get(n);
                    if (inst.attribute(index).isString())
                        full.attribute(index).addStringValue(inst.stringValue(index));
                    else if (inst.attribute(index).isRelationValued())
                        full.attribute(index).addRelation(inst.relationalValue(index));
                    else
                        throw new IllegalStateException(
                                "Unhandled attribute type: " + Attribute.typeToString(inst.attribute(index)));
                }
            }
            full.add(inst);
        }
    }

    // save
    try {
        sink = new DataSink(output.getAbsolutePath());
        sink.write(full);
    } catch (Exception e) {
        GUIHelper.showErrorMessage(getOwner(),
                "Failed to save data to '" + output + "'!\n" + Utils.throwableToString(e));
        return;
    }

    GUIHelper.showInformationMessage(null, "Successfully appended!\n" + output);
    frame.dispose();
}

From source file:affective.core.ArffLexiconEvaluator.java

License:Open Source License

/**
 * Processes all the dictionary files.
 * @throws IOException  an IOException will be raised if an invalid file is supplied
 */
public void processDict() throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(this.m_lexiconFile));
    Instances lexInstances = new Instances(reader);

    // set upper value for word index
    lexiconWordIndex.setUpper(lexInstances.numAttributes() - 1);

    List<Attribute> numericAttributes = new ArrayList<Attribute>();
    List<Attribute> nominalAttributes = new ArrayList<Attribute>();

    // checks all numeric and nominal attributes and discards the word attribute
    for (int i = 0; i < lexInstances.numAttributes(); i++) {

        if (i != this.lexiconWordIndex.getIndex()) {
            if (lexInstances.attribute(i).isNumeric()) {
                numericAttributes.add(lexInstances.attribute(i));
                // adds the attribute name to the message-level features to be calculated
                this.featureNames.add(this.lexiconName + "-" + lexInstances.attribute(i).name());
            }

            else if (lexInstances.attribute(i).isNominal()) {
                nominalAttributes.add(lexInstances.attribute(i));
                // adds the attribute name together with the nominal value to the message-level features to be calculated
                int numValues = lexInstances.attribute(i).numValues();
                for (int j = 0; j < numValues; j++)
                    this.featureNames.add(this.lexiconName + "-" + lexInstances.attribute(i).name() + "-"
                            + lexInstances.attribute(i).value(j));
            }
        }
    }

    // Maps all words with their affective scores discarding missing values
    for (Instance inst : lexInstances) {
        if (inst.attribute(this.lexiconWordIndex.getIndex()).isString()) {
            String word = inst.stringValue(this.lexiconWordIndex.getIndex());
            // stems the word
            word = this.m_stemmer.stem(word);

            // map numeric scores
            if (!numericAttributes.isEmpty()) {
                Map<String, Double> wordVals = new HashMap<String, Double>();
                for (Attribute na : numericAttributes) {
                    if (!weka.core.Utils.isMissingValue(inst.value(na)))
                        wordVals.put(na.name(), inst.value(na));
                }
                this.numDict.put(word, wordVals);
            }

            // map nominal associations
            if (!nominalAttributes.isEmpty()) {
                Map<String, String> wordCounts = new HashMap<String, String>();
                for (Attribute no : nominalAttributes) {
                    if (!weka.core.Utils.isMissingValue(inst.value(no))) {
                        wordCounts.put(no.name(), no.value((int) inst.value(no)));
                    }
                    this.nomDict.put(word, wordCounts);
                }
            }
        }
    }
}

From source file:affective.core.ArffLexiconWordLabeller.java

License:Open Source License

/**
 * Processes all the dictionary files.
 * @throws IOException  an IOException will be raised if an invalid file is supplied
 */
public void processDict() throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(this.m_lexiconFile));
    Instances lexInstances = new Instances(reader);

    // set upper value for word index
    lexiconWordIndex.setUpper(lexInstances.numAttributes() - 1);

    // checks all numeric and nominal attributes and discards the word attribute
    for (int i = 0; i < lexInstances.numAttributes(); i++) {

        if (i != this.lexiconWordIndex.getIndex()) {
            if (lexInstances.attribute(i).isNumeric() || lexInstances.attribute(i).isNominal()) {
                this.attributes.add(lexInstances.attribute(i));
            }
        }
    }

    // Maps all words with their affective scores discarding missing values
    for (Instance inst : lexInstances) {
        if (inst.attribute(this.lexiconWordIndex.getIndex()).isString()) {
            String word = inst.stringValue(this.lexiconWordIndex.getIndex());
            // stems the word
            word = this.m_stemmer.stem(word);

            // map numeric scores
            if (!attributes.isEmpty()) {
                Map<Attribute, Double> wordVals = new HashMap<Attribute, Double>();
                for (Attribute na : attributes) {
                    wordVals.put(na, inst.value(na));
                }
                this.attValMap.put(word, wordVals);
            }
        }
    }
}