Example usage for weka.core Instances add

List of usage examples for weka.core Instances add

Introduction

On this page you can find example usage for weka.core Instances add.

Prototype

@Override
public boolean add(Instance instance) 

Document

Adds one instance to the end of the set.
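A minimal sketch of a direct call, assuming Weka 3.7 or later (where DenseInstance replaces the older Instance constructor) and a hypothetical two-attribute dataset built in code; the attribute names and values are illustrative only:

import java.util.ArrayList;

import weka.core.Attribute;
import weka.core.DenseInstance;
import weka.core.Instance;
import weka.core.Instances;

public class AddExample {
    public static void main(String[] args) {
        // Hypothetical dataset header with two numeric attributes.
        ArrayList<Attribute> attributes = new ArrayList<>();
        attributes.add(new Attribute("x"));
        attributes.add(new Attribute("y"));
        Instances data = new Instances("example", attributes, 0);

        // Fill a new instance and append it to the end of the set.
        Instance inst = new DenseInstance(data.numAttributes());
        inst.setValue(0, 1.5);
        inst.setValue(1, 2.5);
        data.add(inst); // add makes a (shallow) copy and appends it

        System.out.println(data.numInstances()); // 1
    }
}

Because add copies the given instance, later changes to inst do not affect the copy stored in data.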

Usage

From source file:transformation.mimlTOml.GeometricTransformation.java

License:Open Source License

@Override
public MultiLabelInstances transformDataset() throws Exception {

    Instances newData = new Instances(template);
    int labelIndices[] = dataset.getLabelIndices();
    Instance newInst = new DenseInstance(newData.numAttributes());
    newInst.setDataset(newData); // Sets the reference to the dataset

    // For all bags in the dataset
    double nBags = dataset.getNumBags();
    for (int i = 0; i < nBags; i++) {
        // retrieves a bag
        Bag bag = dataset.getBag(i);
        // sets the bagLabel
        newInst.setValue(0, bag.value(0));

        // retrieves instances (relational value) for each bag
        Instances instances = bag.getBagAsInstances();
        // for all attributes in bag
        for (int j = 0, attIdx = 1; j < instances.numAttributes(); j++, attIdx++) {
            double[] minimax = minimax(instances, j);
            double value = (minimax[0] + minimax[1]) / 2.0;
            newInst.setValue(attIdx, value);
        }

        // inserts label information into the instance
        for (int j = 0; j < labelIndices.length; j++) {
            newInst.setValue(updatedLabelIndices[j], dataset.getBag(i).value(labelIndices[j]));
        }

        newData.add(newInst);
    }
    return new MultiLabelInstances(newData, dataset.getLabelsMetaData());
}

From source file:transformation.mimlTOml.MiniMaxTransformation.java

License:Open Source License

@Override
public MultiLabelInstances transformDataset() throws Exception {

    Instances newData = new Instances(template);
    int labelIndices[] = dataset.getLabelIndices();
    Instance newInst = new DenseInstance(newData.numAttributes());
    newInst.setDataset(newData); // Sets the reference to the dataset

    // For all bags in the dataset
    double nBags = dataset.getNumBags();
    for (int i = 0; i < nBags; i++) {
        // retrieves a bag
        Bag bag = dataset.getBag(i);
        // sets the bagLabel
        newInst.setValue(0, bag.value(0));

        // retrieves instances (relational value) for each bag
        Instances instances = bag.getBagAsInstances();
        // For all attributes in bag
        for (int j = 0, attIdx = 1; j < instances.numAttributes(); j++, attIdx++) {
            double[] minimax = minimax(instances, j);
            newInst.setValue(attIdx, minimax[0]); // minimum value
            newInst.setValue(attIdx + instances.numAttributes(), minimax[1]); // maximum value
        }
        // Copy label information into the dataset
        for (int j = 0; j < labelIndices.length; j++) {
            newInst.setValue(updatedLabelIndices[j], bag.value(labelIndices[j]));
        }
        newData.add(newInst);

    }
    return new MultiLabelInstances(newData, dataset.getLabelsMetaData());
}

From source file:tubes1.Main.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws IOException, Exception {
    // TODO code application logic here
    String filename = "weather";

    //Still not sure what kind of .csv file can be read
    //CsvToArff convert = new CsvToArff(filename+".csv");

    //LOAD FILE
    BufferedReader datafile = readDataFile("src/" + filename + ".arff");
    Instances data = new Instances(datafile);
    data.setClassIndex(data.numAttributes() - 1);
    //END OF LOAD FILE

    CustomFilter fil = new CustomFilter();

    //REMOVE USELESS ATTRIBUTE
    data = fil.removeAttribute(data);
    System.out.println(data);

    Instances[] allData = new Instances[4];
    //data for Id3
    allData[0] = fil.resampling(fil.convertNumericToNominal(data));
    //data for J48
    allData[1] = fil.convertNumericToNominal(fil.resampling(data));
    //data for myId3
    allData[2] = allData[0];
    //data for myC4.5
    allData[3] = fil.resampling(fil.convertNumericToNominal(fil.convertNumericRange(data)));

    data = fil.convertNumericToNominal(data);
    // BUILD CLASSIFIERS
    Classifier[] models = { new Id3(), //C4.5
            new J48(), new myID3(), new myC45() };

    for (int j = 0; j < models.length; j++) {
        FastVector predictions = new FastVector();
        //FOR TEN-FOLD CROSS VALIDATION
        Instances[][] split = crossValidationSplit(allData[j], 10);
        // Separate split into training and testing arrays
        Instances[] trainingSplits = split[0];
        Instances[] testingSplits = split[1];
        System.out.println("\n---------------------------------");
        for (int i = 0; i < trainingSplits.length; i++) {
            try {
                //                    System.out.println("Building for training Split : " + i);
                Evaluation validation = classify(models[j], trainingSplits[i], testingSplits[i]);

                predictions.appendElements(validation.predictions());

                // Uncomment to see the summary for each training-testing pair.
                //                    System.out.println(models[j].toString());
            } catch (Exception ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
            // Calculate overall accuracy of current classifier on all splits
            double accuracy = calculateAccuracy(predictions);

            // Print current classifier's name and accuracy in a complicated,
            // but nice-looking way.
            System.out.println(String.format("%.2f%%", accuracy));
        }
        models[j].buildClassifier(allData[j]);
        Model.save(models[j], models[j].getClass().getSimpleName());
    }

    //test instance
    Instances trainingSet = new Instances("Rel", getFvWekaAttributes(data), 10);
    trainingSet.setClassIndex(data.numAttributes() - 1);

    Instance testInstance = new Instance(data.numAttributes());
    for (int i = 0; i < data.numAttributes() - 1; i++) {
        System.out.print("Masukkan " + data.attribute(i).name() + " : ");
        Scanner in = new Scanner(System.in);
        String att = in.nextLine();
        if (isNumeric(att)) {
            att = fil.convertToFit(att, data, i);
        }
        testInstance.setValue(data.attribute(i), att);
    }

    //        System.out.println(testInstance);
    //        System.out.println(testInstance.classAttribute().index());

    trainingSet.add(testInstance);

    Classifier Id3 = Model.load("Id3");
    Classifier J48 = Model.load("J48");
    Classifier myID3 = Model.load("myID3");
    Classifier MyC45 = Model.load("myC45");
    //        Classifier MyId3 = Model.load("myID3");

    Instance A = trainingSet.instance(0);
    Instance B = trainingSet.instance(0);
    Instance C = trainingSet.instance(0);
    Instance D = trainingSet.instance(0);

    //test with ID3 WEKA
    A.setClassValue(Id3.classifyInstance(trainingSet.instance(0)));
    System.out.println("Id3 Weka : " + A);

    //test with C4.5 WEKA
    B.setClassValue(J48.classifyInstance(trainingSet.instance(0)));
    System.out.println("C4.5 Weka : " + B);

    //test with my C4.5
    C.setClassValue(MyC45.classifyInstance(trainingSet.instance(0)));
    System.out.println("My C4.5 : " + C);

    //test with my ID3
    D.setClassValue(myID3.classifyInstance(trainingSet.instance(0)));
    System.out.println("My ID3 : " + D);
}

From source file:tubes1.myClassifiers.myC45.java

private Instances filterInstanceWithAttributeValue(Instances instances, Attribute attribute, String value) {
    Instances newInstances = new Instances(instances);
    newInstances.delete();
    int numInstances = instances.numInstances();
    for (int i = 0; i < numInstances; i++) {
        Instance instance = instances.instance(i);
        if (instance.stringValue(attribute).equals(value)) {
            newInstances.add(instance);
        }
    }
    return newInstances;
}

From source file:tubes2.myClusterers.myKMeans.java

public String centroidsToString() {
    Instances centroidInstances = new Instances(template, 0);
    for (int i = 0; i < k; i++) {
        centroidInstances.add(centroids[i]);
    }
    return centroidInstances.toString();
}

From source file:tucil.dua.ai.TucilDuaAi.java

public static void addInstance(Instances d) {
    Scanner input = new Scanner(System.in);
    Instance temp = new DenseInstance(d.numAttributes());
    for (int i = 0; i < (d.numAttributes() - 1); i++) {
        System.out.print(d.attribute(i) + " = ");
        Double x = input.nextDouble();
        temp.setValue(d.attribute(i), x);
    }
    System.out.print(d.attribute(d.numAttributes() - 1) + " = ");
    String x = input.next();
    temp.setValue(d.attribute(d.numAttributes() - 1), x);
    d.add(temp);
}

From source file:uzholdem.classifier.OnlineMultilayerPerceptron.java

License:Open Source License

public void trainModel(Instances aInstances, int numIterations) throws Exception {

    // setup m_instances
    if (this.m_instances == null) {

        this.m_instances = new Instances(aInstances, 0, aInstances.size());
    }
    ///////////

    if (m_useNomToBin) {
        if (this.m_nominalToBinaryFilter == null) {
            m_nominalToBinaryFilter = new NominalToBinary();
            try {
                m_nominalToBinaryFilter.setInputFormat(m_instances);
            } catch (Exception e) {
                // TODO Auto-generated catch block
                e.printStackTrace();
                return;
            }
        }
        aInstances = Filter.useFilter(aInstances, m_nominalToBinaryFilter);
    }

    Instances epochInstances = new Instances(aInstances);
    epochInstances.randomize(new Random());

    // reserve roughly 30% of the data as a validation set
    int numVal = (int) (aInstances.size() * 0.3);
    Instances valSet = new Instances(aInstances, numVal);
    for (int i = 0; i < numVal; i++) {
        valSet.add(epochInstances.instance(0));
        epochInstances.delete(0);
    }

    m_instances = epochInstances;
    double right = 0;
    double driftOff = 0;
    double lastRight = Double.POSITIVE_INFINITY;
    double bestError = Double.POSITIVE_INFINITY;
    double tempRate;
    double totalWeight = 0;
    double totalValWeight = 0;
    double origRate = m_learningRate; //only used for when reset

    int numInVal = valSet.numInstances();

    for (int noa = numInVal; noa < m_instances.numInstances(); noa++) {
        if (!m_instances.instance(noa).classIsMissing()) {
            totalWeight += m_instances.instance(noa).weight();
        }
    }
    if (m_valSize != 0) {
        for (int noa = 0; noa < valSet.numInstances(); noa++) {
            if (!valSet.instance(noa).classIsMissing()) {
                totalValWeight += valSet.instance(noa).weight();
            }
        }
    }
    m_stopped = false;

    for (int noa = 1; noa < 50 + 1; noa++) {
        right = 0;
        for (int nob = numInVal; nob < m_instances.numInstances(); nob++) {
            m_currentInstance = m_instances.instance(nob);

            if (!m_currentInstance.classIsMissing()) {

                //this is where the network updating (and training) occurs, for the
                //training set
                resetNetwork();
                calculateOutputs();
                tempRate = m_learningRate * m_currentInstance.weight();
                if (m_decay) {
                    tempRate /= noa;
                }

                right += (calculateErrors() / m_instances.numClasses()) * m_currentInstance.weight();
                updateNetworkWeights(tempRate, m_momentum);

            }

        }
        right /= totalWeight;
        if (Double.isInfinite(right) || Double.isNaN(right)) {

            m_instances = null;
            throw new Exception("Network cannot train. Try restarting with a" + " smaller learning rate.");

        }

        ////////////////////////do validation testing if applicable
        if (m_valSize != 0) {
            right = 0;
            for (int nob = 0; nob < valSet.numInstances(); nob++) {
                m_currentInstance = valSet.instance(nob);
                if (!m_currentInstance.classIsMissing()) {
                    //this is where the network updating occurs, for the validation set
                    resetNetwork();
                    calculateOutputs();
                    right += (calculateErrors() / valSet.numClasses()) * m_currentInstance.weight();
                    //note 'right' could be calculated here just using
                    //the calculated output values. This would be faster,
                    //but less modular
                }

            }

            if (right < lastRight) {
                if (right < bestError) {
                    bestError = right;
                    // save the network weights at this point
                    for (int noc = 0; noc < m_numClasses; noc++) {
                        m_outputs[noc].saveWeights();
                    }
                    driftOff = 0;
                }
            } else {
                driftOff++;
            }
            lastRight = right;
            if (driftOff > m_driftThreshold || noa + 1 >= m_numEpochs) {
                for (int noc = 0; noc < m_numClasses; noc++) {
                    m_outputs[noc].restoreWeights();
                }
                m_accepted = true;
            }
            right /= totalValWeight;
        }
        m_epoch = noa;
        m_error = right;
        //shows what the neural net is up to if a GUI exists.

        if (m_accepted) {
            m_instances = new Instances(m_instances, 0);
            return;
        }
    }

}

From source file:view.centerPanels.ClusteringPredictPnlCenter.java

private void btnStartActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_btnStartActionPerformed

    Instances test = new Instances(Data.getInstance().getInstances());
    test.delete();

    //checks whether the values were entered correctly
    //if something was not entered correctly, a JOptionPane pops up
    //saying what was entered badly, e.g. the attribute name
    for (int i = 0; i < fields.size(); i++) {
        String text = fields.get(i).getText().trim();

        //skips empty fields, because for clustering they are simply ignored
        //(that is not the case for classification)
        if (!text.equals("")) {

            if (test.attribute(i).isNominal()) {
                boolean correct = false;
                for (int j = 0; j < test.attribute(i).numValues(); j++) {
                    if (text.equals(test.attribute(i).value(j))) {
                        correct = true;
                    }
                }
                if (!correct) {
                    JOptionPane.showMessageDialog(this,
                            "Incorrect format for attribute " + test.attribute(i).name());
                    break;
                }
            }

            if (test.attribute(i).isNumeric()) {
                try {
                    double value = Double.parseDouble(text);
                } catch (Exception e) {
                    JOptionPane.showMessageDialog(this,
                            "Incorrect format for attribute " + test.attribute(i).name());
                    break;
                }
            }

        }
    }

    int numAttributes = test.numAttributes();

    Instance instance = new Instance(numAttributes);

    //this remove is only needed for clustering
    String remove = "";

    boolean hasRemove = false;
    for (int i = 0; i < fields.size(); i++) {
        String text = fields.get(i).getText().trim();

        //an empty string must not appear here
        if (text.equals("")) {
            remove = remove + (i + 1) + ",";
            hasRemove = true;
        } else {
            try {
                double value = Double.parseDouble(text);
                instance.setValue(i, value);

            } catch (Exception e) {

                instance.setValue(i, text);
            }
        }

    }
    if (hasRemove) {
        remove = remove.substring(0, remove.length() - 1);
    }

    //the Instances object here is called test; this is how the single instance is added
    test.add(instance);
    //now do your evaluation; the rest here is clustering-specific

    Remove removeFilter = new Remove();
    removeFilter.setAttributeIndices(remove);

    FilteredClusterer filteredClusterer = new FilteredClusterer();
    try {

        filteredClusterer.setClusterer(kMeans);
        filteredClusterer.setFilter(removeFilter);
        filteredClusterer.buildClusterer(Data.getInstance().getInstances());

    } catch (Exception e) {

    }

    ClusterEvaluation eval = new ClusterEvaluation();
    eval.setClusterer(filteredClusterer);
    try {
        eval.evaluateClusterer(test);
    } catch (Exception ex) {
        Logger.getLogger(ClusteringPredictPnlCenter.class.getName()).log(Level.SEVERE, null, ex);
    }

    String[] results = eval.clusterResultsToString().split("\n");

    String cluster = results[results.length - 1].split(" ")[0];

    textAreaResult.setText("This instance belongs to \ncluster number:  " + cluster + ".\n\n"
            + "Take a look at the visualization \nfor a better feeling about \nthis instance");

    test.delete();

}

From source file:wedt.project.Common.java

public Instances getPrepapredSet(File file) {
    try {
        CSVLoader csvLoader = new CSVLoader();
        csvLoader.setSource(file);
        Instances loadedInstances = csvLoader.getDataSet();
        Instances instances = getEmptyInstances("instances");

        for (Instance currentInstance : loadedInstances) {
            Instance tmpInstance = extractFeature(currentInstance);
            tmpInstance.setDataset(instances);
            instances.add(tmpInstance);
        }

        return instances;
    } catch (IOException e) {
        System.out.println("Blad w przygotowywaniu zbioru");
        System.out.println(e.toString());
    }

    return null;
}

From source file:wekimini.DataManager.java

public Instance getClassifiableInstanceForOutput(double[] vals, int which) {
    double data[] = new double[numMetaData + numInputs + numOutputs];
    System.arraycopy(vals, 0, data, numMetaData, vals.length);
    /* for (int i = 0; i < numFeatures; i++) {
     data[numMetaData + i] = d[i];
     } */

    Instance instance = new Instance(1.0, data);
    Instances tmp = new Instances(dummyInstances);
    tmp.add(instance);
    try {
        tmp = Filter.useFilter(tmp, outputFilters[which]);
        tmp.setClassIndex(tmp.numAttributes() - 1);
        instance = tmp.firstInstance();
    } catch (Exception ex) {
        logger.log(Level.SEVERE, "Could not filter");
        Logger.getLogger(DataManager.class.getName()).log(Level.SEVERE, null, ex);
    }
    tmp.setClassIndex(tmp.numAttributes() - 1);

    return instance;
}