Example usage for weka.core Instances instance

List of usage examples for weka.core Instances instance

Introduction

This page collects example usages of the weka.core Instances method instance(int).

Prototype



public Instance instance(int index)

Document

Returns the instance at the given position.
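
A minimal sketch of the method in isolation (the class name InstanceAccessExample and the iris.arff path are placeholders):

import weka.core.Instance;
import weka.core.Instances;
import weka.core.converters.ConverterUtils.DataSource;

public class InstanceAccessExample {
    public static void main(String[] args) throws Exception {
        Instances data = new DataSource("iris.arff").getDataSet(); // placeholder path
        // instance(int) returns the Instance at the given 0-based position.
        for (int i = 0; i < data.numInstances(); i++) {
            Instance inst = data.instance(i);
            System.out.println(i + ": " + inst);
        }
    }
}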

Usage

From source file: Pair.java

License: Open Source License

/**
 * Sets the weights for the next iteration.
 */
protected double setWeights(Instances trainData, Classifier cls, double sourceFraction, int numSourceInstances,
        boolean isFinal) throws Exception {

    Enumeration enu = trainData.enumerateInstances();
    int instNum = 0;
    double[] errors = new double[trainData.numInstances()];
    double max = 0;
    int i = 0;
    while (enu.hasMoreElements()) {
        Instance instance = (Instance) enu.nextElement();
        errors[i] = Math.abs(cls.classifyInstance(instance) - instance.classValue());
        if (i >= numSourceInstances && errors[i] > max)
            max = errors[i];
        i++;
    }

    if (max == 0)
        return -1;

    //get avg loss
    double loss = 0;
    double initialTWeightSum = 0;
    double allWeightSum = 0;
    for (int j = 0; j < errors.length; j++) {
        errors[j] /= max;
        Instance instance = trainData.instance(j);
        loss += instance.weight() * errors[j];
        if (j >= numSourceInstances) {
            //loss += instance.weight() * errors[j];
            initialTWeightSum += instance.weight();
        }
        allWeightSum += instance.weight();
    }
    //loss /= weightSum;
    loss /= allWeightSum;

    targetWeight = initialTWeightSum / allWeightSum;
    /*
    if (!isFinal){
    System.out.println("Target weight: " + targetWeight);
    System.out.println("max: " + max);
    System.out.println("avg error: " + loss * max);
    System.out.println("Loss: " + loss);
    }
    */

    double beta;

    if (fixedBeta)
        beta = 0.4 / 0.6;
    else {
        if (isFinal && loss > 0.499)//bad, so quit
            //return -1;
            loss = 0.499; //since we're doing CV, no reason to quit

        beta = loss / (1 - loss); //or just use beta = .4/.6, since beta isn't as meaningful in AdaBoost.R2;
    }

    double tWeightSum = 0;
    if (!isFinal) {
        //need to find b so that weight of source be sourceFraction*num source
        //do binary search
        double goal = sourceFraction * errors.length;
        double bMin = .001;
        double bMax = .999;
        double b;
        double sourceSum = 0;
        while (bMax - bMin > .001) {
            b = (bMax + bMin) / 2;
            double sum = 0;
            for (int j = 0; j < numSourceInstances; j++) {
                Instance instance = trainData.instance(j);
                sum += Math.pow(b, errors[j]) * instance.weight();
            }
            if (sum > goal)
                bMax = b;
            else
                bMin = b;
        }
        b = (bMax + bMin) / 2;
        //System.out.println(b);         
        for (int j = 0; j < numSourceInstances; j++) {
            Instance instance = trainData.instance(j);
            instance.setWeight(instance.weight() * Math.pow(b, errors[j])); // use the midpoint b found by the search
            sourceSum += instance.weight();
        }

        //now adjust target weights
        goal = errors.length - sourceSum;
        double m = goal / initialTWeightSum;

        for (int j = numSourceInstances; j < errors.length; j++) {
            Instance instance = trainData.instance(j);
            instance.setWeight(instance.weight() * m);
        }
    } else {//final
        if (!doUpsource) { //modify only target weights
            for (int j = numSourceInstances; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * Math.pow(beta, -errors[j]));
                tWeightSum += instance.weight();
            }

            double weightSumInverse = initialTWeightSum / tWeightSum;
            for (int j = numSourceInstances; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * weightSumInverse);
            }
        } else { //modify all weights
            for (int j = 0; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * Math.pow(beta, -errors[j]));
                tWeightSum += instance.weight();
            }

            double weightSumInverse = errors.length / tWeightSum;
            for (int j = 0; j < errors.length; j++) {
                Instance instance = trainData.instance(j);
                instance.setWeight(instance.weight() * weightSumInverse);
            }
        }

    }

    return beta;
}
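
The update above follows the AdaBoost.R2 pattern: errors are scaled into [0, 1], beta = loss / (1 - loss), and each weight is multiplied by beta^(-error), which boosts hard instances when beta < 1. A minimal standalone sketch of just that update (reweight is a hypothetical helper, assuming weka.core.Instance and weka.core.Instances are imported):

static void reweight(Instances data, double[] errors, double beta) {
    // Multiply each weight by beta^(-error); with beta < 1 this increases
    // the weight of instances the classifier got wrong.
    double sum = 0;
    for (int j = 0; j < data.numInstances(); j++) {
        Instance inst = data.instance(j);
        inst.setWeight(inst.weight() * Math.pow(beta, -errors[j]));
        sum += inst.weight();
    }
    // Rescale so the total weight again equals the number of instances.
    double scale = data.numInstances() / sum;
    for (int j = 0; j < data.numInstances(); j++) {
        Instance inst = data.instance(j);
        inst.setWeight(inst.weight() * scale);
    }
}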

From source file: classificationPLugin.java

private void ClassifyActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_ClassifyActionPerformed
    this.name = txtdirecotry2.getText();
    System.out.println(this.name);
    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(this.name));
        Instances data = loader.getDataSet();
        System.out.println(data);

        // save ARFF
        String arffile = this.name + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }
    try {
        FileReader reader = new FileReader(this.name + ".arff");
        BufferedReader br = new BufferedReader(reader);
        // 'instance' here is evidently a Swing text component (e.g. a JTextArea)
        // displaying the ARFF file, not a weka.core.Instance: read(Reader, Object)
        // and requestFocus() are JTextComponent/Component methods.
        instance.read(br, null);
        br.close();
        instance.requestFocus();

    } catch (Exception e2) {
        System.out.println(e2);
    }
    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        // Note: comparing newData's header against itself always succeeds; a
        // meaningful check would compare it against the model's training header.
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();

        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            // Note: this rewrites the same file on every loop iteration;
            // writing it once after the loop would suffice.
            PrintWriter out = null;
            try {
                out = new PrintWriter(this.name + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }

        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name + ".arff" + "\nhas been analysed, and it is composed by-> \npunctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        classi.setText(
                "Your file:" + this.name + ".arff" + "\nhas been analysed, and it is composed by: \npunctua: "
                        + p + ", rods: " + r + ", networks: " + n);
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.run("Clear Results");
    IJ.run("Clear Results");
    IJ.run("Close All", "");

    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");

}
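
As noted above, equalHeaders is meant to compare two different datasets, typically a training set against incoming test data. A sketch of the intended check, using weka.core.converters.ConverterUtils.DataSource (train.arff and test.arff are placeholder files):

Instances train = new DataSource("train.arff").getDataSet();
Instances test = new DataSource("test.arff").getDataSet();
train.setClassIndex(train.numAttributes() - 1);
test.setClassIndex(test.numAttributes() - 1);
if (!train.equalHeaders(test)) {
    throw new IllegalArgumentException("Train and test are not compatible!");
}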

From source file: ClusteringClass.java

public static void main(String[] args) throws Exception {
    String filename = "C:\\Users\\Daniele\\Desktop\\Humoradio2.csv";

    try {
        FileWriter fw = new FileWriter(filename);
        Class.forName("org.apache.derby.jdbc.ClientDriver").newInstance();
        Connection conn = DriverManager.getConnection("jdbc:derby://localhost:1527/HumoRadioDB", "dani",
                "dani");

        String query = "SELECT * FROM SONG_RATING2";
        Statement stmt = conn.createStatement();
        ResultSet rs = stmt.executeQuery(query);

        for (int i = 1; i < 23; i++) {
            if (i != 2) {
                ResultSetMetaData rsmd = rs.getMetaData();
                String name = rsmd.getColumnName(i);
                fw.append(name);
                if (i != 22) {
                    fw.append(',');
                } else {
                    fw.append('\n');
                }
            }
        }

        String query1 = "SELECT * FROM SONG_DATA";
        Statement stmt1 = conn.createStatement();
        ResultSet rs1 = stmt1.executeQuery(query1);

        String[] titles = new String[150];

        for (int ii = 0; ii < 150; ii++) {
            rs1.next();
            titles[ii] = rs1.getString("TITLE");
        }

        while (rs.next()) {
            for (int i = 1; i < 23; i++) {
                if (i != 2) {
                    fw.append(rs.getString(i)); // append the column value; without this the CSV rows are empty
                    if (i != 22) {
                        fw.append(',');
                    } else {
                        fw.append('\n');
                    }
                }
            }
        }

        fw.flush();
        fw.close();
        conn.close();
        System.out.println("CSV File is created successfully.");

        /*
         Clustering part
         */
        DataSource source = new DataSource("C:\\Users\\Daniele\\Desktop\\Humoradio2.csv");
        Instances train = source.getDataSet();

        /*
         Apply Weka's Remove filter so that one attribute is not
         considered by the clustering algorithm.
         */
        Remove filter = new Remove();
        filter.setAttributeIndices("1");
        filter.setInputFormat(train);
        Instances train2 = Filter.useFilter(train, filter);
        System.out.println("Nominal attributes removed from computation.");

        /*
         Apply Weka's Normalize filter to normalize our dataset.
         */
        Normalize norm = new Normalize();
        norm.setInputFormat(train2);
        Instances train3 = Filter.useFilter(train2, norm);
        System.out.println("Dataset normalized.");

        /*
         First Clustering Algorithm
         */
        EuclideanDistance df = new EuclideanDistance();
        SimpleKMeans clus1 = new SimpleKMeans();
        int k = 10;
        clus1.setNumClusters(k);
        clus1.setDistanceFunction(df);
        clus1.setPreserveInstancesOrder(true);
        clus1.buildClusterer(train3);

        /*
         First Evaluation
         */
        ClusterEvaluation eval1 = new ClusterEvaluation();
        eval1.setClusterer(clus1);
        eval1.evaluateClusterer(train3);
        System.out.println(eval1.clusterResultsToString());

        int[] assignments = clus1.getAssignments();
        String[][] dati = new String[150][4];

        for (int kk = 0; kk < 150; kk++) {
            dati[kk][0] = String.valueOf(kk);
            dati[kk][1] = train2.instance(kk).toString();
            dati[kk][2] = String.valueOf(assignments[kk]);
            dati[kk][3] = titles[kk];
        }

        for (int w = 0; w < 10; w++) {
            System.out.println();
            for (int i = 0; i < 150; i++) {
                if (dati[i][2].equals(String.valueOf(w))) {
                    for (int j = 0; j < 4; j++) {
                        if (j != 3) {
                            System.out.print(dati[i][j] + "-> \t");
                        } else {
                            System.out.println(dati[i][j]);
                        }
                    }
                }
            }
        }

        /*first graph  
                
         PlotData2D predData = ClustererPanel.setUpVisualizableInstances(train, eval1);
         //String name = (new SimpleDateFormat("HH:mm:ss - ")).format(new Date());
         String name = "";
         String cname = clus1.getClass().getName();
         if (cname.startsWith("weka.clusterers."))
         name += cname.substring("weka.clusterers.".length());
         else
         name += cname;
                
                
         VisualizePanel vp = new VisualizePanel();
         vp.setName(name + " (" + train.relationName() + ")");
         predData.setPlotName(name + " (" + train.relationName() + ")");
         vp.addPlot(predData);
                
         String plotName = vp.getName();
         final javax.swing.JFrame jf = new javax.swing.JFrame("Weka Clusterer Visualize: " + plotName);
         jf.setSize(500,400);
         jf.getContentPane().setLayout(new BorderLayout());
         jf.getContentPane().add(vp, BorderLayout.CENTER);
         jf.dispose();
         jf.addWindowListener(new java.awt.event.WindowAdapter() {
         public void windowClosing(java.awt.event.WindowEvent e) {
         jf.dispose();
         }
         });
         jf.setVisible(true);
                
         end first graph
         */

        /*
         Second Clustering Algorithm
         */

        System.out.println();

        DBSCAN clus3 = new DBSCAN();
        clus3.setEpsilon(0.7);
        clus3.setMinPoints(2);
        clus3.buildClusterer(train3);

        /*
         Second Evaluation
         */
        ClusterEvaluation eval3 = new ClusterEvaluation();
        eval3.setClusterer(clus3);
        eval3.evaluateClusterer(train3);
        System.out.println(eval3.clusterResultsToString());

        double[] assignments3 = eval3.getClusterAssignments();
        String[][] dati3 = new String[150][4];

        for (int kk = 0; kk < 150; kk++) {
            dati3[kk][0] = String.valueOf(kk);
            dati3[kk][1] = train2.instance(kk).toString();
            dati3[kk][2] = String.valueOf(assignments3[kk]);
            dati3[kk][3] = titles[kk];
        }

        for (int w = 0; w < eval3.getNumClusters(); w++) {
            System.out.println();
            for (int i = 0; i < 150; i++) {
                if (Double.parseDouble(dati3[i][2]) == w) {
                    for (int j = 0; j < 4; j++) {
                        if (j != 3) {
                            System.out.print(dati3[i][j] + "-> \t");
                        } else {
                            System.out.println(dati3[i][j]);
                        }
                    }
                }
            }
        }
        System.out.println();
        for (int i = 0; i < 150; i++) {
            if (Double.parseDouble(dati3[i][2]) == -1.0) {
                for (int j = 0; j < 4; j++) {
                    if (j != 3) {
                        System.out.print(dati3[i][j] + "-> \t");
                    } else {
                        System.out.println(dati3[i][j]);
                    }
                }
            }
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}
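
Note that clus1.getAssignments() works here only because setPreserveInstancesOrder(true) was set before buildClusterer; without it SimpleKMeans does not retain per-instance cluster assignments. The printable rows use train2.instance(kk) rather than train3, presumably so the original (un-normalized) attribute values are shown.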

From source file: REPRandomTree.java

License: Open Source License

/**
 * Builds classifier.
 * 
 * @param data the data to train with
 * @throws Exception if building fails
 */
public void buildClassifier(Instances data) throws Exception {

    // can classifier handle the data?
    getCapabilities().testWithFail(data);

    // remove instances with missing class
    data = new Instances(data);
    data.deleteWithMissingClass();

    Random random = new Random(m_Seed);

    m_zeroR = null;
    if (data.numAttributes() == 1) {
        m_zeroR = new ZeroR();
        m_zeroR.buildClassifier(data);
        return;
    }

    // Randomize and stratify
    data.randomize(random);
    if (data.classAttribute().isNominal()) {
        data.stratify(m_NumFolds);
    }

    // Split data into training and pruning set
    Instances train = null;
    Instances prune = null;
    if (!m_NoPruning) {
        train = data.trainCV(m_NumFolds, 0, random);
        prune = data.testCV(m_NumFolds, 0);
    } else {
        train = data;
    }

    // Create array of sorted indices and weights
    int[][][] sortedIndices = new int[1][train.numAttributes()][0];
    double[][][] weights = new double[1][train.numAttributes()][0];
    double[] vals = new double[train.numInstances()];
    for (int j = 0; j < train.numAttributes(); j++) {
        if (j != train.classIndex()) {
            weights[0][j] = new double[train.numInstances()];
            if (train.attribute(j).isNominal()) {

                // Handling nominal attributes. Putting indices of
                // instances with missing values at the end.
                sortedIndices[0][j] = new int[train.numInstances()];
                int count = 0;
                for (int i = 0; i < train.numInstances(); i++) {
                    Instance inst = train.instance(i);
                    if (!inst.isMissing(j)) {
                        sortedIndices[0][j][count] = i;
                        weights[0][j][count] = inst.weight();
                        count++;
                    }
                }
                for (int i = 0; i < train.numInstances(); i++) {
                    Instance inst = train.instance(i);
                    if (inst.isMissing(j)) {
                        sortedIndices[0][j][count] = i;
                        weights[0][j][count] = inst.weight();
                        count++;
                    }
                }
            } else {

                // Sorted indices are computed for numeric attributes
                for (int i = 0; i < train.numInstances(); i++) {
                    Instance inst = train.instance(i);
                    vals[i] = inst.value(j);
                }
                sortedIndices[0][j] = Utils.sort(vals);
                for (int i = 0; i < train.numInstances(); i++) {
                    weights[0][j][i] = train.instance(sortedIndices[0][j][i]).weight();
                }
            }
        }
    }

    // Compute initial class counts
    double[] classProbs = new double[train.numClasses()];
    double totalWeight = 0, totalSumSquared = 0;
    for (int i = 0; i < train.numInstances(); i++) {
        Instance inst = train.instance(i);
        if (data.classAttribute().isNominal()) {
            classProbs[(int) inst.classValue()] += inst.weight();
            totalWeight += inst.weight();
        } else {
            classProbs[0] += inst.classValue() * inst.weight();
            totalSumSquared += inst.classValue() * inst.classValue() * inst.weight();
            totalWeight += inst.weight();
        }
    }
    m_Tree = new Tree();
    double trainVariance = 0;
    if (data.classAttribute().isNumeric()) {
        trainVariance = m_Tree.singleVariance(classProbs[0], totalSumSquared, totalWeight) / totalWeight;
        classProbs[0] /= totalWeight;
    }

    // Build tree
    m_Tree.buildTree(sortedIndices, weights, train, totalWeight, classProbs, new Instances(train, 0), m_MinNum,
            m_MinVarianceProp * trainVariance, 0, m_MaxDepth, m_FeatureFrac, random);

    // Insert pruning data and perform reduced error pruning
    if (!m_NoPruning) {
        m_Tree.insertHoldOutSet(prune);
        m_Tree.reducedErrorPrune();
        m_Tree.backfitHoldOutSet();
    }
}

From source file: LabeledItemSet.java

License: Open Source License

/**
 * Updates the counter of each item set.
 * @param itemSets the item sets
 * @param instancesNoClass instances without the class attribute
 * @param instancesClass the values of the class attribute, sorted according to the instances
 */
public static void upDateCounters(FastVector itemSets, Instances instancesNoClass, Instances instancesClass) {

    for (int i = 0; i < instancesNoClass.numInstances(); i++) {
        Enumeration enu = itemSets.elements();
        while (enu.hasMoreElements())
            ((LabeledItemSet) enu.nextElement()).upDateCounter(instancesNoClass.instance(i),
                    instancesClass.instance(i));
    }

}
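
The two Instances arguments are the same dataset split column-wise: one copy without the class attribute and a one-attribute copy holding only the class values. A sketch of producing that split with weka.filters.unsupervised.attribute.Remove and weka.filters.Filter (assumes a dataset variable data whose class column is last and which has no class index set):

Remove dropClass = new Remove();
dropClass.setAttributeIndices("last");     // drop the class column
dropClass.setInputFormat(data);
Instances instancesNoClass = Filter.useFilter(data, dropClass);

Remove keepClass = new Remove();
keepClass.setAttributeIndices("last");
keepClass.setInvertSelection(true);        // keep only the class column
keepClass.setInputFormat(data);
Instances instancesClass = Filter.useFilter(data, keepClass);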

From source file: dialog1.java

private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed

    try {
        CSVLoader loader = new CSVLoader();
        loader.setSource(new File(txtfilename.getText() + "_complete.csv"));
        Instances data = loader.getDataSet();
        System.out.println(data);

        // save ARFF
        String arffile = this.name3 + ".arff";
        System.out.println(arffile);
        ArffSaver saver = new ArffSaver();
        saver.setInstances(data);
        saver.setFile(new File(arffile));
        saver.writeBatch();
    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    Instances data;
    try {
        data = new Instances(new BufferedReader(new FileReader(this.name3 + ".arff")));
        Instances newData = null;
        Add filter;
        newData = new Instances(data);
        filter = new Add();
        filter.setAttributeIndex("last");
        filter.setNominalLabels("rods,punctua,networks");
        filter.setAttributeName("target");
        filter.setInputFormat(newData);
        newData = Filter.useFilter(newData, filter);
        System.out.print(newData);
        Vector vec = new Vector();
        newData.setClassIndex(newData.numAttributes() - 1);
        // Note: comparing newData's header against itself always succeeds; a
        // meaningful check would compare it against the model's training header.
        if (!newData.equalHeaders(newData)) {
            throw new IllegalArgumentException("Train and test are not compatible!");
        }

        /*URL urlToModel = this.getClass().getResource("/" + "Final.model");
        InputStream stream = urlToModel.openStream();*/
        InputStream stream = this.getClass().getResourceAsStream("/" + "Final.model");
        Classifier cls = (Classifier) weka.core.SerializationHelper.read(stream);
        System.out.println("PROVANT MODEL.classifyInstance");
        for (int i = 0; i < newData.numInstances(); i++) {
            double pred = cls.classifyInstance(newData.instance(i));
            double[] dist = cls.distributionForInstance(newData.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(newData.classAttribute().value((int) pred) + " - ");
            //txtarea2.setText(Utils.arrayToString(dist));

            System.out.println(Utils.arrayToString(dist));

            vec.add(newData.classAttribute().value((int) pred));
            //txtarea2.append(Utils.arrayToString(newData.classAttribute().value((int) pred)));
            //this.target2.add((i + 1) + " -);
            //this.target.add(newData.classAttribute().value((int) pred));
            //for (String s : this.list) {
            //this.target2 += s + ",";

        }
        int p = 0, n = 0, r = 0;

        //txtarea2.append(Utils.arrayToString(this.target));
        for (Object vec1 : vec) {
            if ("rods".equals(vec1.toString())) {
                r = r + 1;
            }
            if ("punctua".equals(vec1.toString())) {
                p = p + 1;
            }
            if ("networks".equals(vec1.toString())) {
                n = n + 1;
            }

            PrintWriter out = null; // rewritten each iteration; writing once after the loop would suffice
            try {

                out = new PrintWriter(this.name3 + "_morphology.txt");
                out.println(vec);
                out.close();
            } catch (Exception ex) {
                ex.printStackTrace();
            }
            //System.out.println(vec.get(i));
        }
        System.out.println("VECTOR-> punctua: " + p + ", rods: " + r + ", networks: " + n);
        IJ.showMessage(
                "Your file:" + this.name3 + ".arff" + "\nhas been analysed, and it is composed by-> punctua: "
                        + p + ", rods: " + r + ", networks: " + n);
        //txtarea2.setText("Your file:" + this.name3 + ".arff"
        //+ "\nhas been analysed, and it is composed by-> punctua: " + p + ", rods: " + r + ", networks: " + n
        //+ "\n"
        //+ "\nAnalyse complete");
        //txtarea.setText("Analyse complete");

    } catch (IOException ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    } catch (Exception ex) {
        Logger.getLogger(MachinLearningInterface.class.getName()).log(Level.SEVERE, null, ex);
    }

    IJ.run("Clear Results");
    IJ.run("Clear Results");
    IJ.run("Close All", "");

    if (WindowManager.getFrame("Results") != null) {
        IJ.selectWindow("Results");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("Summary") != null) {
        IJ.selectWindow("Summary");
        IJ.run("Close");
    }
    if (WindowManager.getFrame("ROI Manager") != null) {
        IJ.selectWindow("ROI Manager");
        IJ.run("Close");
    }
    IJ.run("Close All", "roiManager");
    IJ.run("Close All", "");
    setVisible(false);
    dispose();
}

From source file: MLKNNCS.java

License: Open Source License

/**
 * Computing Cond and CondN Probabilities for each class of the training set
 *
 * @throws Exception Potential exception thrown. To be handled in an upper level.
 */
private void ComputeCond() throws Exception {
    int[][] temp_Ci = new int[numLabels][numOfNeighbors + 1];
    int[][] temp_NCi = new int[numLabels][numOfNeighbors + 1];

    for (int i = 0; i < train.numInstances(); i++) {

        Instances knn = new Instances(lnn.kNearestNeighbours(train.instance(i), numOfNeighbors));

        // now compute values of temp_Ci and temp_NCi for every class label
        for (int j = 0; j < numLabels; j++) {

            int aces = 0; // num of aces in Knn for j
            for (int k = 0; k < numOfNeighbors; k++) {
                double value = Double.parseDouble(
                        train.attribute(labelIndices[j]).value((int) knn.instance(k).value(labelIndices[j])));
                if (Utils.eq(value, 1.0)) {
                    aces++;
                }
            }
            // raise the counter of temp_Ci[j][aces] and temp_NCi[j][aces] by 1
            if (Utils.eq(Double.parseDouble(
                    train.attribute(labelIndices[j]).value((int) train.instance(i).value(labelIndices[j]))),
                    1.0)) {
                temp_Ci[j][aces]++;
            } else {
                temp_NCi[j][aces]++;
            }
        }
    }

    // compute CondProbabilities[i][..] for labels based on temp_Ci[]
    for (int i = 0; i < numLabels; i++) {
        int temp1 = 0;
        int temp2 = 0;
        for (int j = 0; j < numOfNeighbors + 1; j++) {
            temp1 += temp_Ci[i][j];
            temp2 += temp_NCi[i][j];
        }
        for (int j = 0; j < numOfNeighbors + 1; j++) {
            CondProbabilities[i][j] = (smooth + temp_Ci[i][j]) / (smooth * (numOfNeighbors + 1) + temp1);
            CondNProbabilities[i][j] = (smooth + temp_NCi[i][j]) / (smooth * (numOfNeighbors + 1) + temp2);
        }
    }
}
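
The final loop applies Laplace smoothing: CondProbabilities[i][j] = (smooth + temp_Ci[i][j]) / (smooth * (numOfNeighbors + 1) + sum over j of temp_Ci[i][j]), i.e. the smoothed fraction of training instances carrying label i whose neighborhoods contained exactly j aces; CondNProbabilities[i][j] is the analogous estimate over instances that lack label i.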

From source file: MLKNNCS.java

License: Open Source License

protected MultiLabelOutput makePredictionInternal(Instance instance) throws Exception {
    double[] confidences = new double[numLabels];
    boolean[] predictions = new boolean[numLabels];

    Instances knn = null;
    try {//from w w w. j a  va  2 s .c om
        knn = new Instances(lnn.kNearestNeighbours(instance, numOfNeighbors));
    } catch (Exception ex) {
        Logger.getLogger(MLKNNCS.class.getName()).log(Level.SEVERE, null, ex);
    }
    int trueCount = 0;
    for (int i = 0; i < numLabels; i++) {
        // compute sum of aces in KNN
        int aces = 0; // num of aces in Knn for i
        for (int k = 0; k < numOfNeighbors; k++) {
            double value = Double.parseDouble(
                    train.attribute(labelIndices[i]).value((int) knn.instance(k).value(labelIndices[i])));
            if (Utils.eq(value, 1.0)) {
                aces++;
            }
        }
        double Prob_in = PriorProbabilities[i] * CondProbabilities[i][aces];
        double Prob_out = PriorNProbabilities[i] * CondNProbabilities[i][aces];

        confidences[i] = Cost[i] * Prob_in / (Cost[i] * Prob_in + Prob_out);
        //confidences[i] = 6*Prob_in/(6*Prob_in + Prob_out);

        if (confidences[i] > 0.5) {
            predictions[i] = true;
            trueCount++;
        } else if (confidences[i] < 0.5) {
            predictions[i] = false;
        } else {
            Random rnd = new Random();
            predictions[i] = rnd.nextBoolean(); // tie at 0.5: decide at random
        }
        // ranking function
    }

    MultiLabelOutput mlo = new MultiLabelOutput(predictions, confidences);

    if (trueCount < 3) {
        double[] confidence = mlo.getConfidences();
        double[] confidenceTop4 = new double[4];
        int[] top4 = new int[4];

        Arrays.fill(top4, 0);
        Arrays.fill(confidenceTop4, 0);

        for (int i = 0; i < confidence.length; i++) {
            if (confidence[i] > confidenceTop4[0]) {
                top4[3] = top4[2];
                confidenceTop4[3] = confidenceTop4[2];
                top4[2] = top4[1];
                confidenceTop4[2] = confidenceTop4[1];
                top4[1] = top4[0];
                confidenceTop4[1] = confidenceTop4[0];
                top4[0] = i;
                confidenceTop4[0] = confidence[i];
            } else if (confidence[i] > confidenceTop4[1]) {
                top4[3] = top4[2];
                confidenceTop4[3] = confidenceTop4[2];
                top4[2] = top4[1];
                confidenceTop4[2] = confidenceTop4[1];
                top4[1] = i;
                confidenceTop4[1] = confidence[i];
            } else if (confidence[i] > confidenceTop4[2]) {
                top4[3] = top4[2];
                confidenceTop4[3] = confidenceTop4[2];
                top4[2] = i;
                confidenceTop4[2] = confidence[i];
            } else if (confidence[i] > confidenceTop4[3]) {
                top4[3] = i;
                confidenceTop4[3] = confidence[i];
            }

        }
        for (int i = trueCount; i < 4; i++) {
            if ((confidence[top4[i]] > 0.25 && i == 3) || confidence[top4[i]] > 0.2 && i < 3) {
                predictions[top4[i]] = true;
                trueCount++;
            }
        }
        if (trueCount == 0) {
            predictions[top4[0]] = true;
        }
        mlo = new MultiLabelOutput(predictions, confidences);
    }
    return mlo;
}

From source file: CJWeka.java

License: Open Source License

/**
 * Sets the m_numeric flag to reflect the type of the passed class attribute.
 * It also normalizes the attributes if applicable and sets up the info
 * needed to normalize the class. (Note that regardless of the options it
 * will fill an array with the range and base, set up to normalize all
 * attributes and the class to be between -1 and 1.)
 * @param inst the instances.
 * @return The modified instances. This needs to be done: if the attributes
 * are normalized, deep copies are made of all the instances, which must
 * be passed back out.
 */
private Instances setClassType(Instances inst) throws Exception {
    if (inst != null) {
        // x bounds
        double min = Double.POSITIVE_INFINITY;
        double max = Double.NEGATIVE_INFINITY;
        double value;
        m_attributeRanges = new double[inst.numAttributes()];
        m_attributeBases = new double[inst.numAttributes()];
        for (int noa = 0; noa < inst.numAttributes(); noa++) {
            min = Double.POSITIVE_INFINITY;
            max = Double.NEGATIVE_INFINITY;
            for (int i = 0; i < inst.numInstances(); i++) {
                if (!inst.instance(i).isMissing(noa)) {
                    value = inst.instance(i).value(noa);
                    if (value < min) {
                        min = value;
                    }
                    if (value > max) {
                        max = value;
                    }
                }
            }

            m_attributeRanges[noa] = (max - min) / 2;
            m_attributeBases[noa] = (max + min) / 2;
            if (noa != inst.classIndex() && m_normalizeAttributes) {
                for (int i = 0; i < inst.numInstances(); i++) {
                    if (m_attributeRanges[noa] != 0) {
                        inst.instance(i).setValue(noa,
                                (inst.instance(i).value(noa) - m_attributeBases[noa]) / m_attributeRanges[noa]);
                    } else {
                        inst.instance(i).setValue(noa, inst.instance(i).value(noa) - m_attributeBases[noa]);
                    }
                }
            }
        }
        if (inst.classAttribute().isNumeric()) {
            m_numeric = true;
        } else {
            m_numeric = false;
        }
    }
    return inst;
}
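
For each attribute the method stores half the spread as the range and the midpoint as the base, so (value - base) / range maps [min, max] onto [-1, 1]. For example, an attribute spanning [2, 10] gets range 4 and base 6, sending 2 to -1, 6 to 0 and 10 to 1.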

From source file: MultiClassClassifier.java

License: Open Source License

public double[][] calibratedDistributionForTestInstances(Instances test) throws Exception {
    double[][] binProbs = new double[m_Classifiers.length][test.numInstances()];
    double[][] calibratedProbs = new double[m_Classifiers.length][test.numInstances()];
    boolean[] target = new boolean[test.numInstances()];
    int prior1 = 0;
    int prior0 = 0;
    if (m_Classifiers.length == 1) {
        for (int i = 0; i < test.numInstances(); i++) {
            Instance inst = test.instance(i);
            //m_ClassFilters[0].input(inst);
            //m_ClassFilters[0].batchFinished();
            //Instance filteredInst = m_ClassFilters[i].output();

            //binProbs[0][i] = (200*m_Classifiers[0].distributionForInstance(inst)[1])-100;
            binProbs[0][i] = m_Classifiers[0].distributionForInstance(inst)[1];
            // note: '=' (not '==') is intentional; it stores the target while branching on it
            if (target[i] = (inst.classValue() == 1.0))
                prior1++;
            else
                prior0++;
        }
        calibratedProbs[0] = sigTraining(binProbs[0], target, prior1, prior0);
        return calibratedProbs;
    } else {

        double[] probs = new double[test.classAttribute().numValues()];

        if (m_Method == METHOD_1_AGAINST_1) {
            throw new Exception("Not implemented for Method 1 against 1");
            /*double[][] r = new double[inst.numClasses()][inst.numClasses()];
            double[][] n = new double[inst.numClasses()][inst.numClasses()];
                    
            for(int i = 0; i < m_ClassFilters.length; i++) 
            {
               if (m_Classifiers[i] != null) {
                  Instance tempInst = (Instance)inst.copy(); 
                  tempInst.setDataset(m_TwoClassDataset);
                  double [] current = m_Classifiers[i].distributionForInstance(tempInst);  
                  Range range = new Range(((RemoveWithValues)m_ClassFilters[i])
                    .getNominalIndices());
                  range.setUpper(m_ClassAttribute.numValues());
                  int[] pair = range.getSelection();
                  if (m_pairwiseCoupling && inst.numClasses() > 2) {
                     r[pair[0]][pair[1]] = current[0];
                     n[pair[0]][pair[1]] = m_SumOfWeights[i];
                  }
                  else {
                     if (current[0] > current[1]) {
             probs[pair[0]] += 1.0;
                     }
                     else {
             probs[pair[1]] += 1.0;
                     }
                  }
               }
            }
            if (m_pairwiseCoupling && inst.numClasses() > 2) {
              return pairwiseCoupling(n, r);
            }*/
        } else {
            // error correcting style methods
            for (int i = 0; i < m_ClassFilters.length; i++) {
                prior1 = 0;
                prior0 = 0;
                for (int k = 0; k < test.numInstances(); k++) {
                    Instance inst = test.instance(k);
                    m_ClassFilters[i].input(inst);
                    m_ClassFilters[i].batchFinished();
                    Instance filteredInst = m_ClassFilters[i].output();
                    //binProbs[i][k] = (200*m_Classifiers[i].distributionForInstance(filteredInst)[1]) - 100;
                    binProbs[i][k] = m_Classifiers[i].distributionForInstance(filteredInst)[1];

                    //System.out.println(binProbs[i][k] + " " + inst.classValue());
                    //System.out.println("Class value: " + filteredInst.classValue() + " " + filteredInst.stringValue(filteredInst.numAttributes()-1) + " " + m_Classifiers[i].distributionForInstance(filteredInst)[0] + " " + m_Classifiers[i].distributionForInstance(filteredInst)[1]);
                    if (target[k] = (filteredInst.classValue() == 1.0))
                        prior1++;
                    else
                        prior0++;

                    /*for (int j = 0; j < m_ClassAttribute.numValues(); j++)
                    {
                       if (((MakeIndicator)m_ClassFilters[i]).getValueRange().isInRange(j))
                       {
                          binProbs[j] += current[1];
                       }
                       else 
                       {
                          binProbs[j] += current[0];
                       }
                    }*/
                }
                calibratedProbs[i] = sigTraining(binProbs[i], target, prior1, prior0);
            }
            /*         for (int k = 0; k < test.numInstances(); k++)   
                     {
                        for (int i =0; i < 3; i++)
                        System.out.println(i + " " + k + " cal: " + calibratedProbs[i][k] + " " + binProbs[i][k]);
                     }
             */
        }
    }
    for (int i = 0; i < test.numInstances(); i++) {
        double sum = 0;
        for (int j = 0; j < m_Classifiers.length; j++) {
            sum += calibratedProbs[j][i];
        }
        for (int j = 0; j < m_Classifiers.length; j++)
            calibratedProbs[j][i] /= sum;
    }
    return calibratedProbs;
    /*
    if (Utils.gr(Utils.sum(probs), 0)) 
    {
      Utils.normalize(probs);
      return probs;
    }
    else {
       return m_ZeroR.distributionForInstance(inst);
    }*/
}