Example usage for weka.filters Filter setInputFormat

List of usage examples for weka.filters Filter setInputFormat

Introduction

On this page you can find example usage for weka.filters Filter setInputFormat.

Prototype

public boolean setInputFormat(Instances instanceInfo) throws Exception 

Source Link

Document

Sets the format of the input instances.

Usage

From source file:adams.flow.transformer.WekaStreamFilter.java

License:Open Source License

/**
 * Executes the flow item.
 * <p>
 * The input token may carry a weka.core.Instance, a weka.core.Instances
 * dataset or an adams.data.instance.Instance; the payload is pushed through
 * the wrapped filter and an output token of the matching payload type is
 * generated.
 *
 * @return      null if everything is fine, otherwise error message
 */
@Override
protected String doExecute() {
    String result;
    weka.core.Instances data;
    weka.core.Instance inst;
    adams.data.instance.Instance instA;
    weka.core.Instance filteredInst;
    weka.core.Instances filteredData;
    String relation;
    weka.filters.Filter filter;

    result = null;

    inst = null;
    data = null;
    filteredInst = null;
    filteredData = null;
    filter = (weka.filters.Filter) m_Filter;
    // unwrap the payload; ADAMS instances are converted to Weka instances
    if (m_InputToken.getPayload() instanceof weka.core.Instance)
        inst = (weka.core.Instance) m_InputToken.getPayload();
    else if (m_InputToken.getPayload() instanceof weka.core.Instances)
        data = (weka.core.Instances) m_InputToken.getPayload();
    else
        inst = ((adams.data.instance.Instance) m_InputToken.getPayload()).toInstance();
    // a single instance supplies the dataset header via its backing dataset
    if (data == null)
        data = inst.dataset();

    try {
        // initialize the filter on the first token only, using a 0-row header copy
        if (!m_Initialized) {
            result = setUpContainers(filter);
            if (result == null)
                result = updateObject(filter);
            filter.setInputFormat(new weka.core.Instances(data, 0));
        }

        if (result == null) {
            // filter data; remember the relation name so it can be restored below
            relation = data.relationName();
            if (inst == null) {
                // batch mode: filter the full dataset
                filteredData = Filter.useFilter(data, filter);
                if (m_KeepRelationName)
                    filteredData.setRelationName(relation);
            } else {
                // stream mode: push the single instance through the filter
                filter.input(inst);
                filter.batchFinished();
                filteredInst = filter.output();
                if (m_KeepRelationName)
                    filteredInst.dataset().setRelationName(relation);
            }

            // build output token of the same kind as the input payload
            if (m_InputToken.getPayload() instanceof weka.core.Instance) {
                m_OutputToken = new Token(filteredInst);
            } else if (m_InputToken.getPayload() instanceof weka.core.Instances) {
                m_OutputToken = new Token(filteredData);
            } else {
                instA = new adams.data.instance.Instance();
                instA.set(filteredInst);
                m_OutputToken = new Token(instA);
            }
        }
    } catch (Exception e) {
        result = handleException("Failed to filter data: ", e);
    }

    if (m_OutputToken != null)
        updateProvenance(m_OutputToken);

    return result;
}

From source file:adams.gui.menu.BatchFilterDatasets.java

License:Open Source License

/**
 * Performs the batch filtering./*www .ja  va2s.  com*/
 *
 * @param frame       the frame to close
 * @param input       the files to filter
 * @param filter      the filter setup
 * @param classIndex   the class index, empty for no class
 * @param keep   whether to keep the relation name
 * @param outdir      the output directory
 */
protected void batchFilter(ChildFrame frame, String[] input, Filter filter, String classIndex, boolean keep,
        File outdir) {
    Instances[] data;
    int i;
    AbstractFileLoader loader;
    Instances filtered;
    int clsIndex;
    String outfile;
    StringBuilder outfiles;

    if (input.length < 2) {
        GUIHelper.showErrorMessage(getOwner(), "At least two files are required!");
        return;
    }

    // load and check compatibility
    loader = ConverterUtils.getLoaderForFile(input[0]);
    data = new Instances[input.length];
    for (i = 0; i < input.length; i++) {
        try {
            loader.setFile(new File(input[i]));
            data[i] = DataSource.read(loader);
        } catch (Exception e) {
            GUIHelper.showErrorMessage(getOwner(),
                    "Failed to read '" + input[i] + "'!\n" + Utils.throwableToString(e));
            return;
        }
    }

    // class index
    if (classIndex.isEmpty()) {
        clsIndex = -1;
    } else {
        try {
            if (classIndex.equalsIgnoreCase("first"))
                clsIndex = 0;
            else if (classIndex.equalsIgnoreCase("last"))
                clsIndex = data[0].numAttributes() - 1;
            else
                clsIndex = Integer.parseInt(classIndex) - 1;
        } catch (Exception e) {
            GUIHelper.showErrorMessage(getOwner(),
                    "Failed to parse class attribute index: " + classIndex + "\n" + Utils.throwableToString(e));
            return;
        }
    }

    // filter
    outfiles = new StringBuilder();
    for (i = 0; i < input.length; i++) {
        try {
            outfile = outdir.getAbsolutePath() + File.separator + new File(input[i]).getName();
            data[i].setClassIndex(clsIndex);
            if (i == 0)
                filter.setInputFormat(data[i]);
            filtered = Filter.useFilter(data[i], filter);
            if (keep)
                filtered.setRelationName(data[i].relationName());
            DataSink.write(outfile, filtered);
            if (outfiles.length() > 0)
                outfiles.append("\n");
            outfiles.append(outfile);
        } catch (Exception e) {
            GUIHelper.showErrorMessage(getOwner(), "Failed to filter dataset #" + (i + 1) + " ('" + input[i]
                    + "')!\n" + Utils.throwableToString(e));
            return;
        }
    }

    GUIHelper.showInformationMessage(null, "Successfully filtered!\n" + outfiles);
    frame.dispose();
}

From source file:core.ClusterEvaluationEX.java

License:Open Source License

/**
 * Evaluate the clusterer on a set of instances. Calculates clustering
 * statistics and stores cluster assignments for the instances in
 * m_clusterAssignments.
 * 
 * @param test the set of instances to cluster
 * @param testFileName the name of the test file for incremental testing, 
 * if "" or null then not used
 * @param outputModel true if the clustering model is to be output as well
 * as the stats
 * 
 * @throws Exception if something goes wrong
 */
public void evaluateClusterer(Instances test, String testFileName, boolean outputModel) throws Exception {
    int i = 0;
    int cnum;
    double loglk = 0.0;
    int cc = m_Clusterer.numberOfClusters();
    m_numClusters = cc;
    double[] instanceStats = new double[cc];   // per-cluster instance counts
    Instances testRaw = null;
    boolean hasClass = (test.classIndex() >= 0);
    int unclusteredInstances = 0;
    Vector<Double> clusterAssignments = new Vector<Double>();
    Filter filter = null;
    DataSource source = null;
    Instance inst;

    if (testFileName == null)
        testFileName = "";

    // load data incrementally from file when given, otherwise from memory
    if (testFileName.length() != 0)
        source = new DataSource(testFileName);
    else
        source = new DataSource(test);
    testRaw = source.getStructure(test.classIndex());

    // If class is set then do class based evaluation as well; the class
    // attribute is stripped before clustering (it must not influence clusters)
    if (hasClass) {
        if (testRaw.classAttribute().isNumeric())
            throw new Exception("ClusterEvaluation: Class must be nominal!");

        filter = new Remove();
        // Remove uses 1-based attribute indices
        ((Remove) filter).setAttributeIndices("" + (testRaw.classIndex() + 1));
        ((Remove) filter).setInvertSelection(false);
        filter.setInputFormat(testRaw);
    }

    i = 0;
    while (source.hasMoreElements(testRaw)) {
        // next instance, with the class attribute removed when present
        inst = source.nextElement(testRaw);
        if (filter != null) {
            filter.input(inst);
            filter.batchFinished();
            inst = filter.output();
        }

        cnum = -1;
        try {
            // log density is only accumulated for density-based clusterers
            if (m_Clusterer instanceof DensityBasedClusterer) {
                loglk += ((DensityBasedClusterer) m_Clusterer).logDensityForInstance(inst);
                cnum = m_Clusterer.clusterInstance(inst);
                clusterAssignments.add((double) cnum);
            } else {
                cnum = m_Clusterer.clusterInstance(inst);
                clusterAssignments.add((double) cnum);
            }
        } catch (Exception e) {
            // an instance the clusterer rejects is recorded as unclustered (-1)
            clusterAssignments.add(-1.0);
            unclusteredInstances++;
        }

        if (cnum != -1) {
            instanceStats[cnum]++;
        }
    }

    // NOTE(review): loglk is averaged over the clustered-instance count even
    // for non-density clusterers (where it stays 0) — confirm intended
    double sum = Utils.sum(instanceStats);
    loglk /= sum;
    m_logL = loglk;
    m_clusterAssignments = new double[clusterAssignments.size()];
    for (i = 0; i < clusterAssignments.size(); i++) {
        m_clusterAssignments[i] = clusterAssignments.get(i);
    }
    // field widths sized to the number of decimal digits needed
    int numInstFieldWidth = (int) ((Math.log(clusterAssignments.size()) / Math.log(10)) + 1);

    if (outputModel) {
        m_clusteringResults.append(m_Clusterer.toString());
    }
    m_clusteringResults.append("Clustered Instances\n\n");
    int clustFieldWidth = (int) ((Math.log(cc) / Math.log(10)) + 1);
    for (i = 0; i < cc; i++) {
        if (instanceStats[i] > 0)
            m_clusteringResults.append(Utils.doubleToString((double) i, clustFieldWidth, 0) + "      "
                    + Utils.doubleToString(instanceStats[i], numInstFieldWidth, 0) + " ("
                    + Utils.doubleToString((instanceStats[i] / sum * 100.0), 3, 0) + "%)\n");
    }

    if (unclusteredInstances > 0)
        m_clusteringResults.append("\nUnclustered instances : " + unclusteredInstances);

    if (m_Clusterer instanceof DensityBasedClusterer)
        m_clusteringResults.append("\n\nLog likelihood: " + Utils.doubleToString(loglk, 1, 5) + "\n");

    if (hasClass) {
        evaluateClustersWithRespectToClass(test, testFileName);
    }
}

From source file:dkpro.similarity.experiments.sts2013.util.Evaluator.java

License:Open Source License

/**
 * Runs a 10-fold cross-validated linear regression over the given datasets
 * and writes the predicted similarity scores, clamped to [0;5], to one CSV
 * file per dataset (one score per line, ordered by the instance ID added
 * via AddID).
 *
 * @param mode      the experiment mode (selects the model subdirectory)
 * @param datasets  the datasets to evaluate
 * @throws Exception if reading, filtering, training or writing fails
 */
public static void runLinearRegressionCV(Mode mode, Dataset... datasets) throws Exception {
    for (Dataset dataset : datasets) {
        // Set parameters
        int folds = 10;
        Classifier baseClassifier = new LinearRegression();

        // Set up the random number generator
        long seed = System.currentTimeMillis();
        Random random = new Random(seed);

        // Add IDs to the instances so predictions can be mapped back to rows
        AddID.main(new String[] { "-i",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".arff", "-o",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString()
                        + "-plusIDs.arff" });
        Instances data = DataSource.read(
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + "-plusIDs.arff");
        data.setClassIndex(data.numAttributes() - 1);

        // Instantiate the Remove filter (strips the ID attribute for training)
        Remove removeIDFilter = new Remove();
        removeIDFilter.setAttributeIndices("first");

        // Randomize the data
        data.randomize(random);

        // Perform cross-validation
        Instances predictedData = null;
        Evaluation eval = new Evaluation(data);

        for (int n = 0; n < folds; n++) {
            Instances train = data.trainCV(folds, n, random);
            Instances test = data.testCV(folds, n);

            // Apply log filter
            Filter logFilter = new LogFilter();
            logFilter.setInputFormat(train);
            train = Filter.useFilter(train, logFilter);
            logFilter.setInputFormat(test);
            test = Filter.useFilter(test, logFilter);

            // Copy the classifier
            Classifier classifier = AbstractClassifier.makeCopy(baseClassifier);

            // Instantiate the FilteredClassifier
            FilteredClassifier filteredClassifier = new FilteredClassifier();
            filteredClassifier.setFilter(removeIDFilter);
            filteredClassifier.setClassifier(classifier);

            // Build the classifier
            filteredClassifier.buildClassifier(train);

            // Evaluate the FilteredClassifier: the inner classifier was trained
            // on data with the ID attribute removed, so evaluating it directly
            // against the unfiltered test set would use mismatched attributes
            eval.evaluateModel(filteredClassifier, test);

            // Add predictions
            AddClassification filter = new AddClassification();
            filter.setClassifier(classifier);
            filter.setOutputClassification(true);
            filter.setOutputDistribution(false);
            filter.setOutputErrorFlag(true);
            filter.setInputFormat(train);
            Filter.useFilter(train, filter); // trains the classifier

            Instances pred = Filter.useFilter(test, filter); // performs predictions on test set
            if (predictedData == null) {
                predictedData = new Instances(pred, 0);
            }
            for (int j = 0; j < pred.numInstances(); j++) {
                predictedData.add(pred.instance(j));
            }
        }

        // Prepare output scores, indexed by the (1-based) ID attribute
        double[] scores = new double[predictedData.numInstances()];

        for (Instance predInst : predictedData) {
            int id = (int) predInst.value(predInst.attribute(0)) - 1;

            // the predicted classification is the second-to-last attribute
            int valueIdx = predictedData.numAttributes() - 2;

            double value = predInst.value(predInst.attribute(valueIdx));

            // Limit to interval [0;5]
            scores[id] = Math.max(0.0, Math.min(5.0, value));
        }

        // Output
        StringBuilder sb = new StringBuilder();
        for (double score : scores) {
            sb.append(score).append(LF);
        }

        FileUtils.writeStringToFile(
                new File(OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv"),
                sb.toString());
    }
}

From source file:dkpro.similarity.experiments.sts2013baseline.util.Evaluator.java

License:Open Source License

/**
 * Runs a 10-fold cross-validated linear regression over the given datasets
 * and writes the predicted similarity scores, clamped to [0;5], to one CSV
 * file per dataset (one score per line, ordered by the instance ID added
 * via AddID).
 *
 * @param mode      the experiment mode (selects the model subdirectory)
 * @param datasets  the datasets to evaluate
 * @throws Exception if reading, filtering, training or writing fails
 */
public static void runLinearRegressionCV(Mode mode, Dataset... datasets) throws Exception {
    for (Dataset dataset : datasets) {
        // Set parameters
        int folds = 10;
        Classifier baseClassifier = new LinearRegression();

        // Set up the random number generator
        long seed = System.currentTimeMillis();
        Random random = new Random(seed);

        // Add IDs to the instances so predictions can be mapped back to rows
        AddID.main(new String[] { "-i",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".arff", "-o",
                MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString()
                        + "-plusIDs.arff" });

        String location = MODELS_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString()
                + "-plusIDs.arff";

        Instances data = DataSource.read(location);

        if (data == null) {
            throw new IOException("Could not load data from: " + location);
        }

        data.setClassIndex(data.numAttributes() - 1);

        // Instantiate the Remove filter (strips the ID attribute for training)
        Remove removeIDFilter = new Remove();
        removeIDFilter.setAttributeIndices("first");

        // Randomize the data
        data.randomize(random);

        // Perform cross-validation
        Instances predictedData = null;
        Evaluation eval = new Evaluation(data);

        for (int n = 0; n < folds; n++) {
            Instances train = data.trainCV(folds, n, random);
            Instances test = data.testCV(folds, n);

            // Apply log filter
            Filter logFilter = new LogFilter();
            logFilter.setInputFormat(train);
            train = Filter.useFilter(train, logFilter);
            logFilter.setInputFormat(test);
            test = Filter.useFilter(test, logFilter);

            // Copy the classifier
            Classifier classifier = AbstractClassifier.makeCopy(baseClassifier);

            // Instantiate the FilteredClassifier
            FilteredClassifier filteredClassifier = new FilteredClassifier();
            filteredClassifier.setFilter(removeIDFilter);
            filteredClassifier.setClassifier(classifier);

            // Build the classifier
            filteredClassifier.buildClassifier(train);

            // Evaluate the FilteredClassifier: the inner classifier was trained
            // on data with the ID attribute removed, so evaluating it directly
            // against the unfiltered test set would use mismatched attributes
            eval.evaluateModel(filteredClassifier, test);

            // Add predictions
            AddClassification filter = new AddClassification();
            filter.setClassifier(classifier);
            filter.setOutputClassification(true);
            filter.setOutputDistribution(false);
            filter.setOutputErrorFlag(true);
            filter.setInputFormat(train);
            Filter.useFilter(train, filter); // trains the classifier

            Instances pred = Filter.useFilter(test, filter); // performs predictions on test set
            if (predictedData == null) {
                predictedData = new Instances(pred, 0);
            }
            for (int j = 0; j < pred.numInstances(); j++) {
                predictedData.add(pred.instance(j));
            }
        }

        // Prepare output scores, indexed by the (1-based) ID attribute
        double[] scores = new double[predictedData.numInstances()];

        for (Instance predInst : predictedData) {
            int id = (int) predInst.value(predInst.attribute(0)) - 1;

            // the predicted classification is the second-to-last attribute
            int valueIdx = predictedData.numAttributes() - 2;

            double value = predInst.value(predInst.attribute(valueIdx));

            // Limit to interval [0;5]
            scores[id] = Math.max(0.0, Math.min(5.0, value));
        }

        // Output
        StringBuilder sb = new StringBuilder();
        for (double score : scores) {
            sb.append(score).append(LF);
        }

        FileUtils.writeStringToFile(
                new File(OUTPUT_DIR + "/" + mode.toString().toLowerCase() + "/" + dataset.toString() + ".csv"),
                sb.toString());
    }
}

From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java

License:Open Source License

/**
 * Applies the given filter to an entire dataset in streaming fashion and
 * returns a new dataset containing the filtered instances.
 *
 * @param filtre     the filter to apply (its input format is set from the data)
 * @param instances  the dataset to filter
 * @return the filtered dataset
 * @throws Exception if the filter rejects the data
 */
private Instances appliquerFiltre(Filter filtre, Instances instances) throws Exception {
    filtre.setInputFormat(instances);

    // feed every instance into the filter, then signal end of batch
    int total = instances.numInstances();
    for (int idx = 0; idx < total; idx++) {
        filtre.input(instances.instance(idx));
    }
    filtre.batchFinished();

    // drain the filter's output queue into a dataset with the output format
    Instances resultat = filtre.getOutputFormat();
    Instance courante;
    while ((courante = filtre.output()) != null) {
        resultat.add(courante);
    }

    return resultat;
}

From source file:fr.loria.synalp.jtrans.phonetiseur.Classifieurs.java

License:Open Source License

/**
 * Applies the given filter to a single instance, using the supplied dataset
 * to establish the filter's input format.
 *
 * @param filtre    the filter to apply
 * @param instance  the instance to filter
 * @param instances the dataset defining the input format
 * @return the filtered instance
 * @throws Exception if the filter rejects the input
 */
private Instance appliquerFiltreAUneInstance(Filter filtre, Instance instance, Instances instances)
        throws Exception {
    // configure format, push the single instance, close the batch
    filtre.setInputFormat(instances);
    filtre.input(instance);
    filtre.batchFinished();

    Instance resultat = filtre.output();
    return resultat;
}

From source file:imba.classifier.FFNNTubes.java

/**
 * Builds the feed-forward network from the given training data: targets and
 * random initial weights are set up, the data is normalized, and training
 * runs until the epoch budget is exhausted or the MSE drops below 1e-5.
 *
 * @param data the training instances
 * @throws Exception if the data fails the capability check or filtering fails
 */
@Override
public void buildClassifier(Instances data) throws Exception {
    getCapabilities().testWithFail(data);

    // instances without a class value cannot be used for supervised training
    data.deleteWithMissingClass();

    nAttribute = data.numAttributes() - 1;
    nOutput = data.numClasses();
    nData = data.size();

    //set target data
    setTarget(data);

    //generate weight
    generateRandomWeight();

    // normalize numeric attributes before training
    Normalize norm = new Normalize();
    Filter filter = new NominalToBinary();

    norm.setInputFormat(data);

    Instances filteredData = Filter.useFilter(data, norm);

    try {
        filter.setInputFormat(filteredData);

        for (Instance i1 : filteredData) {
            filter.input(i1);
        }

        filter.batchFinished();
        // NOTE(review): the NominalToBinary output is never collected, so
        // training below still uses only the normalized data — confirm whether
        // filteredData = Filter.useFilter(filteredData, filter) was intended
        // (collecting it would also change nAttribute's relationship to the data)
    } catch (Exception ex) {
        // fixed: log under this class rather than NBTubes (copy-paste slip)
        Logger.getLogger(FFNNTubes.class.getName()).log(Level.SEVERE, null, ex);
    }

    // train epoch by epoch; NOTE(review): z <= nEpoch runs nEpoch+1 passes — confirm
    int z = 0;
    double valMSE = 100.0;
    while ((z <= nEpoch) && (valMSE >= 0.00001)) {
        for (int j = 0; j < nData; j++) {
            feedForward(filteredData.get(j));

            if (nHidden == 0) {
                // no hidden layer: direct delta-rule update
                updateWeight(target[j]);
            } else {
                backPropagation(target[j]);
            }
        }

        countError(filteredData);
        valMSE = countMSE(filteredData);
        System.out.println("ACCURACY " + z + " : " + accuracy);
        System.out.println("MSE " + z + " : " + valMSE);
        z++;
    }
}

From source file:imba.classifier.NBTubes.java

/**
 * Builds the Naive Bayes model: the data is normalized/discretized into
 * nominal values, then per-attribute, per-value, per-class counts are
 * collected and converted into conditional probabilities.
 *
 * @param data the training instances
 */
@Override
public void buildClassifier(Instances data) {
    // reset all classifier state
    dataClassifier = new ArrayList<>();
    infoClassifier = new ArrayList<>();
    validAttribute = new ArrayList<>();
    dataset = null;
    sumClass = null;
    dataSize = 0;
    header_Instances = data;

    Filter f;
    int i, j, k, l, m;
    int sumVal;

    int numAttr = data.numAttributes(); // includes the class attribute

    // detect whether any non-class attribute is numeric
    i = 0;
    while (i < numAttr && wasNumeric == false) {
        if (i == classIdx) {
            i++;
        }

        if (i != numAttr && data.attribute(i).isNumeric()) {
            wasNumeric = true;
        }

        i++;
    }

    Instance p;

    // apply filters: normalize first when numeric attributes are present
    if (wasNumeric) {
        f = new Normalize();
        //Filter f = new NumericToNominal();
        try {
            f.setInputFormat(data);

            for (Instance i1 : data) {
                f.input(i1);
            }

            f.batchFinished();
        } catch (Exception ex) {
            Logger.getLogger(NBTubes.class.getName()).log(Level.SEVERE, null, ex);
        }

        // collect the normalized instances
        dataset = f.getOutputFormat();

        while ((p = f.output()) != null) {
            dataset.add(p);
        }
    }

    // second pass: turn (normalized) numeric attributes into nominal values
    if (filter.equals("Discretize")) {
        f = new Discretize();
    } else {
        f = new NumericToNominal();
    }

    try {
        if (wasNumeric) {
            f.setInputFormat(dataset);
            for (Instance i1 : dataset) {
                f.input(i1);
            }
        } else {
            f.setInputFormat(data);
            for (Instance i1 : data) {
                f.input(i1);
            }
        }

        f.batchFinished();
    } catch (Exception ex) {
        Logger.getLogger(NBTubes.class.getName()).log(Level.SEVERE, null, ex);
    }

    // replace the dataset with the fully filtered (all-nominal) instances
    dataset = null;
    dataset = f.getOutputFormat();

    while ((p = f.output()) != null) {
        dataset.add(p);
    }

    //building data structure
    classIdx = data.classIndex();

    dataSize = data.size();

    // initialize dataClassifier (counts) and infoClassifier (probabilities)
    // with zeroed entries; NOTE(review): j walks all attributes while i skips
    // the class index, and m maps attribute j to its slot with the class excluded
    i = 0;
    j = i;
    while (j < numAttr) {
        if (i == classIdx) {
            i++;
        } else {
            dataClassifier.add(new ArrayList<>());
            infoClassifier.add(new ArrayList<>());

            if (j < i) {
                m = j - 1;
            } else {
                m = j;
            }

            k = 0;
            while (k < dataset.attribute(j).numValues()) {
                dataClassifier.get(m).add(new ArrayList<>());
                infoClassifier.get(m).add(new ArrayList<>());

                l = 0;
                while (l < dataset.attribute(classIdx).numValues()) {
                    dataClassifier.get(m).get(k).add(0);
                    infoClassifier.get(m).get(k).add(0.0);

                    l++;
                }

                k++;
            }
        }

        i++;
        j++;
    }

    // fill dataClassifier with per-(attribute value, class) counts from the
    // dataset; sumClass counts instances per class (incremented once per row)
    sumClass = new int[data.numClasses()];

    i = 0;
    while (i < dataset.size()) {
        j = 0;
        k = j;
        while (k < dataset.numAttributes()) {
            if (j == classIdx) {
                j++;
            } else {
                if (k < j) {
                    m = k - 1;
                } else {
                    m = k;
                }

                dataClassifier.get(m).get((int) dataset.get(i).value(k)).set(
                        (int) dataset.get(i).value(classIdx),
                        dataClassifier.get(m).get((int) dataset.get(i).value(k))
                                .get((int) dataset.get(i).value(classIdx)) + 1);

                if (m == 0) {
                    sumClass[(int) dataset.get(i).value(classIdx)]++;
                }

            }

            k++;
            j++;
        }

        i++;
    }

    // convert counts into conditional probabilities P(value | class);
    // NOTE(review): no Laplace smoothing — zero counts yield zero probabilities
    i = 0;
    while (i < dataClassifier.size()) {
        j = 0;
        while (j < dataClassifier.get(i).size()) {
            k = 0;
            while (k < dataClassifier.get(i).get(j).size()) {
                infoClassifier.get(i).get(j).set(k, (double) dataClassifier.get(i).get(j).get(k) / sumClass[k]);

                k++;
            }

            j++;
        }

        i++;
    }
}

From source file:imba.classifier.NBTubes.java

/**
 * Determines the class membership values for the given instance by applying
 * the same normalization/nominalization filters used at training time and
 * multiplying the stored conditional probabilities.
 * <p>
 * NOTE(review): the returned array is the raw product P(class) * prod P(value|class)
 * and is not normalized to sum to 1 — confirm callers expect that.
 *
 * @param instance the instance to classify
 * @return per-class membership values
 * @throws Exception if filtering fails
 */
@Override
public double[] distributionForInstance(Instance instance) throws Exception {
    Instances temp = null;
    Instance p;
    Filter f;
    double[] a = new double[infoClassifier.get(0).get(0).size()];
    int i, j, k, l, x, c;
    double t, prev;
    Enumeration n;
    boolean big;
    String val;
    String[] valMinMax;

    if (wasNumeric) {

        // temporarily append the instance so it is normalized together with
        // the training header; it is removed again further below
        header_Instances.add(instance);

        f = new Normalize();
        try {
            f.setInputFormat(header_Instances);

            for (Instance i1 : header_Instances) {
                f.input(i1);
            }

            f.batchFinished();
        } catch (Exception ex) {
            Logger.getLogger(NBTubes.class.getName()).log(Level.SEVERE, null, ex);
        }

        temp = f.getOutputFormat();

        while ((p = f.output()) != null) {
            temp.add(p);
        }
    }

    f = new NumericToNominal();

    if (wasNumeric) {
        try {
            f.setInputFormat(temp);

            for (Instance i1 : temp) {
                f.input(i1);
            }

            f.batchFinished();
        } catch (Exception ex) {
            Logger.getLogger(NBTubes.class.getName()).log(Level.SEVERE, null, ex);
        }

        temp = null;
        temp = f.getOutputFormat();

        p = null;

        while ((p = f.output()) != null) {
            temp.add(p);
        }

        // the appended instance is the last one of the filtered output
        instance = temp.lastInstance();

        // undo the temporary append on the shared training header
        header_Instances.remove(header_Instances.size() - 1);
    } else {
        f.setInputFormat(header_Instances);

        f.input(instance);

        f.batchFinished();

        instance = f.output();
    }

    // compute the (unnormalized) membership value for every class
    i = 0;
    while (i < (a.length)) {
        // prior P(class)
        a[i] = (double) sumClass[i] / dataSize;

        j = 0;
        k = 0;
        while (j < infoClassifier.size()) {

            // k is the raw attribute index; skip the class attribute
            if (j == classIdx) {
                k++;
            }

            if (wasNumeric) {
                if (filter.equals("Discretize")) {
                    // map the instance's value onto one of the Discretize bins by
                    // parsing the bin label, e.g. "'(0.25-0.5]'"
                    l = 0;
                    big = false;
                    while (l < dataset.attribute(k).numValues() && big == false) {
                        val = String.valueOf(dataset.attribute(k).value(l));
                        val = val.replaceAll("'", "");
                        val = val.replaceAll("\\(", "");
                        val = val.replaceAll("\\)", "");
                        val = val.replaceAll("]", "");

                        valMinMax = val.split("-");

                        // match the value against the parsed bin boundaries;
                        // "inf" boundaries are substituted by 0.0 / 1.0 since
                        // the data was normalized into [0,1]
                        if (valMinMax.length == 3) {
                            if (valMinMax[1].equals("inf")) {
                                valMinMax[1] = "0.0";
                            }
                            if (Double.valueOf(instance.stringValue(k)) > Double.valueOf(valMinMax[1]) && Double
                                    .valueOf(instance.stringValue(k)) <= Double.valueOf(valMinMax[2])) {
                                big = true;
                            }
                        } else {
                            if (valMinMax.length == 2) {
                                if (valMinMax[1].equals("inf")) {
                                    valMinMax[1] = "1.0";
                                }

                                if (Double.valueOf(instance.stringValue(k)) > Double.valueOf(valMinMax[0])
                                        && Double.valueOf(instance.stringValue(k)) <= Double
                                                .valueOf(valMinMax[1])) {
                                    big = true;
                                }
                            } else {
                                // label is a plain value, not a range
                                l = dataset.attribute(k).indexOfValue(instance.stringValue(k));
                                big = true;
                            }

                        }
                        l++;
                    }

                    x = l - 1;

                } else {
                    // NumericToNominal: find the nearest known nominal value by
                    // scanning the (numeric-valued) labels in order
                    big = false;
                    l = 0;
                    n = dataset.attribute(k).enumerateValues();

                    t = 0;
                    prev = 0;
                    while (l < dataset.attribute(k).numValues() && big == false) {
                        t = Double.valueOf(n.nextElement().toString());

                        if (Double.valueOf(instance.stringValue(k)) <= t) {
                            big = true;
                        } else {
                            prev = t;
                        }

                        l++;
                    }

                    if (big == true && t != Double.valueOf(instance.stringValue(k))) {
                        System.out.println(prev + "   " + Double.valueOf(instance.stringValue(k)) + "   " + t);
                    }

                    // NOTE(review): c adjusts the nearest-neighbour choice based
                    // on classIdx — intent unclear, confirm against training code
                    if (classIdx < 2) {
                        c = 2;
                    } else {
                        c = 1;
                    }

                    if (big == true && l > c) {
                        // pick whichever of the two surrounding labels is closer
                        if ((Double.valueOf(instance.stringValue(k)) - prev) <= (t
                                - Double.valueOf(instance.stringValue(k)))) {
                            x = l - 2;
                        } else {
                            x = l - 1;
                        }
                    } else {
                        x = l - 1;
                    }
                }

            } else {
                // purely nominal data: direct lookup of the value's index
                x = dataset.attribute(k).indexOfValue(instance.stringValue(k));
            }

            // multiply in P(attribute j has value x | class i)
            a[i] *= infoClassifier.get(j).get(x).get(i);

            k++;
            j++;
        }

        i++;
    }

    return a;
}