Example usage for weka.core Instances numInstances

List of usage examples for weka.core Instances numInstances

Introduction

In this page you can find the example usage for weka.core Instances numInstances.

Prototype


public int numInstances() 

Source Link

Document

Returns the number of instances in the dataset.

Usage

From source file:ai.BalancedRandomForest.java

License:GNU General Public License

/**
 * Builds a balanced random forest: every tree is trained on a bootstrap
 * sample drawn with equal probability from each class that occurs in the
 * data, and the trees are built concurrently on all available processors.
 * Afterwards the out-of-bag error is estimated and stored in outOfBagError.
 *
 * @param data training instances (class attribute must be set)
 * @throws Exception if the data contains no class values, a tree fails to
 *                   build, or training is interrupted
 */
public void buildClassifier(final Instances data) throws Exception {
    // If number of features is 0 then set it to log2 of M (number of attributes)
    if (numFeatures < 1)
        numFeatures = (int) Utils.log2(data.numAttributes()) + 1;
    // Check maximum number of random features
    if (numFeatures >= data.numAttributes())
        numFeatures = data.numAttributes() - 1;

    // Initialize array of trees
    tree = new BalancedRandomTree[numTrees];

    // total number of instances
    final int numInstances = data.numInstances();
    // total number of classes
    final int numClasses = data.numClasses();

    // one bucket of instance indices per class value
    final ArrayList<Integer>[] indexSample = new ArrayList[numClasses];
    for (int i = 0; i < numClasses; i++)
        indexSample[i] = new ArrayList<Integer>();

    // fill indexSample with the indices of each class
    for (int i = 0; i < numInstances; i++)
        indexSample[(int) data.get(i).classValue()].add(i);

    // Classes that actually occur in the data: sampling from an empty class
    // bucket would make Random.nextInt(0) throw IllegalArgumentException.
    // When every class is present this draws the exact same sequence as the
    // original code.
    final ArrayList<Integer> presentClasses = new ArrayList<Integer>();
    for (int c = 0; c < numClasses; c++)
        if (!indexSample[c].isEmpty())
            presentClasses.add(c);
    if (presentClasses.isEmpty())
        throw new Exception("Cannot build classifier: no class values present in the data");

    final Random random = new Random(seed);

    // Executor service to run concurrent trees
    final ExecutorService exe = Executors.newFixedThreadPool(Runtime.getRuntime().availableProcessors());

    List<Future<BalancedRandomTree>> futures = new ArrayList<Future<BalancedRandomTree>>(numTrees);

    // inBag[t][i] == true when instance i is part of tree t's bootstrap sample
    final boolean[][] inBag = new boolean[numTrees][numInstances];

    try {
        for (int i = 0; i < numTrees; i++) {
            final ArrayList<Integer> bagIndices = new ArrayList<Integer>();

            // Randomly select the indices in a balanced way: first pick a
            // (non-empty) class, then a random sample of that class.
            for (int j = 0; j < numInstances; j++) {
                final int randomClass = presentClasses.get(random.nextInt(presentClasses.size()));
                final int randomSample = random.nextInt(indexSample[randomClass].size());
                final int sampleIndex = indexSample[randomClass].get(randomSample);
                bagIndices.add(sampleIndex);
                inBag[i][sampleIndex] = true;
            }

            // Create random tree
            final Splitter splitter = new Splitter(
                    new GiniFunction(numFeatures, data.getRandomNumberGenerator(random.nextInt())));

            futures.add(exe.submit(new Callable<BalancedRandomTree>() {
                public BalancedRandomTree call() {
                    return new BalancedRandomTree(data, bagIndices, splitter);
                }
            }));
        }

        // Grab all trained trees before proceeding
        for (int treeIdx = 0; treeIdx < numTrees; treeIdx++)
            tree[treeIdx] = futures.get(treeIdx).get();

        // Calculate out of bag error
        final boolean numeric = data.classAttribute().isNumeric();

        List<Future<Double>> votes = new ArrayList<Future<Double>>(data.numInstances());

        for (int i = 0; i < data.numInstances(); i++) {
            VotesCollector aCollector = new VotesCollector(tree, i, data, inBag);
            votes.add(exe.submit(aCollector));
        }

        double outOfBagCount = 0.0;
        double errorSum = 0.0;

        for (int i = 0; i < data.numInstances(); i++) {

            double vote = votes.get(i).get();

            // weighted error for this instance
            outOfBagCount += data.instance(i).weight();
            if (numeric) {
                errorSum += StrictMath.abs(vote - data.instance(i).classValue()) * data.instance(i).weight();
            } else {
                if (vote != data.instance(i).classValue())
                    errorSum += data.instance(i).weight();
            }

        }

        outOfBagError = errorSum / outOfBagCount;

    } finally {
        // Exceptions used to be caught and only printed, which reported
        // success while leaving the forest half-built; since the method
        // already declares `throws Exception`, let them propagate instead.
        exe.shutdownNow();
    }

}

From source file:analysis.SilhouetteIndex.java

/**
 * Computes the mean silhouette coefficient of a k-means clustering with c
 * clusters: for each instance, a(i) is the mean distance to the other
 * members of its own cluster and b(i) the mean distance to the nearest
 * other cluster; s(i) = (b - a) / max(a, b).
 *
 * @param sk   the k-means clusterer (rebuilt here with c clusters)
 * @param inst the instances to cluster
 * @param c    the requested number of clusters
 * @return the average silhouette over all points, or NaN when c == 1
 * @throws Exception if clustering or cluster assignment fails
 */
public double calculateIndex(SimpleKMeans sk, Instances inst, int c) throws Exception {
    sk.setNumClusters(c);
    sk.buildClusterer(inst);
    EuclideanDistance ed = new EuclideanDistance();
    double avgSilhouetteOverAllPoints = 0.d;

    if (sk.getNumClusters() == 1) {
        // The index is not defined for k=1; it needs at least 2 clusters.
        return Double.NaN;
    }

    for (int i = 0; i < inst.numInstances(); i++) {
        // for the current element get its cluster
        int currentcluster = sk.clusterInstance(inst.instance(i));
        double[] current_attr = new double[inst.numAttributes()];
        double[] other_attr = new double[inst.numAttributes()];
        // get attributes of the current instance
        for (int attr = 0; attr < inst.numAttributes(); attr++) {
            current_attr[attr] = inst.instance(i).value(attr);
        }
        // per-cluster distance sums and member counts
        double[] distances = new double[sk.getNumClusters()];
        int[] counters = new int[sk.getNumClusters()];
        double avgInClusterDist = 0, dist = 0;
        int countsamecluster = 0;
        // the own cluster must never win the "nearest other cluster" search
        distances[currentcluster] = Double.MAX_VALUE;
        for (int j = 0; j < inst.numInstances(); j++) {
            for (int attr = 0; attr < inst.numAttributes(); attr++) {
                other_attr[attr] = inst.instance(j).value(attr);
            }
            // get cluster number of j th element
            int clusternumber = sk.clusterInstance(inst.instance(j));
            // check if j and i are in the same cluster
            if (clusternumber == currentcluster) {
                // Compare indices rather than object references; the original
                // `inst.instance(i) != inst.instance(j)` silently skipped any
                // Instance object that appeared twice in the dataset.
                if (i != j) {
                    // accumulate distance to other elements in the cluster
                    dist = ed.compute(current_attr, other_attr);
                    avgInClusterDist = avgInClusterDist + dist;
                    countsamecluster++;
                }
            } else {
                dist = ed.compute(current_attr, other_attr);
                distances[clusternumber] = distances[clusternumber] + dist;
                counters[clusternumber]++;
            }
        }
        // a(i): mean distance to the other members of the own cluster
        if (countsamecluster > 0) {
            avgInClusterDist = avgInClusterDist / countsamecluster;
        }
        // Mean distance to each other cluster. An empty cluster would divide
        // by zero (yielding NaN and poisoning the minimum), so exclude it by
        // giving it the sentinel used for the own cluster.
        for (int k = 0; k < distances.length; k++) {
            if (k != currentcluster) {
                distances[k] = (counters[k] > 0) ? distances[k] / counters[k] : Double.MAX_VALUE;
            }
        }
        // b(i): the minimum of the average distances to the other clusters
        double min = distances[0];
        for (int k = 1; k < distances.length; k++) {
            if (min > distances[k]) {
                min = distances[k];
            }
        }

        // s(i) for the current element; with exactly one other member the
        // original deliberately scores 0, which is kept here.
        double si;
        if (countsamecluster == 1) {
            si = 0.0d;
        } else {
            si = (min - avgInClusterDist) / Math.max(min, avgInClusterDist);
        }
        avgSilhouetteOverAllPoints = avgSilhouetteOverAllPoints + si;
    }
    // average over all points
    return avgSilhouetteOverAllPoints / inst.numInstances();

}

From source file:AnDE.wdAnDEonline.java

License:Open Source License

/**
 * Trains the AnDE model: checks capabilities, drops instances without a
 * class value, records the dataset dimensions and attribute cardinalities,
 * selects the model order from m_S, and performs a single counting pass
 * over the training data.
 *
 * @param instances the training data
 * @throws Exception if the classifier cannot handle the data
 */
@Override
public void buildClassifier(Instances instances) throws Exception {

    // Make sure this classifier can handle the supplied data.
    getCapabilities().testWithFail(instances);

    // Instances without a class value cannot contribute to training.
    instances.deleteWithMissingClass();

    nInstances = instances.numInstances();
    nAttributes = instances.numAttributes() - 1;
    nc = instances.numClasses();

    probs = new double[nc];

    // Cardinality of each (non-class) attribute.
    paramsPerAtt = new int[nAttributes];
    for (int att = 0; att < nAttributes; att++) {
        paramsPerAtt[att] = instances.attribute(att).numValues();
    }

    // The m_S option selects the order of the AnDE model.
    if (m_S.equalsIgnoreCase("A0DE")) {
        numTuples = 0;
    } else if (m_S.equalsIgnoreCase("A1DE")) {
        numTuples = 1;
    } else if (m_S.equalsIgnoreCase("A2DE")) {
        numTuples = 2;
    }

    /*
     * Parameter learning: set up the distribution computer and the flat
     * parameter structure, then make a single pass over the data.
     */
    int scheme = plTechniques.MAP;

    logDComputer = LogDistributionComputerAnDE.getDistributionComputer(numTuples, scheme);

    dParameters_ = new wdAnDEParametersFlat(nAttributes, nc, nInstances, paramsPerAtt, scheme, numTuples,
            m_MVerb);

    if (m_MVerb)
        System.out.println("All data structures are initialized. Starting to estimate parameters.");

    // First (and only) counting pass over the training data; a zero-length
    // dataset simply skips the loop.
    for (int i = 0; i < nInstances; i++) {
        dParameters_.updateFirstPass(instances.instance(i));
    }
}

From source file:ann.ANN.java

/**
 * Evaluates the model with a percentage split: the first `percent` percent
 * of the instances become the training set, the remainder the test set, and
 * a summary, per-class details, and the confusion matrix are printed.
 *
 * @param model   the classifier to evaluate
 * @param percent portion of the data (0-100) used for the training split
 * @param data    the full, labeled dataset
 */
public void percentageSplit(Classifier model, double percent, Instances data) {
    try {
        int trainSize = (int) Math.round(data.numInstances() * percent / 100);
        int testSize = data.numInstances() - trainSize;
        Instances train = new Instances(data, trainSize);
        Instances test = new Instances(data, testSize);

        // No randomization: the split preserves the original instance order.
        for (int i = 0; i < trainSize; i++) {
            train.add(data.instance(i));
        }
        for (int i = trainSize; i < data.numInstances(); i++) {
            test.add(data.instance(i));
        }

        Evaluation eval = new Evaluation(train);
        eval.evaluateModel(model, test);
        System.out.println("================================");
        System.out.println("========Percentage  Split=======");
        System.out.println("================================");
        System.out.println(eval.toSummaryString("\n=== Summary ===\n", false));
        System.out.println(eval.toClassDetailsString("=== Detailed Accuracy By Class ===\n"));
        System.out.println(eval.toMatrixString("=== Confusion Matrix ===\n"));
    } catch (Exception ex) {
        // The old message blamed file loading, but nothing is loaded here;
        // report the actual evaluation failure instead of hiding its cause.
        System.out.println("Percentage split evaluation failed: " + ex.getMessage());
    }
}

From source file:ann.ANN.java

/**
 * Loads a test set from the given path (last attribute used as the class)
 * and prints, for every instance, the actual class, the predicted class,
 * and the class distribution produced by the model.
 *
 * @param data_address path readable by weka's ConverterUtils.DataSource
 * @param model        the trained classifier used for the predictions
 */
public void classify(String data_address, Classifier model) {
    try {
        Instances test = ConverterUtils.DataSource.read(data_address);
        test.setClassIndex(test.numAttributes() - 1);
        System.out.println("====================================");
        System.out.println("=== Predictions on user test set ===");
        System.out.println("====================================");
        System.out.println("# - actual - predicted - distribution");
        for (int i = 0; i < test.numInstances(); i++) {
            double pred = model.classifyInstance(test.instance(i));
            double[] dist = model.distributionForInstance(test.instance(i));
            System.out.print((i + 1) + " - ");
            System.out.print(test.instance(i).toString(test.classIndex()) + " - ");
            System.out.print(test.classAttribute().value((int) pred) + " - ");
            System.out.println(Utils.arrayToString(dist));
        }
        System.out.println("\n");
    } catch (Exception ex) {
        // Keep the best-effort behavior, but surface the cause instead of
        // swallowing it behind a generic message.
        System.out.println("Prediction failed: " + ex.getMessage() + "\n");
    }
}

From source file:ann.Main.java

/**
 * Command-line entry point: parses the options, loads the train / test /
 * prediction datasets, trains a MyANN classifier, evaluates it (on the
 * train set, a separate test set, or via cross-validation), prints the
 * confusion matrix with accuracy / precision / recall, and optionally
 * predicts an unlabeled dataset.
 */
public static void main(String[] args) {
    String trainPath = null;
    String testPath = null;
    String weights = null;
    String predictPath = null;
    // Defaults: a sigmoid one-perceptron trained with the perceptron rule,
    // stopping after a maximum number of iterations.
    char activationFunction = MyANN.SIGMOID_FUNCTION, terminateCondition = MyANN.TERMINATE_MAX_ITERATION,
            learningRule = MyANN.PERCEPTRON_TRAINING_RULE, topology = MyANN.ONE_PERCEPTRON;
    double deltaMSE = 0.01;
    int maxIteration = 500;
    double learningRate = 0.3;
    double momentum = 0.2;
    int nbHidden = 0;
    int[] hiddenConf = null;
    boolean isCV = false;
    int numFolds = 10;
    boolean isEvaluate = false;

    // Options come as (-flag, value) pairs followed by the mandatory train
    // data path, so a valid command line has an odd number of arguments.
    if (args.length < 1 || args.length % 2 == 0) {

        System.out.println("Usage: ANN [-I <path>] [-t O|M] [-r P|B|D] [-h <layer>]"
                + "\n\t [-a N|G|T] [-L <rate>] [-m <momentum>] [-E D|I|B] [-d <mse>]"
                + "\n\t [-i <iteration>] [-e <path>|<n>] [-p <path>] <trainDataPath>");
        System.out.println("");
        System.out.println("-a N|G|T \t set activation function for OnePerceptron");
        System.out.println("\t\t   N=SIGN, G=SIGMOID, T=STEP");
        System.out.println("-d <mse> \t set MSE = <mse> for terminate condition");
        System.out.println("-E D|I|B \t\t set terminate condition, D=by MSE, I=by iteration");
        System.out.println("-e <path>|<n> \t set test data using <path> or cross-validation w/ folds = <n>");
        System.out.println("-h <layer> \t set hidden layer. <layer>=0 no hidden layer");
        System.out.println("\t\t   <layer>=2 => 1 hidden layer with 2 nodes");
        System.out.println("\t\t   <layer>=2,3 => 2 hidden layer with 2 nodes on first and 3 on second layer");
        System.out.println("-I <path> \t set initial weight from <path>");
        System.out.println("-i <iteration> \t set max iteration for terminate condition");
        System.out.println("-L <rate> \t set learning rate = <rate>");
        System.out.println("-m <momentum> \t set momentum = <momentum>");
        System.out.println("-p <path> \t set data to predict");
        System.out.println("-r P|B|D \t set learning rule for OnePerceptron ");
        System.out.println("\t\t   P=Perceptron training rule,B=Batch, D=DeltaRule");
        System.out.println("-t O|M \t\t set topology, O=OnePerceptron, M=MLP");
        return;
    } else {
        trainPath = args[args.length - 1];

        int i = 0;
        while (i < args.length - 1) {
            switch (args[i]) {
            case "-a":
                switch (args[i + 1]) {
                case "N":
                    activationFunction = MyANN.SIGN_FUNCTION;
                    break;
                case "G":
                    activationFunction = MyANN.SIGMOID_FUNCTION;
                    break;
                case "T":
                    activationFunction = MyANN.STEP_FUNCTION;
                    break;
                default:
                    break;
                }
                break;
            case "-d":
                deltaMSE = Double.valueOf(args[i + 1]);
                break;
            case "-E":
                switch (args[i + 1]) {
                case "D":
                    terminateCondition = MyANN.TERMINATE_MSE;
                    break;
                case "I":
                    terminateCondition = MyANN.TERMINATE_MAX_ITERATION;
                    break;
                case "B":
                    terminateCondition = MyANN.TERMINATE_BOTH;
                    // NOTE(review): no break here — this falls through into
                    // the no-op default, so behavior is unchanged, but
                    // confirm the missing break is intentional.
                default:
                    break;
                }
                break;
            case "-e":
                // Heuristic: a value of at most 2 characters is read as a
                // fold count for cross-validation; anything longer is
                // treated as a test-set path.
                if (args[i + 1].length() <= 2) {
                    numFolds = Integer.parseInt(args[i + 1]);
                    isCV = true;
                } else {
                    isEvaluate = true;
                    testPath = args[i + 1];
                }
                break;
            case "-h":
                // Comma-separated hidden-layer sizes, e.g. "2,3"; "0" means
                // no hidden layer. nbHidden ends up as the number of hidden
                // layers, hiddenConf as their sizes.
                String[] nbl = args[i + 1].split(",");
                if (nbl.length == 1) {
                    nbHidden = Integer.parseInt(nbl[0]);
                    if (nbHidden != 0) {
                        hiddenConf = new int[1];
                        hiddenConf[0] = nbHidden;
                        nbHidden = 1;
                    }
                } else {
                    nbHidden = nbl.length;
                    hiddenConf = new int[nbHidden];
                    for (int j = 0; j < nbHidden; j++) {
                        hiddenConf[j] = Integer.parseInt(nbl[j]);
                    }
                }
                break;
            case "-I":
                weights = args[i + 1];
                break;
            case "-i":
                maxIteration = Integer.parseInt(args[i + 1]);
                break;
            case "-L":
                learningRate = Double.parseDouble(args[i + 1]);
                break;
            case "-m":
                momentum = Double.parseDouble(args[i + 1]);
                break;
            case "-p":
                predictPath = args[i + 1];
                break;
            case "-r":
                switch (args[i + 1]) {
                case "P":
                    learningRule = MyANN.PERCEPTRON_TRAINING_RULE;
                    break;
                case "B":
                    learningRule = MyANN.BATCH_GRADIENT_DESCENT;
                    break;
                case "D":
                    learningRule = MyANN.DELTA_RULE;
                    break;
                default:
                    break;
                }
                break;
            case "-t":
                switch (args[i + 1]) {
                case "O":
                    topology = MyANN.ONE_PERCEPTRON;
                    break;
                case "M":
                    topology = MyANN.MULTILAYER_PERCEPTRON;
                    break;
                default:
                    break;
                }
                break;
            default:
                break;
            }
            i += 2;
        }
    }

    // Load the datasets; the last attribute becomes the class if none is set.
    Instances trainData = null;
    Instances testData = null;
    Instances predictData = null;
    try {
        ConverterUtils.DataSource source = new ConverterUtils.DataSource(trainPath);
        trainData = source.getDataSet();
        if (trainData.classIndex() == -1) {
            trainData.setClassIndex(trainData.numAttributes() - 1);
        }
        if (testPath != null) {
            source = new ConverterUtils.DataSource(testPath);
            testData = source.getDataSet();
            if (testData.classIndex() == -1) {
                testData.setClassIndex(testData.numAttributes() - 1);
            }
        }
        if (predictPath != null) {
            source = new ConverterUtils.DataSource(predictPath);
            predictData = source.getDataSet();
            if (predictData.classIndex() == -1) {
                predictData.setClassIndex(predictData.numAttributes() - 1);
            }
        }
    } catch (Exception ex) {
        // NOTE(review): a loading failure is only logged; the code below
        // will then fail with a NullPointerException on trainData.
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }

    // Configure the model from the parsed options.
    MyANN myAnn = new MyANN();
    WeightParser wp = null;
    if (weights != null) {
        wp = new WeightParser(weights);
        myAnn.setInitialWeight(wp.weight);
    }
    myAnn.setActivationFunction(activationFunction);
    myAnn.setDeltaMSE(deltaMSE);
    myAnn.setLearningRate(learningRate);
    myAnn.setLearningRule(learningRule);
    myAnn.setMaxIteration(maxIteration);
    myAnn.setMomentum(momentum);
    myAnn.setTerminationCondition(terminateCondition);
    // NOTE(review): the threshold is set to the momentum value — there is no
    // separate threshold option, so this looks like a copy/paste slip;
    // confirm the intended value.
    myAnn.setThreshold(momentum);
    myAnn.setTopology(topology);

    // Layer sizes: input layer, optional hidden layers, output layer.
    int[] nbLayer = new int[2];
    if (nbHidden != 0) {
        nbLayer = new int[2 + nbHidden];
        for (int j = 1; j < nbLayer.length - 1; j++) {
            nbLayer[j] = hiddenConf[j - 1];
        }
    }
    nbLayer[0] = trainData.numAttributes() - 1;
    // Nominal class: one output per class value; numeric class: one output.
    if (trainData.classAttribute().isNominal())
        nbLayer[nbLayer.length - 1] = trainData.classAttribute().numValues();
    else
        nbLayer[nbLayer.length - 1] = 1;

    myAnn.setNbLayers(nbLayer);

    // debug: print the configuration
    System.out.println("training data: " + trainPath);
    System.out.println("settings:");
    myAnn.printSetting();
    System.out.println("");

    // train the classifier
    System.out.println("start classifiying...");
    try {
        myAnn.buildClassifier(trainData);
    } catch (Exception ex) {
        Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
    }
    myAnn.printSummary();
    System.out.println("done");

    System.out.println("-------------------------------------------------");

    // Evaluate: cross-validation, a separate test set, or the training data.
    System.out.print("evaluating ");
    int[][] result = null;
    int nbData = trainData.numInstances();
    if (isCV) {
        System.out.println("using " + numFolds + "-folds cross validation");
        result = myAnn.crossValidation(trainData, numFolds, new Random(1));
    } else if (isEvaluate) {
        System.out.println("using testData: " + testPath);
        result = myAnn.evaluate(testData);
        nbData = testData.numInstances();
    } else {
        System.out.println("using trainData");
        result = myAnn.evaluate(trainData);
    }
    System.out.println("");

    System.out.println("result:");

    // Confusion-matrix derived metrics.
    double accuracy = 0.0; // sum of the diagonal / total
    double[] precision = new double[result.length]; // prec[i] = M[i,i] / sumj(M[j,i])
    double[] recall = new double[result[0].length]; // rec[i] = M[i,i] / sumj(M[i,j])

    // Print the confusion matrix and accumulate the diagonal for accuracy.
    for (int i = 0; i < result.length; i++) {
        for (int j = 0; j < result[0].length; j++) {
            System.out.print(result[i][j] + " ");
            if (i == j) {
                accuracy += result[i][j];
            }
        }
        System.out.println("");
    }

    // precision: per predicted class (column sums)
    for (int i = 0; i < precision.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[j][i];
        }
        precision[i] = result[i][i] / sum;
    }

    // recall: per actual class (row sums)
    for (int i = 0; i < recall.length; i++) {
        double sum = 0.0;
        for (int j = 0; j < result[0].length; j++) {
            sum += result[i][j];
        }
        recall[i] = result[i][i] / sum;
    }

    accuracy /= nbData;
    System.out.println("");
    System.out.println("accuracy: " + accuracy);
    System.out.println("precision: ");
    for (double p : precision) {
        System.out.println(p);
    }
    System.out.println("");
    System.out.println("recall: ");
    for (double r : recall)
        System.out.println(r);
    System.out.println("");
    System.out.println("-------------------------------------------------");

    // Predict the unlabeled dataset, printing the class name per instance.
    if (predictPath != null) {
        System.out.println("predicting: " + predictPath);
        for (int i = 0; i < predictData.numInstances(); i++) {
            try {
                int idx = myAnn.predictClassIndex(myAnn.distributionForInstance(predictData.instance(i)));
                System.out.println("instance[" + (i) + "]: " + trainData.classAttribute().value(idx));
            } catch (Exception ex) {
                Logger.getLogger(Main.class.getName()).log(Level.SEVERE, null, ex);
            }
        }
        System.out.println("done");
    }
    // (A large block of commented-out experimentation code was removed here.)
}

From source file:ANN.MultilayerPerceptron.java

public MultilayerPerceptron(Instances i, double rate, int itter, int numHidden) {
    learningRate = rate;/*from w  w  w . java 2 s  . c  om*/
    listHidden = new ArrayList<>();

    for (int num = 0; num < numHidden + 1; num++) {
        listHidden.add(new Node(i.numAttributes()));
    }

    listOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listOutput.add(new Node(listHidden.size()));
    }
    itteration = itter;
    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
}

From source file:ANN.MultilayerPerceptron.java

/**
 * Trains the network by repeated passes over the data until the summed
 * squared output error drops to 0.3 or below.
 *
 * NOTE(review): the stopping criterion only looks at the output errors left
 * over from the LAST instance of the pass, summed i.numInstances() times
 * (the idxErr loop never recomputes anything per instance) — confirm
 * whether a true per-instance error sum was intended. If the threshold is
 * never reached, this loop does not terminate.
 */
@Override
public void buildClassifier(Instances i) {
    int cnt = 0; // passes since the error was last printed
    while (true) { // repeat until the error threshold is met
        for (int idxInstance = 0; idxInstance < i.numInstances(); idxInstance++) {
            // build the input vector: bias term first, then all non-class attributes
            ArrayList<Double> listInput = new ArrayList<>();
            listInput.add(1.0);
            for (int idx = 0; idx < i.numAttributes() - 1; idx++) {
                listInput.add(i.get(idxInstance).value(idx));
            }

            // forward pass through the hidden layer (index 0 is the bias output)
            ArrayList<Double> hiddenOutput = new ArrayList<>();
            hiddenOutput.add(1.0);
            for (int idxOutput = 1; idxOutput < listHidden.size(); idxOutput++) {
                output(listHidden, listInput, idxOutput);
                hiddenOutput.add(listHidden.get(idxOutput).getValue());
            }
            // forward pass through the output layer
            for (int idxOutput = 0; idxOutput < listOutput.size(); idxOutput++) {
                output(listOutput, hiddenOutput, idxOutput);
            }

            // back-propagate the error for this instance
            calculateError(idxInstance);
            // then adjust the weights
            updateWeight(listInput);
        }
        // half squared error of the output nodes (see NOTE above)
        double error = 0;
        for (int idxErr = 0; idxErr < i.numInstances(); idxErr++) {
            for (int idx = 0; idx < listOutput.size(); idx++) {
                error += Math.pow(listOutput.get(idx).getError(), 2) / 2;
            }
        }
        // print the running error every 1000 passes
        if (cnt == 1000) {
            System.out.println(error);
            System.out.println();
            cnt = 0;
        }
        cnt++;
        if (error <= 0.3)
            break;
    }
}

From source file:ANN.MultiplePerceptron.java

public MultiplePerceptron(Instances i, int numNode, double rate) {
    listNodeHidden = new ArrayList<>();
    for (int num = 0; num < numNode + 1; num++) {
        listNodeHidden.add(new Node(i.numAttributes()));
    }//from   w  w  w . j  av  a  2  s  . c o  m

    listNodeOutput = new ArrayList<>();
    for (int num = 0; num < i.numClasses(); num++) {
        listNodeOutput.add(new Node(listNodeHidden.size()));
    }

    listDoubleinstance = new double[i.numInstances()];
    for (int numIns = 0; numIns < i.numInstances(); numIns++) {
        listDoubleinstance[numIns] = i.instance(numIns).toDoubleArray()[i.classIndex()];
    }
    learningRate = rate;
}

From source file:ANN.MultiplePerceptron.java

/**
 * Trains the network for a fixed 5000 passes over the data (forward pass,
 * error back-propagation, weight update per instance), then dumps the
 * final value, error, and weights of every hidden and output node.
 */
@Override
public void buildClassifier(Instances i) {
    for (int epoch = 0; epoch < 5000; epoch++) {
        for (int instIdx = 0; instIdx < i.numInstances(); instIdx++) {
            // Input vector: bias term first, then every non-class attribute.
            ArrayList<Double> inputs = new ArrayList<>();
            inputs.add(1.0);
            int nbAttrs = i.numAttributes() - 1;
            for (int attr = 0; attr < nbAttrs; attr++) {
                inputs.add(i.get(instIdx).value(attr));
            }

            // Forward pass through the hidden layer (index 0 is the bias output).
            ArrayList<Double> hiddenOutputs = new ArrayList<>();
            hiddenOutputs.add(1.0);
            for (int h = 1; h < listNodeHidden.size(); h++) {
                double outputVal = listNodeHidden.get(h).output(inputs);
                listNodeHidden.get(h).setValue(outputVal);
                hiddenOutputs.add(outputVal);
            }

            // Forward pass through the output layer.
            for (int o = 0; o < listNodeOutput.size(); o++) {
                double outputVal = listNodeOutput.get(o).output(hiddenOutputs);
                listNodeOutput.get(o).setValue(outputVal);
            }

            // Backward pass: propagate the error, then adjust the weights.
            calculateError(instIdx);
            calculateWeight(i.instance(instIdx));
        }
    }

    // Dump the final state of both layers for inspection.
    for (int idx = 0; idx < listNodeHidden.size(); idx++) {
        System.out.println("Hidden value " + listNodeHidden.get(idx).getValue());
        System.out.println("Hidden error " + listNodeHidden.get(idx).getError());
        for (int w = 0; w < listNodeHidden.get(idx).getWeightSize(); w++)
            System.out.println("Hidden weight" + listNodeHidden.get(idx).getWeightFromList(w));
    }
    System.out.println();
    for (int idx = 0; idx < listNodeOutput.size(); idx++) {
        System.out.println("Output value " + listNodeOutput.get(idx).getValue());
        System.out.println("Output error " + listNodeOutput.get(idx).getError());
        for (int w = 0; w < listNodeOutput.get(idx).getWeightSize(); w++)
            System.out.println("Output weight" + listNodeOutput.get(idx).getWeightFromList(w));
    }
}