Example usage for org.deeplearning4j.eval Evaluation eval

List of usage examples for org.deeplearning4j.eval Evaluation eval

Introduction

In this page you can find the example usage for org.deeplearning4j.eval Evaluation eval.

Prototype

public void eval(INDArray realOutcomes, INDArray guesses) 

Source Link

Document

Collects statistics on the real outcomes vs the guesses.

Usage

From source file:com.example.android.displayingbitmaps.ui.ImageGridActivity.java

License:Apache License

/**
 * Trains a two-layer MLP on MNIST and logs evaluation statistics.
 * <p>
 * Each mini-batch is split 80/20: the training portion is fit immediately,
 * the held-out portion is accumulated and evaluated after training completes.
 *
 * @throws Exception if the MNIST data set cannot be loaded
 */
public void trainMLP() throws Exception {
    Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
    final int numRows = 28;
    final int numColumns = 28;
    int outputNum = 10; // digit classes 0-9
    int numSamples = 10000;
    int batchSize = 500;
    int iterations = 10;
    int seed = 123;
    int listenerFreq = iterations / 5;
    int splitTrainNum = (int) (batchSize * .8); // 80% of each batch used for training
    DataSet mnist;
    SplitTestAndTrain trainTest;
    DataSet trainInput;
    List<INDArray> testInput = new ArrayList<>();
    List<INDArray> testLabels = new ArrayList<>();

    log.info("Load data....");
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(iterations)
            .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).learningRate(1e-1f)
            .momentum(0.5).momentumAfter(Collections.singletonMap(3, 0.9)).useDropConnect(true).list(2)
            .layer(0,
                    new DenseLayer.Builder().nIn(numRows * numColumns).nOut(1000).activation("relu")
                            .weightInit(WeightInit.XAVIER).build())
            .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).nIn(1000).nOut(outputNum)
                    .activation("softmax").weightInit(WeightInit.XAVIER).build())
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    // Register the score listener once (the original registered it twice).
    model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq)));

    log.info("Train model....");
    while (mnistIter.hasNext()) {
        mnist = mnistIter.next();
        trainTest = mnist.splitTestAndTrain(splitTrainNum, new Random(seed)); // split batch into train/test
        trainInput = trainTest.getTrain(); // feature matrix and labels for training
        testInput.add(trainTest.getTest().getFeatureMatrix());
        testLabels.add(trainTest.getTest().getLabels());
        model.fit(trainInput);
    }

    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputNum);
    for (int i = 0; i < testInput.size(); i++) {
        INDArray output = model.output(testInput.get(i));
        eval.eval(testLabels.get(i), output); // accumulate label vs. prediction statistics
    }

    log.info(eval.stats());
    log.info("****************Example finished********************");
}

From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsBackprop.java

License:Apache License

/**
 * Entry point: trains a single-hidden-layer MLP on MNIST with backpropagation,
 * uses early stopping against a validation set, and evaluates the best model.
 *
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows();
        int numOutputs = 10; // digit classes 0-9
        int numHiddenNodes = 200;

        // Create neural network: input -> dense(relu, dropout) -> softmax output.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).regularization(true).dropOut(0.50).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                                .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes)
                                .nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Stop training when the validation score stops improving for 5 epochs.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate the best model, one validation example at a time.
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted); // accumulate label vs. prediction statistics
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsConv.java

License:Apache License

/**
 * Entry point: trains a small convolutional network (conv -> max-pool ->
 * dense -> softmax) on MNIST with early stopping, then evaluates the best
 * model on the validation set.
 *
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int batchSize = 500;
        int channels = 1; // greyscale input

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        int numOutputs = 10; // digit classes 0-9

        // Create neural network. Use the declared learningRate constant instead
        // of repeating the literal 0.01 (same value, single source of truth).
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .regularization(true).l2(0.0005).learningRate(learningRate).weightInit(WeightInit.XAVIER)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(Updater.NESTEROVS)
                .momentum(0.9).list(4)
                .layer(0,
                        new ConvolutionLayer.Builder(5, 5).nIn(channels).stride(1, 1).nOut(20).dropOut(0.5)
                                .activation("relu").build())
                .layer(1,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                .stride(2, 2).build())
                .layer(2, new DenseLayer.Builder().activation("relu").nOut(500).build())
                .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                        .activation("softmax").build())
                .backprop(true).pretrain(false);

        // Wire up the 28x28x1 image input shape for the convolutional layers.
        new ConvolutionLayerSetup(builder, 28, 28, 1);
        MultiLayerConfiguration conf = builder.build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Stop training when the validation score stops improving for 5 epochs.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate the best model, one validation example at a time.
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted); // accumulate label vs. prediction statistics
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsDropout.java

License:Apache License

/**
 * The main method./*from   www  .j  ava  2  s .  c  o m*/
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows();
        int numOutputs = 10;
        int numHiddenNodes = 100;

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                                .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes)
                                .nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:com.heatonresearch.aifh.examples.ann.LearnIrisBackprop.java

License:Apache License

/**
 * The main method./*from w  ww  .  j  a  v  a2s  . c o m*/
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 0.1;
        int splitTrainNum = (int) (150 * .75);

        int numInputs = 4;
        int numOutputs = 3;
        int numHiddenNodes = 50;

        // Setup training data.
        final InputStream istream = LearnIrisBackprop.class.getResourceAsStream("/iris.csv");
        if (istream == null) {
            System.out.println("Cannot access data set, make sure the resources are available.");
            System.exit(1);
        }
        final NormalizeDataSet ds = NormalizeDataSet.load(istream);
        final CategoryMap species = ds.encodeOneOfN(4); // species is column 4
        istream.close();

        DataSet next = ds.extractSupervised(0, 4, 4, 3);
        next.shuffle();

        // Training and validation data split
        SplitTestAndTrain testAndTrain = next.splitTestAndTrain(splitTrainNum, new Random(seed));
        DataSet trainSet = testAndTrain.getTrain();
        DataSet validationSet = testAndTrain.getTest();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainSet.asList(), trainSet.numExamples());

        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationSet.numExamples());

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                                .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes)
                                .nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(500)) //Max of 50 epochs
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(25))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            System.out.println(features + ":Prediction(" + findSpecies(labels, species) + "):Actual("
                    + findSpecies(predicted, species) + ")" + predicted);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:com.javafxpert.neuralnetviz.scenario.CSVExample.java

License:Apache License

/**
 * Builds and trains a three-layer classifier for the iris data set (CSV
 * resource), evaluates it on a held-out 35% test split, and runs one sample
 * prediction. Training progress is streamed over the given WebSocket session.
 *
 * @param webSocketSession session the ModelListener reports training progress to
 * @return the trained network, with the fitted DataNormalization attached
 * @throws Exception if the iris.txt classpath resource cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {

    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new FileSplit(new ClassPathResource("iris.txt").getFile()));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in neural network
    int labelIndex = 4; //5 values in each row of the iris.txt CSV: 4 input features followed by an integer label (class) index. Labels are the 5th value (index 4) in each row
    int numClasses = 3; //3 classes (types of iris flowers) in the iris data set. Classes have integer values 0, 1 or 2
    int batchSize = 150; //Iris data set: 150 examples total. We are loading all of them into one DataSet (not recommended for large data sets)

    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); //Use 65% of data for training

    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizeStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData); //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData); //Apply normalization to the training data
    normalizer.transform(testData); //Apply normalization to the test data. This is using statistics calculated from the *training* set

    final int numInputs = 4;
    int outputNum = 3;
    int iterations = 1000;
    long seed = 6;

    log.info("Build model....");
    // Two tanh hidden layers of width 3, softmax output over the 3 species.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
            .activation("tanh").weightInit(WeightInit.XAVIER).learningRate(0.1).regularization(true).l2(1e-4)
            .list().layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3).build())
            .layer(1, new DenseLayer.Builder().nIn(3).nOut(3).build())
            .layer(2,
                    new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                            .activation("softmax").nIn(3).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    //run the model
    String[] inputFeatureNames = { "Sepal length (4.3-7.9)", "Sepal width (2.0-4.4)", "Petal length (1.0-6.9)",
            "Petal width (0.1-2.5)" };
    String[] outputLabelNames = { "Iris setosa", "Iris versicolor", "Iris virginica" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.setDataNormalization(normalizer);

    model.init();
    // Stream score updates to the browser every 100 iterations.
    model.setListeners(new ModelListener(100, webSocketSession));

    model.fit(trainingData);

    //evaluate the model on the test set
    Evaluation eval = new Evaluation(3);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    log.info(eval.stats());

    // Make prediction: Expecting 0
    // NOTE(review): ds wraps the same INDArray as `example`, so transform(ds)
    // presumably normalizes `example` in place before predict() — confirm
    // against the NormalizerStandardize implementation.
    INDArray example = Nd4j.zeros(1, 4);
    example.putScalar(new int[] { 0, 0 }, 5.1);
    example.putScalar(new int[] { 0, 1 }, 3.5);
    example.putScalar(new int[] { 0, 2 }, 1.4);
    example.putScalar(new int[] { 0, 3 }, 0.2);
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 5.1,3.5,1.4,0.2: " + prediction[0]);

    return model;
}

From source file:com.javafxpert.neuralnetviz.scenario.MLPClassifierMoon.java

License:Apache License

/**
 * Builds and trains an MLP classifier on the saturn (planet vs. ring) data
 * set, evaluates it on the held-out evaluation CSV, and runs one sample
 * prediction. Training progress is streamed over the given WebSocket session.
 *
 * @param webSocketSession session the ModelListener reports training progress to
 * @return the trained network
 * @throws Exception if the training or evaluation CSV files cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    int seed = 123;
    double learningRate = 0.005;
    int batchSize = 50;
    int nEpochs = 100;

    int numInputs = 2;
    int numOutputs = 2;
    int numHiddenNodes = 8;

    //Load the training data:
    RecordReader rr = new CSVRecordReader();
    rr.initialize(new FileSplit(new File("src/main/resources/classification/saturn_data_train.csv")));
    DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, batchSize, 0, 2);

    //Load the test/evaluation data:
    RecordReader rrTest = new CSVRecordReader();
    rrTest.initialize(new FileSplit(new File("src/main/resources/classification/saturn_data_eval.csv")));
    DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest, batchSize, 0, 2);

    // Network: input -> dense(relu) -> softmax output.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
            .updater(Updater.NESTEROVS).momentum(0.9).list()
            .layer(0,
                    new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes).weightInit(WeightInit.XAVIER)
                            .activation("relu").build())
            .layer(1,
                    new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).weightInit(WeightInit.XAVIER)
                            .activation("softmax").nIn(numHiddenNodes).nOut(numOutputs).build())
            .pretrain(false).backprop(true).build();

    String[] inputFeatureNames = { "x (-1.52 .. 2.54)", "y (-1.06 .. 1.58)" };
    String[] outputLabelNames = { "planet", "ring" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    // Stream score updates to the browser every 100 parameter updates.
    model.setListeners(new ModelListener(100, webSocketSession));

    // Train for a fixed number of epochs over the full training iterator.
    for (int n = 0; n < nEpochs; n++) {
        model.fit(trainIter);
    }

    System.out.println("Evaluate model....");
    Evaluation eval = new Evaluation(numOutputs);
    while (testIter.hasNext()) {
        DataSet t = testIter.next();
        INDArray features = t.getFeatureMatrix();
        INDArray labels = t.getLabels();
        INDArray predicted = model.output(features, false);

        eval.eval(labels, predicted); // accumulate label vs. prediction statistics
    }

    //Print the evaluation statistics
    System.out.println(eval.stats());

    // Make prediction for the point (9.8520, -1.9809).
    // (The original comment claimed input 0.6236,-0.7822 — it was stale.)
    INDArray example = Nd4j.zeros(1, 2);
    example.putScalar(new int[] { 0, 0 }, 9.8520);
    example.putScalar(new int[] { 0, 1 }, -1.9809);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 9.8520, -1.9809: " + prediction[0]);

    System.out.println("****************Example finished********************");

    return model;
}

From source file:com.javafxpert.neuralnetviz.scenario.SpeedDating.java

License:Apache License

/**
 * Builds and trains a classifier on the speed-dating data set (predicting
 * whether there will be a second date from three rating features), evaluates
 * it on a held-out 35% test split, and runs one sample prediction. Training
 * progress is streamed over the given WebSocket session.
 *
 * @param webSocketSession session the ModelListener reports training progress to
 * @return the trained network, with the fitted DataNormalization attached
 * @throws Exception if the speed-dating CSV file cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {

    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    org.datavec.api.records.reader.RecordReader recordReader = new org.datavec.api.records.reader.impl.csv.CSVRecordReader(
            numLinesToSkip, delimiter);
    recordReader.initialize(new org.datavec.api.split.FileSplit(
            new File("src/main/resources/classification/speed_dating_all.csv")));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in neural network
    int labelIndex = 0; //the label (second date or not) is the first value (index 0) in each row; the remaining values are input features
    int numClasses = 2; //binary outcome: no second date (0) or date again (1)
    int batchSize = 8378; //speed-dating data set: 8378 examples total. We are loading all of them into one DataSet (not recommended for large data sets)

    DataSetIterator iterator = new org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator(recordReader,
            batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); //Use 65% of data for training

    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizeStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData); //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData); //Apply normalization to the training data
    normalizer.transform(testData); //Apply normalization to the test data. This is using statistics calculated from the *training* set

    final int numInputs = 3;
    int outputNum = 2;
    int iterations = 300;
    long seed = 6;

    // Network: one tanh hidden layer of width 4, softmax output over 2 classes.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
            .activation("tanh").weightInit(WeightInit.XAVIER).learningRate(0.1).regularization(true).l2(1e-4)
            .list().layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(4).build())
            .layer(1,
                    new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                            .activation("softmax").nIn(4).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    String[] inputFeatureNames = { "Attractive (1-10)", "Intelligent (1-10)", "Fun (1-10)" };
    String[] outputLabelNames = { "No second date", "Date again" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    // Stream score updates to the browser every 10 parameter updates.
    model.setListeners(new ModelListener(10, webSocketSession));
    model.setDataNormalization(normalizer);

    model.fit(trainingData);

    //evaluate the model on the test set
    Evaluation eval = new Evaluation(outputNum);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    System.out.println(eval.stats());

    // Make prediction
    // Input: 7, 8, 9  Expected output: ?
    // NOTE(review): ds wraps the same INDArray as `example`, so transform(ds)
    // presumably normalizes `example` in place before predict() — confirm.
    INDArray example = Nd4j.zeros(1, 3);
    example.putScalar(new int[] { 0, 0 }, 7);
    example.putScalar(new int[] { 0, 1 }, 8);
    example.putScalar(new int[] { 0, 2 }, 9);
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 7 (attractive), 8 (intelligent), 9 (fun): " + prediction[0]);

    System.out.println("****************Example finished********************");

    return model;
}

From source file:com.javafxpert.neuralnetviz.scenario.WineClassifier.java

License:Apache License

public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    // Read the raw wine CSV into a single in-memory DataSet.
    RecordReader recordReader = new CSVRecordReader();
    recordReader.initialize(new FileSplit(new File("src/main/resources/classification/wine.data")));

    DataSetIterator dataIter = new RecordReaderDataSetIterator(recordReader, FILE_SIZE, CLASS_INDEX, NUM_OF_CLASSES);
    DataSet allWineData = dataIter.next();
    allWineData.shuffle();

    // Carve the shuffled data into training and held-out test partitions.
    SplitTestAndTrain split = allWineData.splitTestAndTrain(DATA_SPLIT_TRAIN_TEST);
    DataSet trainSet = split.getTrain();
    DataSet testSet = split.getTest();

    // Zero-mean / unit-variance scaling; statistics come from the training
    // partition only and are then applied to both partitions.
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainSet);
    normalizer.transform(trainSet);
    normalizer.transform(testSet);

    // Single-hidden-layer MLP: INPUT_NEURONS -> HIDDEN_NEURONS -> OUTPUT_NEURONS,
    // trained with SGD + Nesterov momentum and L2 regularization.
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(SEED)
            .iterations(ITERATIONS)
            .weightInit(WeightInit.RELU)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(EPSILON)
            .regularization(true)
            .l2(1e-4)
            .updater(Updater.NESTEROVS)
            .momentum(ALPHA)
            .list()
            .layer(0, new DenseLayer.Builder()
                    .nIn(INPUT_NEURONS)
                    .nOut(HIDDEN_NEURONS)
                    .activation(HIDDEN_LAYER_ACTIVATION)
                    .build())
            .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nIn(HIDDEN_NEURONS)
                    .nOut(OUTPUT_NEURONS)
                    .activation(OUTPUT_LAYER_ACTIVATION)
                    .build())
            .pretrain(false)
            .backprop(true)
            .build();

    String[] inputFeatureNames = { "Alcohol (11.0-14.9)", "Malic acid (0.7-5.8)", "Ash (1.3-3.3)",
            "Alcalinity of ash (10.6-30.0)", "Magnesium (70-162)", "Total phenols (0.9-3.9)",
            "Flavanoids (0.30-5.1)", "Nonflavanoid phenols (0.1-0.7)", "Proanthocyanins (0.4-3.6)",
            "Color intensity (1.2-13.0)", "Hue (0.4-1.8)", "OD280/OD315 of diluted (1.2-4.0)",
            "Proline (278-1680)" };
    String[] outputLabelNames = { "Cultivar A", "Cultivar B", "Cultivar C" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    // Stream training progress to the browser over the given WebSocket session.
    model.setListeners(new ModelListener(10, webSocketSession));
    model.setDataNormalization(normalizer);

    for (int epoch = 0; epoch < EPOCHS; epoch++) {
        model.fit(trainSet);
    }

    System.out.println("Evaluate model....");
    Evaluation eval = new Evaluation(OUTPUT_NEURONS);
    INDArray output = model.output(testSet.getFeatureMatrix());

    System.out.println(testSet.getFeatureMatrix());
    System.out.println(output);

    eval.eval(testSet.getLabels(), output);

    System.out.println(eval.stats());

    return model;

}

From source file:com.javafxpert.neuralnetviz.scenario.XorExample.java

License:Apache License

/**
 * Builds and trains a tiny 2-2-2 network on the XOR truth table, prints the
 * evaluation statistics, demonstrates a single prediction, and returns the
 * trained network.
 *
 * @param webSocketSession session used by the ModelListener to stream
 *                         training progress to the browser
 * @return the trained network (listeners attached, weights fitted)
 * @throws Exception propagated from the underlying DL4J training machinery
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    // Four training samples, one per XOR truth-table row, two input neurons each.
    INDArray input = Nd4j.zeros(4, 2);
    // Corresponding one-hot labels: column 0 fires for "false", column 1 for "true".
    INDArray labels = Nd4j.zeros(4, 2);

    // 0 XOR 0 = false -> first output neuron fires
    input.putScalar(new int[] { 0, 0 }, 0);
    input.putScalar(new int[] { 0, 1 }, 0);
    labels.putScalar(new int[] { 0, 0 }, 1);
    labels.putScalar(new int[] { 0, 1 }, 0);

    // 1 XOR 0 = true -> second output neuron fires
    input.putScalar(new int[] { 1, 0 }, 1);
    input.putScalar(new int[] { 1, 1 }, 0);
    labels.putScalar(new int[] { 1, 0 }, 0);
    labels.putScalar(new int[] { 1, 1 }, 1);

    // 0 XOR 1 = true
    input.putScalar(new int[] { 2, 0 }, 0);
    input.putScalar(new int[] { 2, 1 }, 1);
    labels.putScalar(new int[] { 2, 0 }, 0);
    labels.putScalar(new int[] { 2, 1 }, 1);

    // 1 XOR 1 = false
    input.putScalar(new int[] { 3, 0 }, 1);
    input.putScalar(new int[] { 3, 1 }, 1);
    labels.putScalar(new int[] { 3, 0 }, 1);
    labels.putScalar(new int[] { 3, 1 }, 0);

    DataSet ds = new DataSet(input, labels);

    // Set up the network configuration.
    NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder();
    // How often the training set is run; this small net needs many iterations
    // (or a higher learning rate) — value found by trial and error.
    builder.iterations(10000);
    builder.learningRate(0.1);
    // Fixed seed so repeated runs give the same result (may not hold if the
    // data is shuffled, e.g. via ds.shuffle()).
    builder.seed(123);
    // Drop-connect is regularization for larger nets; pointless at this size.
    builder.useDropConnect(false);
    // SGD works best here empirically; LINE_GRADIENT_DESCENT or
    // CONJUGATE_GRADIENT would also do the job.
    builder.optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT);
    // Initialize biases to 0 — empirical choice.
    builder.biasInit(0);
    // Mini-batching hurts here because the whole dataset (4 rows) is smaller
    // than a typical mini batch.
    builder.miniBatch(false);

    // Two layers total: one hidden layer plus the output layer
    // (the input layer is implicit).
    ListBuilder listBuilder = builder.list();

    DenseLayer.Builder hiddenLayerBuilder = new DenseLayer.Builder();
    // nIn on the first non-input layer also fixes the number of input neurons.
    hiddenLayerBuilder.nIn(2);
    // nOut is the number of neurons in this layer.
    hiddenLayerBuilder.nOut(2);
    // Sigmoid caps each activation between 0 and 1.
    hiddenLayerBuilder.activation("sigmoid");
    // Randomly initialize weights uniformly in [0, 1].
    hiddenLayerBuilder.weightInit(WeightInit.DISTRIBUTION);
    hiddenLayerBuilder.dist(new UniformDistribution(0, 1));

    listBuilder.layer(0, hiddenLayerBuilder.build());

    // MCXENT or NEGATIVELOGLIKELIHOOD both work for this classification task;
    // the loss function measures the error being minimized.
    Builder outputLayerBuilder = new Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD);
    // Must match the neuron count of the previous layer.
    outputLayerBuilder.nIn(2);
    outputLayerBuilder.nOut(2);
    outputLayerBuilder.activation("sigmoid");
    outputLayerBuilder.weightInit(WeightInit.DISTRIBUTION);
    outputLayerBuilder.dist(new UniformDistribution(0, 1));
    listBuilder.layer(1, outputLayerBuilder.build());

    // No unsupervised pretraining phase; plain backprop only. pretrain(true)
    // is for autoencoder/RBM fine-tuning workflows.
    listBuilder.pretrain(false);
    listBuilder.backprop(true);

    // Building the configuration validates it.
    MultiLayerConfiguration conf = listBuilder.build();

    String[] inputFeatureNames = { "true (1) or false (0)", "true (1) or false (0)" };
    String[] outputLabelNames = { "false", "true" };
    MultiLayerNetworkEnhanced net = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    net.init();

    // Stream training progress to the browser every 100 parameter updates.
    net.setListeners(new ModelListener(100, webSocketSession));

    // Train on the full XOR dataset.
    net.fit(ds);

    // Score the network against its own training data (fine for a toy
    // problem where the four rows ARE the entire input space).
    INDArray output = net.output(ds.getFeatureMatrix());

    // Report how often the correct output neuron had the highest activation.
    Evaluation eval = new Evaluation(2);
    eval.eval(ds.getLabels(), output);
    System.out.println(eval.stats());

    // Demonstrate a single prediction: inputs (0, 1) should predict "true".
    INDArray example = Nd4j.zeros(1, 2);
    example.putScalar(new int[] { 0, 0 }, 0);
    example.putScalar(new int[] { 0, 1 }, 1);

    int[] prediction = net.predict(example);

    System.out.println("prediction for 0, 1: " + prediction[0]);

    return net;
}