Example usage for org.deeplearning4j.nn.multilayer MultiLayerNetwork output

Introduction

On this page you can find example usage for org.deeplearning4j.nn.multilayer.MultiLayerNetwork.output.

Prototype

public INDArray output(DataSetIterator iterator, boolean train) 

Document

Generate the output for all examples/batches in the input iterator, and concatenate them into a single array.
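
Most of the examples below use the sibling overload output(INDArray features, boolean train) on a feature matrix directly. As a minimal sketch of the iterator overload above (the class and method names are illustrative, not part of the library), note that passing train=false runs the network in inference mode, so train-only behavior such as dropout is disabled:

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.api.ndarray.INDArray;
import org.nd4j.linalg.dataset.api.iterator.DataSetIterator;

public class OutputIteratorSketch {
    // Runs one forward pass per batch in the iterator and returns the
    // row-wise concatenation of all batch outputs.
    static INDArray scoreAll(MultiLayerNetwork model, DataSetIterator data) {
        data.reset(); // start from the first batch
        return model.output(data, false); // false = inference mode
    }
}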

Usage

From source file:com.heatonresearch.aifh.examples.ann.LearnAutoMPGBackprop.java

License:Apache License

/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 0.01;

        // Setup training data.
        final InputStream istream = LearnAutoMPGBackprop.class.getResourceAsStream("/auto-mpg.data.csv");
        if (istream == null) {
            System.out.println("Cannot access data set, make sure the resources are available.");
            System.exit(1);
        }
        final NormalizeDataSet ds = NormalizeDataSet.load(istream);
        istream.close();

        // The following ranges are set up for the Auto MPG data set. If you wish to normalize
        // other files you will need to modify the function calls below accordingly.

        // First remove some columns that we will not use:
        ds.deleteColumn(8); // Car name
        ds.deleteColumn(7); // Car origin
        ds.deleteColumn(6); // Year
        ds.deleteUnknowns();

        ds.normalizeZScore(1);
        ds.normalizeZScore(2);
        ds.normalizeZScore(3);
        ds.normalizeZScore(4);
        ds.normalizeZScore(5);

        DataSet next = ds.extractSupervised(1, 4, 0, 1);
        next.shuffle();

        // Training and validation data split
        int splitTrainNum = (int) (next.numExamples() * .75);
        SplitTestAndTrain testAndTrain = next.splitTestAndTrain(splitTrainNum, new Random(seed));
        DataSet trainSet = testAndTrain.getTrain();
        DataSet validationSet = testAndTrain.getTest();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainSet.asList(), trainSet.numExamples());

        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationSet.numExamples());

        // Create neural network.
        int numInputs = next.numInputs();
        int numOutputs = next.numOutcomes();
        int numHiddenNodes = 50;

        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.MSE).weightInit(WeightInit.XAVIER)
                                .activation("identity").nIn(numHiddenNodes).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(500)) //Max of 500 epochs
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(25))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate validation set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            System.out.println(features + ":Prediction(" + predicted + "):Actual(" + labels + ")");
        }

    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsBackprop.java

License:Apache License

/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows();
        int numOutputs = 10;
        int numHiddenNodes = 200;

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).regularization(true).dropOut(0.50).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                                .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes)
                                .nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate validation set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsConv.java

License:Apache License

/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;
        int channels = 1;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        int numOutputs = 10;

        // Create neural network.
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .regularization(true).l2(0.0005).learningRate(0.01).weightInit(WeightInit.XAVIER)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(Updater.NESTEROVS)
                .momentum(0.9).list(4)
                .layer(0,
                        new ConvolutionLayer.Builder(5, 5).nIn(channels).stride(1, 1).nOut(20).dropOut(0.5)
                                .activation("relu").build())
                .layer(1,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                .stride(2, 2).build())
                .layer(2, new DenseLayer.Builder().activation("relu").nOut(500).build())
                .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10)
                        .activation("softmax").build())
                .backprop(true).pretrain(false);

        new ConvolutionLayerSetup(builder, 28, 28, 1);
        MultiLayerConfiguration conf = builder.build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate validation set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsDropout.java

License:Apache License

/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows();
        int numOutputs = 10;
        int numHiddenNodes = 100;

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                                .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes)
                                .nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate validation set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }

}

From source file:com.heatonresearch.aifh.examples.ann.LearnIrisBackprop.java

License:Apache License

/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 0.1;
        int splitTrainNum = (int) (150 * .75);

        int numInputs = 4;
        int numOutputs = 3;
        int numHiddenNodes = 50;

        // Setup training data.
        final InputStream istream = LearnIrisBackprop.class.getResourceAsStream("/iris.csv");
        if (istream == null) {
            System.out.println("Cannot access data set, make sure the resources are available.");
            System.exit(1);
        }
        final NormalizeDataSet ds = NormalizeDataSet.load(istream);
        final CategoryMap species = ds.encodeOneOfN(4); // species is column 4
        istream.close();

        DataSet next = ds.extractSupervised(0, 4, 4, 3);
        next.shuffle();

        // Training and validation data split
        SplitTestAndTrain testAndTrain = next.splitTestAndTrain(splitTrainNum, new Random(seed));
        DataSet trainSet = testAndTrain.getTrain();
        DataSet validationSet = testAndTrain.getTest();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainSet.asList(), trainSet.numExamples());

        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationSet.numExamples());

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9).list(2)
                .layer(0,
                        new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                                .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1,
                        new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                                .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes)
                                .nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(500)) //Max of 500 epochs
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(25))
                .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate validation set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();

        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            System.out.println(features + ":Prediction(" + findSpecies(predicted, species) + "):Actual("
                    + findSpecies(labels, species) + ")" + predicted);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}

From source file:examples.cnn.NetworkTrainer.java

License:Apache License

public void train(JavaRDD<DataSet> train, JavaRDD<DataSet> test) {

    int batchSize = 12 * cores;
    int lrCount = 0;
    double bestAccuracy = Double.NEGATIVE_INFINITY; // note: Double.MIN_VALUE is the smallest positive double, not the lowest value

    double learningRate = initialLearningRate;

    int trainCount = Long.valueOf(train.count()).intValue();
    log.info("Number of training images {}", trainCount);
    log.info("Number of test images {}", test.count());

    MultiLayerNetwork net = new MultiLayerNetwork(
            model.apply(learningRate, width, height, channels, numLabels));
    net.init();

    Map<Integer, Double> acc = new HashMap<>();
    for (int i = 0; i < epochs; i++) {

        SparkDl4jMultiLayer sparkNetwork = networkToSparkNetwork.apply(net);
        final MultiLayerNetwork nn = sparkNetwork.fitDataSet(train, batchSize, trainCount, cores);
        log.info("Epoch {} completed", i);

        JavaPairRDD<Object, Object> predictionsAndLabels = test.mapToPair(
                ds -> new Tuple2<>(label(nn.output(ds.getFeatureMatrix(), false)), label(ds.getLabels())));
        MulticlassMetrics metrics = new MulticlassMetrics(predictionsAndLabels.rdd());
        double accuracy = 1.0 * predictionsAndLabels.filter(x -> x._1.equals(x._2)).count() / test.count();
        log.info("Epoch {} accuracy {} ", i, accuracy);
        acc.put(i, accuracy);
        predictionsAndLabels.take(10).forEach(t -> log.info("predicted {}, label {}", t._1, t._2));
        log.info("confusionMatrix {}", metrics.confusionMatrix());

        INDArray params = nn.params();
        if (accuracy > bestAccuracy) {
            bestAccuracy = accuracy;
            try {
                ModelSerializer.writeModel(nn, new File(workingDir, Double.toString(accuracy)), false);
            } catch (IOException e) {
                log.error("Error writing trained model", e);
            }
            lrCount = 0;
        } else {

            if (++lrCount % stepDecayTreshold == 0) {
                learningRate *= learningRateDecayFactor;
            }
            if (lrCount >= resetLearningRateThreshold) {
                lrCount = 0;
                learningRate = initialLearningRate;
            }
            if (learningRate < minimumLearningRate) {
                lrCount = 0;
                learningRate = initialLearningRate;
            }
            if (bestAccuracy - accuracy > downgradeAccuracyThreshold) {
                params = ModelLoader.load(workingDir, bestAccuracy);
            }
        }
        net = new MultiLayerNetwork(model.apply(learningRate, width, height, channels, numLabels));
        net.init();
        net.setParameters(params);
        log.info("Learning rate {} for epoch {}", learningRate, i + 1);
    }
    log.info("Training completed");

}

From source file:org.knime.ext.dl4j.base.nodes.predict.AbstractDLPredictorNodeModel.java

License:Open Source License

/**
 * Creates output for an input {@link INDArray}. The input array must contain one example to predict per row.
 * Returns an {@link INDArray} with 'number of outputs' columns and 'number of examples' rows, where the number of
 * examples equals the number of rows of the input array.
 *
 * @param mln the network to use for prediction
 * @param input the input used to create output
 * @return array containing the output of the network for each row of the input
 */
protected INDArray predict(final MultiLayerNetwork mln, final INDArray input) {
    final INDArray output = Nd4j.create(input.rows(), getNumberOfOutputs(mln));
    for (int i = 0; i < input.rows(); i++) {
        output.putRow(i, mln.output(input.getRow(i), false));
    }
    return output;
}
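
The per-row loop above produces the same result as a single batched forward pass, since output(INDArray, boolean) already returns one output row per input row. A minimal alternative sketch, assuming the whole input fits in memory as one minibatch (predictBatched is an illustrative name, not part of the original class):

protected INDArray predictBatched(final MultiLayerNetwork mln, final INDArray input) {
    // One forward pass over all rows; row i of the result is the
    // network output for row i of the input.
    return mln.output(input, false);
}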

From source file:org.wso2.carbon.ml.rest.api.neuralNetworks.FeedForwardNetwork.java

License:Open Source License

/**
 * Method to create a feed-forward network.
 * @param seed
 * @param learningRate
 * @param analysisID
 * @param batchSize
 * @param backprop
 * @param hiddenList
 * @param inputLayerNodes
 * @param iterations
 * @param versionID
 * @param momentum
 * @param nepoches
 * @param datasetId
 * @param noHiddenLayers
 * @param optimizationAlgorithms
 * @param outputList
 * @param pretrain
 * @param updater
 * @return a String object with the evaluation result.
 */
public String createFeedForwardNetwork(long seed, double learningRate, int batchSize, double nepoches,
        int iterations, String optimizationAlgorithms, String updater, double momentum, boolean pretrain,
        boolean backprop, int noHiddenLayers, int inputLayerNodes, int datasetId, int versionID, int analysisID,
        List<HiddenLayerDetails> hiddenList, List<OutputLayerDetails> outputList)
        throws IOException, InterruptedException {

    String evaluationDetails = null;
    int numLinesToSkip = 0;
    String delimiter = ",";
    mlDataSet = getDatasetPath(datasetId, versionID);
    analysisFraction = getAnalysisFraction(analysisID);
    analysisResponceVariable = getAnalysisResponseVariable(analysisID);
    responseIndex = getAnalysisResponseVariableIndex(analysisID);
    SplitTestAndTrain splitTestAndTrain;
    DataSet currentDataset;
    DataSet trainingSet = null;
    DataSet testingSet = null;
    INDArray features = null;
    INDArray labels = null;
    INDArray predicted = null;
    Random rnd = new Random();
    int labelIndex = 0;
    int numClasses = 0;
    int fraction = 0;

    //Initialize RecordReader
    RecordReader rr = new CSVRecordReader(numLinesToSkip, delimiter);
    //read the dataset
    rr.initialize(new FileSplit(new File(mlDataSet)));
    labelIndex = responseIndex;
    numClasses = outputList.get(0).outputNodes;

    //Get the fraction used to split the data into training and testing sets
    FileReader fr = new FileReader(mlDataSet);
    LineNumberReader lineNumberReader = new LineNumberReader(fr);
    //Get the total number of lines
    lineNumberReader.skip(Long.MAX_VALUE);
    int lines = lineNumberReader.getLineNumber();

    //Handle a zero fraction (the training split below would otherwise be empty)
    if (analysisFraction == 0) {
        return null;
    }

    //Take the floor value to set the number of examples held for training
    fraction = ((int) Math.floor(lines * analysisFraction));

    org.nd4j.linalg.dataset.api.iterator.DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, lines,
            labelIndex, numClasses);

    //Create NeuralNetConfiguration object having basic settings.
    NeuralNetConfiguration.ListBuilder neuralNetConfiguration = new NeuralNetConfiguration.Builder().seed(seed)
            .iterations(iterations).optimizationAlgo(mapOptimizationAlgorithm(optimizationAlgorithms))
            .learningRate(learningRate).updater(mapUpdater(updater)).momentum(momentum)
            .list(noHiddenLayers + 1);

    //Add Hidden Layers to the network with unique settings
    for (int i = 0; i < noHiddenLayers; i++) {
        int nInput = 0;
        if (i == 0)
            nInput = inputLayerNodes;
        else
            nInput = hiddenList.get(i - 1).hiddenNodes;

        neuralNetConfiguration.layer(i,
                new DenseLayer.Builder().nIn(nInput).nOut(hiddenList.get(i).hiddenNodes)
                        .weightInit(mapWeightInit(hiddenList.get(i).weightInit))
                        .activation(hiddenList.get(i).activationAlgo).build());
    }

    //Add Output Layers to the network with unique settings
    neuralNetConfiguration.layer(noHiddenLayers,
            new OutputLayer.Builder(mapLossFunction(outputList.get(0).lossFunction))
                    .nIn(hiddenList.get(noHiddenLayers - 1).hiddenNodes).nOut(outputList.get(0).outputNodes)
                    .weightInit(mapWeightInit(outputList.get(0).weightInit))
                    .activation(outputList.get(0).activationAlgo).build());

    //Create MultiLayerConfiguration network
    MultiLayerConfiguration conf = neuralNetConfiguration.pretrain(pretrain).backprop(backprop).build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(Collections.singletonList((IterationListener) new ScoreIterationListener(1)));

    while (trainIter.hasNext()) {
        currentDataset = trainIter.next();
        splitTestAndTrain = currentDataset.splitTestAndTrain(fraction, rnd);
        trainingSet = splitTestAndTrain.getTrain();
        testingSet = splitTestAndTrain.getTest();
        features = testingSet.getFeatureMatrix();
        labels = testingSet.getLabels();
    }

    //Train the model with the training data
    for (int n = 0; n < nepoches; n++) {
        model.fit(trainingSet);
    }

    //Do the evaluations of the model including the Accuracy, F1 score etc.
    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputList.get(0).outputNodes);
    predicted = model.output(features, false);

    eval.eval(labels, predicted);

    evaluationDetails = "{\"Accuracy\":\"" + eval.accuracy() + "\", \"Pecision\":\"" + eval.precision()
            + "\",\"Recall\":\"" + eval.recall() + "\",\"F1Score\":\"" + eval.f1() + "\"}";
    return evaluationDetails;

}

From source file:seqmodel.RNNModel.java

public void evaluate() throws Exception {
    String dataSetBaseDir = prop.getProperty("docvec.dir");
    train = getAMISentenceIterator(dataSetBaseDir + "/train/");
    test = getAMISentenceIterator(dataSetBaseDir + "/test/");

    System.out.println("Traning num_instances: " + train.numExamples());
    System.out.println("Test num_instances: " + test.numExamples());

    //+++ DEBUG:
    //System.out.println("train:");
    //train.reset();
    //while (train.hasNext()) {
    //    System.out.println(train.next());
    //}

    //System.out.println("test:");
    //test.reset();
    //while (test.hasNext()) {
    //    System.out.println(test.next());
    //}
    //--- DEBUG

    MultiLayerNetwork rnn = buildRNN(train);

    for (int i = 0; i < NUM_EPOCHS; i++) {
        System.out.println("Epoch: " + i);
        rnn.fit(train);

        Evaluation evaluation = new Evaluation();
        while (test.hasNext()) {
            DataSet t = test.next();
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            //INDArray inMask = t.getFeaturesMaskArray();
            //INDArray outMask = t.getLabelsMaskArray();
            INDArray predicted = rnn.output(features, false/*, inMask, outMask*/);

            evaluation.evalTimeSeries(labels, predicted/*, outMask*/);
        }

        train.reset();
        test.reset();

        System.out.println(evaluation.stats());
    }

}

From source file:trash.CNNLFWExample.java

public static void main(String[] args) {

    try {

        int batchSize = 30; // numSamples/10;
        int iterations = 50000;
        int splitTrainNum = (int) (batchSize * .8);
        //            int seed = 123;
        int listenerFreq = iterations / 5;
        boolean useSubset = true;
        DataSet lfwNext;
        SplitTestAndTrain trainTest;
        DataSet trainInput;
        List<INDArray> testInput = new ArrayList<>();
        List<INDArray> testLabels = new ArrayList<>();

        int nChannels = 3;

        File parentDir = new File("C:\\Users\\acastano\\Downloads\\lfw");
        String[] allowedExtensions = new String[] { "jpg" };
        FileSplit filesInDir = new FileSplit(parentDir, allowedExtensions, randNumGen);
        ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();

        BalancedPathFilter pathFilter = new BalancedPathFilter(randNumGen, allowedExtensions, labelMaker);
        InputSplit[] filesInDirSplit = filesInDir.sample(pathFilter, 80, 20);
        InputSplit trainData = filesInDirSplit[0];
        InputSplit testData = filesInDirSplit[1];

        //            int tam = 28;
        final int numRows = 40;
        final int numColumns = 40;

        int outputNum = 20; // 5749;
        ImageRecordReader recordReader = new ImageRecordReader(numRows, numColumns, nChannels, labelMaker);
        recordReader.initialize(trainData);
        ImageRecordReader recordReaderTest = new ImageRecordReader(numRows, numColumns, nChannels, labelMaker);
        recordReaderTest.initialize(testData);

        int classposition = -1; //numRows * numColumns * nChannels+200000;

        org.nd4j.linalg.dataset.api.iterator.DataSetIterator dataIter = new RecordReaderDataSetIterator(
                recordReader, batchSize, classposition, outputNum);
        org.nd4j.linalg.dataset.api.iterator.DataSetIterator dataIterTest = new RecordReaderDataSetIterator(
                recordReaderTest, batchSize, classposition, outputNum);

        System.out.println("Num Clases: " + dataIter.getLabels().size());
        int seed = 0;
        //            while (dataIter.hasNext()) {
        //                DataSet ds = dataIter.next();
        //
        //              //  System.out.println(ds.numExamples());
        //               System.out.println(ds);
        //
        //            }
        //            
        //            while (dataIterTest.hasNext()) {
        //                DataSet ds = dataIterTest.next();
        //
        //              //  System.out.println(ds.numExamples());
        //               System.out.println(ds);
        //
        //            }

        //            int nChannels = 3;
        //            int outputNum = LFWLoader.SUB_NUM_LABELS;
        //            int numSamples = LFWLoader.SUB_NUM_IMAGES-4;

        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                .iterations(iterations).regularization(true).l2(0.0005).learningRate(0.01)//.biasLearningRate(0.02)
                //.learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyDecayRate(0.001).lrPolicyPower(0.75)
                .weightInit(WeightInit.XAVIER)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(Updater.NESTEROVS)
                .momentum(0.9).list().layer(0, new ConvolutionLayer.Builder(5, 5)
                        //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                        .nIn(nChannels).stride(1, 1).nOut(20).activation("identity").build())
                .layer(1,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 })
                                .name("pool1").build())
                .layer(2, new ConvolutionLayer.Builder(3, 3).name("cnn2").stride(1, 1).nOut(40).build())
                .layer(3,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 })
                                .name("pool2").build())
                .layer(4, new ConvolutionLayer.Builder(3, 3).name("cnn3").stride(1, 1).nOut(60).build())
                .layer(5,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 })
                                .name("pool3").build())
                .layer(6, new ConvolutionLayer.Builder(2, 2).name("cnn4").stride(1, 1).nOut(80).build())
                .layer(7, new DenseLayer.Builder().name("ffn1").nOut(10).dropOut(0.5).build())
                .layer(8, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(outputNum).activation("softmax").build())
                .backprop(true).pretrain(false);
        new ConvolutionLayerSetup(builder, numRows, numColumns, nChannels);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();

        //        log.info("Train model....");
        model.setListeners(new ScoreIterationListener(1));
        int nEpochs = 1;

        Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
        for (int i = 0; i < nEpochs; i++) {
            model.fit(dataIter);

            // saveModel(model,"mimodelo.txt");

            Evaluation eval = new Evaluation(outputNum);
            dataIterTest.reset();
            while (dataIterTest.hasNext()) {
                DataSet ds = dataIterTest.next();
                INDArray output = model.output(ds.getFeatureMatrix(), false);
                eval.eval(ds.getLabels(), output);
            }
            System.out.println(eval.stats());
            dataIterTest.reset();
        }
        //            log.info(eval.stats());
        //            log.info("****************Example finished********************");
    } catch (IOException ex) {
        java.util.logging.Logger.getLogger(CNNLFWExample.class.getName()).log(Level.SEVERE, null, ex);
    }

}