Example usage for org.deeplearning4j.datasets.datavec RecordReaderDataSetIterator RecordReaderDataSetIterator

Introduction

On this page you can find example usage for the org.deeplearning4j.datasets.datavec RecordReaderDataSetIterator constructor.

Prototype

public RecordReaderDataSetIterator(RecordReader recordReader, int batchSize, int labelIndex,
        int numPossibleLabels) 

Document

Main constructor for classification.
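
To show how the four arguments fit together, here is a minimal sketch in the style of the CSVExample further down this page. The file name and column layout are illustrative assumptions only; exception handling is omitted.

    RecordReader reader = new CSVRecordReader(0, ","); //skip 0 header lines, comma-delimited
    reader.initialize(new FileSplit(new File("data.csv"))); //hypothetical file: 4 feature columns, label in column 4

    int batchSize = 50;         //examples returned per call to next()
    int labelIndex = 4;         //column index of the integer class label
    int numPossibleLabels = 3;  //number of distinct classes

    DataSetIterator iterator = new RecordReaderDataSetIterator(reader, batchSize, labelIndex, numPossibleLabels);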

Usage

From source file:cnn.image.classification.CNNImageClassification.java

public static void main(String[] args) {
    int nChannels = 3;
    int outputNum = 10;
    //        int numExamples = 80;
    int batchSize = 10;
    int nEpochs = 20;
    int iterations = 1;
    int seed = 123;
    int height = 32;
    int width = 32;
    Random randNumGen = new Random(seed);
    System.out.println("Load data....");

    File parentDir = new File("train1/");

    FileSplit filesInDir = new FileSplit(parentDir, allowedExtensions, randNumGen);

    ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();

    BalancedPathFilter pathFilter = new BalancedPathFilter(randNumGen, allowedExtensions, labelMaker);

    //Split the image files into train and test: 80% train, 20% test
    InputSplit[] filesInDirSplit = filesInDir.sample(pathFilter, 80, 20);

    InputSplit trainData = filesInDirSplit[0];
    InputSplit testData = filesInDirSplit[1];

    System.out.println("train = " + trainData.length());
    System.out.println("test = " + testData.length());
    //Specify a new record reader with the height and width you want the images to be resized to.
    //Note that the images in this example are all of different sizes;
    //they will all be resized to the height and width specified below.
    //Use a separate reader for train and test: re-initializing a single shared reader
    //would leave both iterators drawing from whichever split was initialized last.
    ImageRecordReader recordReader = new ImageRecordReader(height, width, nChannels, labelMaker);
    ImageRecordReader recordReaderTest = new ImageRecordReader(height, width, nChannels, labelMaker);

    //Often there is a need to transform images to artificially increase the size of the dataset

    recordReader.initialize(trainData);
    recordReaderTest.initialize(testData);

    DataSetIterator dataIterTrain = new RecordReaderDataSetIterator(recordReader, batchSize, 1, outputNum);
    DataSetIterator dataIterTest = new RecordReaderDataSetIterator(recordReaderTest, batchSize, 1, outputNum);

    DataNormalization scaler = new ImagePreProcessingScaler(0, 1);

    dataIterTrain.setPreProcessor(scaler);
    dataIterTest.setPreProcessor(scaler);

    System.out.println("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
            .iterations(iterations).regularization(true).l2(0.0005)
            //                .dropOut(0.5)
            .learningRate(0.001)//.biasLearningRate(0.02)
            //.learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyDecayRate(0.001).lrPolicyPower(0.75)
            .weightInit(WeightInit.XAVIER).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.NESTEROVS).momentum(0.9).list()
            .layer(0,
                    new ConvolutionLayer.Builder(5, 5).nIn(nChannels).stride(1, 1).nOut(20)
                            .activation("identity").build())
            .layer(1,
                    new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2).stride(2, 2)
                            .build())
            .layer(2, new ConvolutionLayer.Builder(5, 5).stride(1, 1).nOut(50).activation("identity").build())
            .layer(3,
                    new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                            .kernelSize(2, 2).stride(2, 2).build())
            .layer(4, new DenseLayer.Builder().activation("relu").nOut(500).build())
            .layer(5,
                    new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum)
                            .activation("softmax").build())
            .setInputType(InputType.convolutional(height, width, nChannels)) //sets layer nIn values and adds input preprocessors automatically
            .backprop(true).pretrain(false);

    // A second, alternative configuration; it is built here but never used below.
    MultiLayerConfiguration b = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
            .regularization(false).l2(0.005) // tried 0.0001, 0.0005
            .learningRate(0.0001) // tried 0.00001, 0.00005, 0.000001
            .weightInit(WeightInit.XAVIER).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.NESTEROVS).momentum(0.9).list().layer(0, new ConvolutionLayer.Builder(5, 5)
                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                    .nIn(nChannels).stride(1, 1).nOut(50) // tried 10, 20, 40, 50
                    .activation("relu").build())
            .layer(1,
                    new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                            .kernelSize(2, 2).stride(2, 2).build())
            .layer(2, new ConvolutionLayer.Builder(5, 5).stride(1, 1).nOut(100) // tried 25, 50, 100
                    .activation("relu").build())
            .layer(3,
                    new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                            .kernelSize(2, 2).stride(2, 2).build())
            .layer(4, new DenseLayer.Builder().activation("relu").nOut(500).build())
            .layer(5,
                    new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(outputNum)
                            .activation("softmax").build())
            .backprop(true).pretrain(false).cnnInputSize(height, width, nChannels).build();

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    System.out.println("Train model....");
    model.setListeners(new ScoreIterationListener(1));
    //        for( int i=0; i<nEpochs; i++ ) {
    //            model.setListeners(new HistogramIterationListener(1));

    MultipleEpochsIterator trainIter = new MultipleEpochsIterator(nEpochs, dataIterTrain, 2);
    model.fit(trainIter);

    //            System.out.println("*** Completed epoch - " + i + "  ***");

    System.out.println("Evaluate model....");
    //            Evaluation eval = new Evaluation(outputNum);
    //            while(dataIterTest.hasNext()){
    //                DataSet ds = dataIterTest.next();
    //                INDArray output = model.output(ds.getFeatureMatrix(), false);
    //                eval.eval(ds.getLabels(), output);
    //            }
    //            System.out.println(eval.stats());
    //            dataIterTest.reset();
    //        }

    Evaluation eval1 = model.evaluate(dataIterTest);
    System.out.println(eval1.stats());

    System.out.println("****************Example finished********************");
}

From source file:com.javafxpert.neuralnetviz.scenario.CSVExample.java

License:Apache License

public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    //public static void main(String[] args) throws  Exception {

    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new FileSplit(new ClassPathResource("iris.txt").getFile()));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in neural network
    int labelIndex = 4; //5 values in each row of the iris.txt CSV: 4 input features followed by an integer label (class) index. Labels are the 5th value (index 4) in each row
    int numClasses = 3; //3 classes (types of iris flowers) in the iris data set. Classes have integer values 0, 1 or 2
    int batchSize = 150; //Iris data set: 150 examples total. We are loading all of them into one DataSet (not recommended for large data sets)

    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); //Use 65% of data for training

    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizeStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData); //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData); //Apply normalization to the training data
    normalizer.transform(testData); //Apply normalization to the test data. This is using statistics calculated from the *training* set

    final int numInputs = 4;
    int outputNum = 3;
    int iterations = 1000;
    long seed = 6;

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
            .activation("tanh").weightInit(WeightInit.XAVIER).learningRate(0.1).regularization(true).l2(1e-4)
            .list().layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3).build())
            .layer(1, new DenseLayer.Builder().nIn(3).nOut(3).build())
            .layer(2,
                    new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                            .activation("softmax").nIn(3).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    //run the model
    String[] inputFeatureNames = { "Sepal length (4.3-7.9)", "Sepal width (2.0-4.4)", "Petal length (1.0-6.9)",
            "Petal width (0.1-2.5)" };
    String[] outputLabelNames = { "Iris setosa", "Iris versicolor", "Iris virginica" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.setDataNormalization(normalizer);

    model.init();
    //model.setListeners(new ScoreIterationListener(100));
    model.setListeners(new ModelListener(100, webSocketSession));

    model.fit(trainingData);

    //evaluate the model on the test set
    Evaluation eval = new Evaluation(3);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    log.info(eval.stats());

    // Make prediction: Expecting 0
    INDArray example = Nd4j.zeros(1, 4);
    example.putScalar(new int[] { 0, 0 }, 5.1);
    example.putScalar(new int[] { 0, 1 }, 3.5);
    example.putScalar(new int[] { 0, 2 }, 1.4);
    example.putScalar(new int[] { 0, 3 }, 0.2);
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds); //transform works in place, so the "example" features are now normalized as well
    int[] prediction = model.predict(example);
    System.out.println("prediction for 5.1,3.5,1.4,0.2: " + prediction[0]);

    return model;
}

From source file:com.javafxpert.neuralnetviz.scenario.WineClassifier.java

License:Apache License

public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    //Load the training data:
    RecordReader rr = new CSVRecordReader();
    rr.initialize(new FileSplit((new File("src/main/resources/classification/wine.data"))));

    DataSetIterator iterator = new RecordReaderDataSetIterator(rr, FILE_SIZE, CLASS_INDEX, NUM_OF_CLASSES);
    DataSet wineData = iterator.next();
    wineData.shuffle();
    SplitTestAndTrain testAndTrain = wineData.splitTestAndTrain(DATA_SPLIT_TRAIN_TEST);
    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    // A Standard Normalizer which gives zero-mean, unit variance
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData); // Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData); // Apply normalization to the training data
    normalizer.transform(testData); // Apply normalization to the test data. This is using statistics calculated from the *training* set

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(SEED).iterations(ITERATIONS)
            .weightInit(WeightInit.RELU).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(EPSILON).regularization(true).l2(1e-4).updater(Updater.NESTEROVS).momentum(ALPHA)
            .list()
            .layer(0,
                    new DenseLayer.Builder().nIn(INPUT_NEURONS).nOut(HIDDEN_NEURONS)
                            .activation(HIDDEN_LAYER_ACTIVATION).build())
            .layer(1,
                    new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).nIn(HIDDEN_NEURONS)
                            .nOut(OUTPUT_NEURONS).activation(OUTPUT_LAYER_ACTIVATION).build())
            .pretrain(false).backprop(true).build();

    String[] inputFeatureNames = { "Alcohol (11.0-14.9)", "Malic acid (0.7-5.8)", "Ash (1.3-3.3)",
            "Alcalinity of ash (10.6-30.0)", "Magnesium (70-162)", "Total phenols (0.9-3.9)",
            "Flavanoids (0.30-5.1)", "Nonflavanoid phenols (0.1-0.7)", "Proanthocyanins (0.4-3.6)",
            "Color intensity (1.2-13.0)", "Hue (0.4-1.8)", "OD280/OD315 of diluted (1.2-4.0)",
            "Proline (278-1680)" };
    String[] outputLabelNames = { "Cultivar A", "Cultivar B", "Cultivar C" };
    MultiLayerNetworkEnhanced networkModel = new MultiLayerNetworkEnhanced(conf, inputFeatureNames,
            outputLabelNames);
    networkModel.init();
    //model.setListeners(new ScoreIterationListener(100));    //Print score every 100 parameter updates
    networkModel.setListeners(new ModelListener(10, webSocketSession));
    networkModel.setDataNormalization(normalizer);

    for (int n = 0; n < EPOCHS; n++) {
        networkModel.fit(trainingData);
    }

    System.out.println("Evaluate model....");
    Evaluation eval = new Evaluation(OUTPUT_NEURONS);
    INDArray output = networkModel.output(testData.getFeatureMatrix());

    System.out.println(testData.getFeatureMatrix());
    System.out.println(output);

    eval.eval(testData.getLabels(), output);

    //Print the evaluation statistics
    System.out.println(eval.stats());

    return networkModel;

}

From source file:stratego.neural.net.NeuralNetTest.java

/**
 * Used for testing and training.
 *
 * @param csvFileClasspath path of the CSV file to load
 * @param batchSize number of examples per DataSet
 * @param labelIndex index of the column holding the class label
 * @param numClasses number of possible label values
 * @return the first DataSet produced by the iterator
 * @throws IOException
 * @throws InterruptedException
 */
private static DataSet readCSVDataset(String csvFileClasspath, int batchSize, int labelIndex, int numClasses)
        throws IOException, InterruptedException {

    RecordReader rr = new CSVRecordReader();
    File file = new File(csvFileClasspath);
    // rr.initialize(new FileSplit(new ClassPathResource(csvFileClasspath).getFile()));
    rr.initialize(new FileSplit(file));
    DataSetIterator iterator = new RecordReaderDataSetIterator(rr, batchSize, labelIndex, numClasses);
    return iterator.next();
}
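
A hypothetical call to this helper, assuming an iris-style CSV with four feature columns and the class label in column 4 (the file path and values are illustrative):

    DataSet trainingData = readCSVDataset("data/iris-train.csv", 150, 4, 3);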

From source file:trash.CNNLFWExample.java

public static void main(String[] args) {

    try {

        int batchSize = 30; // numSamples/10;
        int iterations = 50000;
        int splitTrainNum = (int) (batchSize * .8);
        //            int seed = 123;
        int listenerFreq = iterations / 5;
        boolean useSubset = true;
        DataSet lfwNext;
        SplitTestAndTrain trainTest;
        DataSet trainInput;
        List<INDArray> testInput = new ArrayList<>();
        List<INDArray> testLabels = new ArrayList<>();

        int nChannels = 3;

        File parentDir = new File("C:\\Users\\acastano\\Downloads\\lfw");
        String[] allowedExtensions = new String[] { "jpg" };
        FileSplit filesInDir = new FileSplit(parentDir, allowedExtensions, randNumGen);
        ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();

        BalancedPathFilter pathFilter = new BalancedPathFilter(randNumGen, allowedExtensions, labelMaker);
        InputSplit[] filesInDirSplit = filesInDir.sample(pathFilter, 80, 20);
        InputSplit trainData = filesInDirSplit[0];
        InputSplit testData = filesInDirSplit[1];

        //            int tam = 28;
        final int numRows = 40;
        final int numColumns = 40;

        int outputNum = 20; // 5749;
        ImageRecordReader recordReader = new ImageRecordReader(numRows, numColumns, nChannels, labelMaker);
        recordReader.initialize(trainData);
        ImageRecordReader recordReaderTest = new ImageRecordReader(numRows, numColumns, nChannels, labelMaker);
        recordReaderTest.initialize(testData);

        int classposition = -1; //numRows * numColumns * nChannels+200000;

        org.nd4j.linalg.dataset.api.iterator.DataSetIterator dataIter = new RecordReaderDataSetIterator(
                recordReader, batchSize, classposition, outputNum);
        org.nd4j.linalg.dataset.api.iterator.DataSetIterator dataIterTest = new RecordReaderDataSetIterator(
                recordReaderTest, batchSize, classposition, outputNum);

        System.out.println("Num Clases: " + dataIter.getLabels().size());
        int seed = 0;
        //            while (dataIter.hasNext()) {
        //                DataSet ds = dataIter.next();
        //
        //              //  System.out.println(ds.numExamples());
        //               System.out.println(ds);
        //
        //            }
        //            
        //            while (dataIterTest.hasNext()) {
        //                DataSet ds = dataIterTest.next();
        //
        //              //  System.out.println(ds.numExamples());
        //               System.out.println(ds);
        //
        //            }

        //            int nChannels = 3;
        //            int outputNum = LFWLoader.SUB_NUM_LABELS;
        //            int numSamples = LFWLoader.SUB_NUM_IMAGES-4;

        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                .iterations(iterations).regularization(true).l2(0.0005).learningRate(0.01)//.biasLearningRate(0.02)
                //.learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyDecayRate(0.001).lrPolicyPower(0.75)
                .weightInit(WeightInit.XAVIER)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(Updater.NESTEROVS)
                .momentum(0.9).list().layer(0, new ConvolutionLayer.Builder(5, 5)
                        //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                        .nIn(nChannels).stride(1, 1).nOut(20).activation("identity").build())
                .layer(1,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 })
                                .name("pool1").build())
                .layer(2, new ConvolutionLayer.Builder(3, 3).name("cnn2").stride(1, 1).nOut(40).build())
                .layer(3,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 })
                                .name("pool2").build())
                //Layer indices must be unique; the original code reused indices 2 and 3 here,
                //which silently overwrote the earlier layers.
                .layer(4, new ConvolutionLayer.Builder(3, 3).name("cnn3").stride(1, 1).nOut(60).build())
                .layer(5,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX, new int[] { 2, 2 })
                                .name("pool3").build())
                .layer(6, new ConvolutionLayer.Builder(2, 2).name("cnn4").stride(1, 1).nOut(80).build())
                .layer(7, new DenseLayer.Builder().name("ffn1").nOut(10).dropOut(0.5).build())
                .layer(8, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(outputNum).activation("softmax").build())
                .backprop(true).pretrain(false);
        new ConvolutionLayerSetup(builder, numRows, numColumns, nChannels);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();

        //        log.info("Train model....");
        model.setListeners(new ScoreIterationListener(1));
        int nEpochs = 1;

        Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
        for (int i = 0; i < nEpochs; i++) {
            model.fit(dataIter);

            // saveModel(model,"mimodelo.txt");

            Evaluation eval = new Evaluation(outputNum);
            dataIterTest.reset();
            while (dataIterTest.hasNext()) {
                DataSet ds = dataIterTest.next();
                INDArray output = model.output(ds.getFeatureMatrix(), false);
                eval.eval(ds.getLabels(), output);
            }
            System.out.println(eval.stats());
            dataIterTest.reset();
        }
        //            log.info(eval.stats());
        //            log.info("****************Example finished********************");
    } catch (IOException ex) {
        java.util.logging.Logger.getLogger(CNNLFWExample.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:trash.Sample2.java

public static void main(String[] args) {

    //  http://www.cs.toronto.edu/%7Ehinton/absps/guideTR.pdf 
    try {
        int nChannels = 3;

        File parentDir = new File("C:\\Users\\acastano\\Downloads\\lfw");
        String[] allowedExtensions = new String[] { "jpg" };
        FileSplit filesInDir = new FileSplit(parentDir, allowedExtensions, randNumGen);
        ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();

        BalancedPathFilter pathFilter = new BalancedPathFilter(randNumGen, allowedExtensions, labelMaker);
        InputSplit[] filesInDirSplit = filesInDir.sample(pathFilter, 80, 20);
        InputSplit trainData = filesInDirSplit[0];
        InputSplit testData = filesInDirSplit[1];

        int tam = 28;

        int outputNum = 20; // 5749;
        ImageRecordReader recordReader = new ImageRecordReader(tam, tam, nChannels, labelMaker);
        recordReader.initialize(trainData);
        ImageRecordReader recordReaderTest = new ImageRecordReader(tam, tam, nChannels, labelMaker);
        recordReaderTest.initialize(testData);

        DataSetIterator dataIter = new RecordReaderDataSetIterator(recordReader, 10, tam * tam * nChannels,
                outputNum);
        DataSetIterator dataIterTest = new RecordReaderDataSetIterator(recordReaderTest, 10,
                tam * tam * nChannels, outputNum);

        int contador = 0;
        //            while (dataIter.hasNext()) {
        //               DataSet ds =  dataIter.next();
        //               
        //               System.out.println(ds.numExamples());
        //              // System.out.println(ds.numInputs());
        //                contador++;                
        //            }

        System.out.println("Num Clases: " + dataIter.getLabels().size());
        int seed = 0;
        int iterations = 1000;

        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed)
                .iterations(iterations).regularization(true).l2(0.0005).learningRate(0.01)//.biasLearningRate(0.02)
                //.learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyDecayRate(0.001).lrPolicyPower(0.75)
                .weightInit(WeightInit.XAVIER)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(Updater.NESTEROVS)
                .momentum(0.1).list().layer(0, new ConvolutionLayer.Builder(5, 5)
                        //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                        .nIn(nChannels).stride(1, 1).nOut(20).activation("identity").build())
                .layer(1,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                                .kernelSize(2, 2).stride(2, 2).build())
                .layer(2, new ConvolutionLayer.Builder(5, 5)
                        //Note that nIn need not be specified in later layers
                        .stride(1, 1).nOut(50).activation("identity").build())
                .layer(3,
                        new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2)
                                .stride(2, 2).build())
                .layer(4, new DenseLayer.Builder().activation("relu").nOut(500).build())
                .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.MSE).nOut(outputNum)
                        .activation("softmax").build())
                .backprop(true).pretrain(false);
        // The builder needs the dimensions of the image along with the number of channels: these are tam x tam (28x28) images with three channels
        new ConvolutionLayerSetup(builder, tam, tam, nChannels);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();

        //        log.info("Train model....");
        model.setListeners(new ScoreIterationListener(1));
        int nEpochs = 2;

        Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
        for (int i = 0; i < nEpochs; i++) {
            model.fit(dataIter);

            Evaluation eval = new Evaluation(outputNum);
            while (dataIterTest.hasNext()) {
                DataSet ds = dataIterTest.next();
                INDArray output = model.output(ds.getFeatureMatrix(), false);
                eval.eval(ds.getLabels(), output);
            }
            System.out.println(eval.stats());
            dataIterTest.reset();
        }
        //        log.info("****************Example finished********************");

    } catch (IOException ex) {
        Logger.getLogger(Sample2.class.getName()).log(Level.SEVERE, null, ex);
    }

}

From source file:weka.dl4j.iterators.ImageDataSetIterator.java

License:Open Source License

/**
 * This method returns the iterator. Scales all intensity values: it divides them by 255. Shuffles the data
 * before the iterator is created.
 *
 * @param data the dataset to use
 * @param seed the seed for the random number generator
 * @param batchSize the batch size to use
 * @return the iterator
 * @throws Exception
 */
@Override
public DataSetIterator getIterator(Instances data, int seed, int batchSize) throws Exception {

    validate(data);
    data.randomize(new Random(seed));
    EasyImageRecordReader reader = getImageRecordReader(data, seed);
    DataSetIterator tmpIter = new RecordReaderDataSetIterator(reader, batchSize, -1, data.numClasses());
    tmpIter.setPreProcessor(new ScaleImagePixelsPreProcessor());
    return tmpIter;
}
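
For comparison: outside the Weka wrapper, the same divide-by-255 scaling can be obtained with DL4J's ImagePreProcessingScaler, as the CNNImageClassification example at the top of this page does. A sketch, assuming an ImageRecordReader named reader plus batchSize and numClasses are already defined:

    DataSetIterator iter = new RecordReaderDataSetIterator(reader, batchSize, 1, numClasses); //label index 1 for an ImageRecordReader
    iter.setPreProcessor(new ImagePreProcessingScaler(0, 1)); //maps pixel intensities from [0, 255] into [0, 1]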