List of usage examples for org.deeplearning4j.eval Evaluation stats
public String stats()
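All of the examples below follow the same pattern: build an Evaluation for the number of classes, feed it labels plus network output, then print stats(). A minimal sketch of that shared pattern (the names numClasses, testIterator, and network are placeholders, assuming the DL4J 0.x API used throughout this page):

    // Minimal usage sketch (placeholder names; not taken from any single example below)
    Evaluation eval = new Evaluation(numClasses);           // numClasses: number of label classes
    while (testIterator.hasNext()) {                        // testIterator: any DataSetIterator
        DataSet batch = testIterator.next();
        INDArray predicted = network.output(batch.getFeatureMatrix(), false); // inference mode
        eval.eval(batch.getLabels(), predicted);            // accumulate confusion-matrix counts
    }
    System.out.println(eval.stats()); // accuracy, precision, recall, F1 and the confusion matrix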
From source file:aiLogicImplementation.RNNBasic.java
License:Apache License
@Override
public void evaluate() {
    Evaluation evaluation = new Evaluation(4);
    evaluation.eval(labels, input, net);
    System.out.println(evaluation.stats());
}
From source file:cnn.image.classification.CNNImageClassification.java
public static void main(String[] args) {
    int nChannels = 3;
    int outputNum = 10;
    // int numExamples = 80;
    int batchSize = 10;
    int nEpochs = 20;
    int iterations = 1;
    int seed = 123;
    int height = 32;
    int width = 32;
    Random randNumGen = new Random(seed);

    System.out.println("Load data....");
    File parentDir = new File("train1/");
    FileSplit filesInDir = new FileSplit(parentDir, allowedExtensions, randNumGen);
    ParentPathLabelGenerator labelMaker = new ParentPathLabelGenerator();
    BalancedPathFilter pathFilter = new BalancedPathFilter(randNumGen, allowedExtensions, labelMaker);

    //Split the image files into train and test sets. Note: each sample() call below uses
    //weights (100, 0) or (0, 100), so both splits draw the full set rather than an 80%/20% split.
    InputSplit[] filesInDirSplit = filesInDir.sample(pathFilter, 100, 0);
    InputSplit[] filesInDirSplitTest = filesInDir.sample(pathFilter, 0, 100);
    InputSplit trainData = filesInDirSplit[0];
    InputSplit testData = filesInDirSplitTest[1];
    System.out.println("train = " + trainData.length());
    System.out.println("test = " + testData.length());

    //Specifying a new record reader with the height and width you want the images to be resized to.
    //Note that the images in this example are all of different size.
    //They will all be resized to the height and width specified below.
    ImageRecordReader recordReader = new ImageRecordReader(height, width, nChannels, labelMaker);
    //Often there is a need to transform images to artificially increase the size of the dataset
    recordReader.initialize(trainData);
    DataSetIterator dataIterTrain = new RecordReaderDataSetIterator(recordReader, batchSize, 1, outputNum);
    // recordReader.reset();
    recordReader.initialize(testData);
    DataSetIterator dataIterTest = new RecordReaderDataSetIterator(recordReader, batchSize, 1, outputNum);

    DataNormalization scaler = new ImagePreProcessingScaler(0, 1);
    dataIterTrain.setPreProcessor(scaler);
    dataIterTest.setPreProcessor(scaler);

    System.out.println("Build model....");
    MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .regularization(true).l2(0.0005)
            // .dropOut(0.5)
            .learningRate(0.001) //.biasLearningRate(0.02)
            //.learningRateDecayPolicy(LearningRatePolicy.Inverse).lrPolicyDecayRate(0.001).lrPolicyPower(0.75)
            .weightInit(WeightInit.XAVIER)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list()
            .layer(0, new ConvolutionLayer.Builder(5, 5)
                    .nIn(nChannels).stride(1, 1).nOut(20)
                    .activation("identity").build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build())
            .layer(2, new ConvolutionLayer.Builder(5, 5)
                    .stride(1, 1).nOut(50).activation("identity").build())
            .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build())
            .layer(4, new DenseLayer.Builder().activation("relu").nOut(500).build())
            .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nOut(outputNum).activation("softmax").build())
            .setInputType(InputType.convolutional(height, width, nChannels))
            .backprop(true).pretrain(false);

    //Alternative configuration (built here but not used below)
    MultiLayerConfiguration b = new NeuralNetConfiguration.Builder()
            .seed(seed).iterations(iterations)
            .regularization(false).l2(0.005) // tried 0.0001, 0.0005
            .learningRate(0.0001) // tried 0.00001, 0.00005, 0.000001
            .weightInit(WeightInit.XAVIER)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list()
            .layer(0, new ConvolutionLayer.Builder(5, 5)
                    //nIn and nOut specify depth. nIn here is the nChannels and nOut is the number of filters to be applied
                    .nIn(nChannels).stride(1, 1).nOut(50) // tried 10, 20, 40, 50
                    .activation("relu").build())
            .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build())
            .layer(2, new ConvolutionLayer.Builder(5, 5)
                    .stride(1, 1).nOut(100) // tried 25, 50, 100
                    .activation("relu").build())
            .layer(3, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                    .kernelSize(2, 2).stride(2, 2).build())
            .layer(4, new DenseLayer.Builder().activation("relu").nOut(500).build())
            .layer(5, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nOut(outputNum).activation("softmax").build())
            .backprop(true).pretrain(false)
            .cnnInputSize(height, width, nChannels).build();

    MultiLayerConfiguration conf = builder.build();
    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();

    System.out.println("Train model....");
    model.setListeners(new ScoreIterationListener(1));
    // for( int i=0; i<nEpochs; i++ ) {
    //     model.setListeners(new HistogramIterationListener(1));
    MultipleEpochsIterator trainIter = new MultipleEpochsIterator(nEpochs, dataIterTrain, 2);
    model.fit(trainIter);
    //     System.out.println("*** Completed epoch - " + i + " ***");

    System.out.println("Evaluate model....");
    // Evaluation eval = new Evaluation(outputNum);
    // while(dataIterTest.hasNext()){
    //     DataSet ds = dataIterTest.next();
    //     INDArray output = model.output(ds.getFeatureMatrix(), false);
    //     eval.eval(ds.getLabels(), output);
    // }
    // System.out.println(eval.stats());
    // dataIterTest.reset();
    // }
    Evaluation eval1 = model.evaluate(dataIterTest);
    System.out.println(eval1.stats());
    System.out.println("****************Example finished********************");
}
From source file:com.example.android.displayingbitmaps.ui.ImageGridActivity.java
License:Apache License
public void trainMLP() throws Exception {
    Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
    final int numRows = 28;
    final int numColumns = 28;
    int outputNum = 10;
    int numSamples = 10000;
    int batchSize = 500;
    int iterations = 10;
    int seed = 123;
    int listenerFreq = iterations / 5;
    int splitTrainNum = (int) (batchSize * .8);
    DataSet mnist;
    SplitTestAndTrain trainTest;
    DataSet trainInput;
    List<INDArray> testInput = new ArrayList<>();
    List<INDArray> testLabels = new ArrayList<>();

    log.info("Load data....");
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .iterations(iterations)
            .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer)
            .learningRate(1e-1f)
            .momentum(0.5)
            .momentumAfter(Collections.singletonMap(3, 0.9))
            .useDropConnect(true)
            .list(2)
            .layer(0, new DenseLayer.Builder().nIn(numRows * numColumns).nOut(1000)
                    .activation("relu").weightInit(WeightInit.XAVIER).build())
            .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                    .nIn(1000).nOut(outputNum)
                    .activation("softmax").weightInit(WeightInit.XAVIER).build())
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq)));

    log.info("Train model....");
    while (mnistIter.hasNext()) {
        mnist = mnistIter.next();
        trainTest = mnist.splitTestAndTrain(splitTrainNum, new Random(seed)); // train set that is the result
        trainInput = trainTest.getTrain(); // get feature matrix and labels for training
        testInput.add(trainTest.getTest().getFeatureMatrix());
        testLabels.add(trainTest.getTest().getLabels());
        model.fit(trainInput);
    }

    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputNum);
    for (int i = 0; i < testInput.size(); i++) {
        INDArray output = model.output(testInput.get(i));
        eval.eval(testLabels.get(i), output);
    }
    log.info(eval.stats());
    log.info("****************Example finished********************");
}
From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsBackprop.java
License:Apache License
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows();
        int numOutputs = 10;
        int numHiddenNodes = 200;

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .regularization(true).dropOut(0.50)
                .list(2)
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                        .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                        .weightInit(WeightInit.XAVIER).activation("softmax")
                        .nIn(numHiddenNodes).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1)
                .scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();
        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsConv.java
License:Apache License
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;
        int channels = 1;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        int numOutputs = 10;

        // Create neural network.
        MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .regularization(true).l2(0.0005)
                .learningRate(0.01)
                .weightInit(WeightInit.XAVIER)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list(4)
                .layer(0, new ConvolutionLayer.Builder(5, 5)
                        .nIn(channels).stride(1, 1).nOut(20).dropOut(0.5)
                        .activation("relu").build())
                .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX)
                        .kernelSize(2, 2).stride(2, 2).build())
                .layer(2, new DenseLayer.Builder().activation("relu").nOut(500).build())
                .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                        .nOut(10).activation("softmax").build())
                .backprop(true).pretrain(false);
        new ConvolutionLayerSetup(builder, 28, 28, 1);

        MultiLayerConfiguration conf = builder.build();
        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1)
                .scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();
        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsDropout.java
License:Apache License
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 1e-2;
        int nEpochs = 50;
        int batchSize = 500;

        // Setup training data.
        System.out.println("Please wait, reading MNIST training data.");
        String dir = System.getProperty("user.dir");
        MNISTReader trainingReader = MNIST.loadMNIST(dir, true);
        MNISTReader validationReader = MNIST.loadMNIST(dir, false);

        DataSet trainingSet = trainingReader.getData();
        DataSet validationSet = validationReader.getData();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize);
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationReader.getNumRows());

        System.out.println("Training set size: " + trainingReader.getNumImages());
        System.out.println("Validation set size: " + validationReader.getNumImages());

        System.out.println(trainingSet.get(0).getFeatures().size(1));
        System.out.println(validationSet.get(0).getFeatures().size(1));

        int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows();
        int numOutputs = 10;
        int numHiddenNodes = 100;

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list(2)
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                        .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                        .weightInit(WeightInit.XAVIER).activation("softmax")
                        .nIn(numHiddenNodes).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                //.epochTerminationConditions(new MaxEpochsTerminationCondition(10))
                .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5))
                .evaluateEveryNEpochs(1)
                .scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();
        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.heatonresearch.aifh.examples.ann.LearnIrisBackprop.java
License:Apache License
/**
 * The main method.
 * @param args Not used.
 */
public static void main(String[] args) {
    try {
        int seed = 43;
        double learningRate = 0.1;
        int splitTrainNum = (int) (150 * .75);
        int numInputs = 4;
        int numOutputs = 3;
        int numHiddenNodes = 50;

        // Setup training data.
        final InputStream istream = LearnIrisBackprop.class.getResourceAsStream("/iris.csv");
        if (istream == null) {
            System.out.println("Cannot access data set, make sure the resources are available.");
            System.exit(1);
        }
        final NormalizeDataSet ds = NormalizeDataSet.load(istream);
        final CategoryMap species = ds.encodeOneOfN(4); // species is column 4
        istream.close();

        DataSet next = ds.extractSupervised(0, 4, 4, 3);
        next.shuffle();

        // Training and validation data split
        SplitTestAndTrain testAndTrain = next.splitTestAndTrain(splitTrainNum, new Random(seed));
        DataSet trainSet = testAndTrain.getTrain();
        DataSet validationSet = testAndTrain.getTest();

        DataSetIterator trainSetIterator = new ListDataSetIterator(trainSet.asList(), trainSet.numExamples());
        DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(),
                validationSet.numExamples());

        // Create neural network.
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
                .seed(seed)
                .iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .learningRate(learningRate)
                .updater(Updater.NESTEROVS).momentum(0.9)
                .list(2)
                .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                        .weightInit(WeightInit.XAVIER).activation("relu").build())
                .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                        .weightInit(WeightInit.XAVIER).activation("softmax")
                        .nIn(numHiddenNodes).nOut(numOutputs).build())
                .pretrain(false).backprop(true).build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);
        model.init();
        model.setListeners(new ScoreIterationListener(1));

        // Define when we want to stop training.
        // Note: epochTerminationConditions(...) replaces any previously set conditions,
        // so both conditions are passed in a single call here.
        EarlyStoppingModelSaver saver = new InMemoryModelSaver();
        EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder()
                .epochTerminationConditions(new MaxEpochsTerminationCondition(500), //Max of 500 epochs
                        new ScoreImprovementEpochTerminationCondition(25))
                .evaluateEveryNEpochs(1)
                .scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score
                .modelSaver(saver).build();
        EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator);

        // Train and display result.
        EarlyStoppingResult result = trainer.fit();
        System.out.println("Termination reason: " + result.getTerminationReason());
        System.out.println("Termination details: " + result.getTerminationDetails());
        System.out.println("Total epochs: " + result.getTotalEpochs());
        System.out.println("Best epoch number: " + result.getBestModelEpoch());
        System.out.println("Score at best epoch: " + result.getBestModelScore());

        model = saver.getBestModel();

        // Evaluate
        Evaluation eval = new Evaluation(numOutputs);
        validationSetIterator.reset();
        for (int i = 0; i < validationSet.numExamples(); i++) {
            DataSet t = validationSet.get(i);
            INDArray features = t.getFeatureMatrix();
            INDArray labels = t.getLabels();
            INDArray predicted = model.output(features, false);
            // labels hold the actual species; predicted holds the network's guess
            System.out.println(features + ":Actual(" + findSpecies(labels, species) + "):Prediction("
                    + findSpecies(predicted, species) + ")" + predicted);
            eval.eval(labels, predicted);
        }

        //Print the evaluation statistics
        System.out.println(eval.stats());
    } catch (Exception ex) {
        ex.printStackTrace();
    }
}
From source file:com.javafxpert.neuralnetviz.scenario.CSVExample.java
License:Apache License
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    //public static void main(String[] args) throws Exception {
    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new FileSplit(new ClassPathResource("iris.txt").getFile()));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network
    int labelIndex = 4; //5 values in each row of the iris.txt CSV: 4 input features followed by an integer label (class) index. Labels are the 5th value (index 4) in each row
    int numClasses = 3; //3 classes (types of iris flowers) in the iris data set. Classes have integer values 0, 1 or 2
    int batchSize = 150; //Iris data set: 150 examples total. We are loading all of them into one DataSet (not recommended for large data sets)
    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); //Use 65% of data for training
    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizerStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData); //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData); //Apply normalization to the training data
    normalizer.transform(testData); //Apply normalization to the test data. This is using statistics calculated from the *training* set

    final int numInputs = 4;
    int outputNum = 3;
    int iterations = 1000;
    long seed = 6;

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .activation("tanh")
            .weightInit(WeightInit.XAVIER)
            .learningRate(0.1)
            .regularization(true).l2(1e-4)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3).build())
            .layer(1, new DenseLayer.Builder().nIn(3).nOut(3).build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .activation("softmax").nIn(3).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    //run the model
    String[] inputFeatureNames = { "Sepal length (4.3-7.9)", "Sepal width (2.0-4.4)", "Petal length (1.0-6.9)",
            "Petal width (0.1-2.5)" };
    String[] outputLabelNames = { "Iris setosa", "Iris versicolor", "Iris virginica" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.setDataNormalization(normalizer);
    model.init();
    //model.setListeners(new ScoreIterationListener(100));
    model.setListeners(new ModelListener(100, webSocketSession));

    model.fit(trainingData);

    //evaluate the model on the test set
    Evaluation eval = new Evaluation(3);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    log.info(eval.stats());

    // Make prediction: Expecting 0
    INDArray example = Nd4j.zeros(1, 4);
    example.putScalar(new int[] { 0, 0 }, 5.1);
    example.putScalar(new int[] { 0, 1 }, 3.5);
    example.putScalar(new int[] { 0, 2 }, 1.4);
    example.putScalar(new int[] { 0, 3 }, 0.2);
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds); // ds wraps 'example', so this normalizes 'example' in place
    int[] prediction = model.predict(example);
    System.out.println("prediction for 5.1,3.5,1.4,0.2: " + prediction[0]);

    return model;
}
From source file:com.javafxpert.neuralnetviz.scenario.MLPClassifierMoon.java
License:Apache License
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    int seed = 123;
    double learningRate = 0.005;
    int batchSize = 50;
    int nEpochs = 100;

    int numInputs = 2;
    int numOutputs = 2;
    int numHiddenNodes = 8;

    //Load the training data:
    RecordReader rr = new CSVRecordReader();
    rr.initialize(new FileSplit(new File("src/main/resources/classification/saturn_data_train.csv")));
    DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, batchSize, 0, 2);

    //Load the test/evaluation data:
    RecordReader rrTest = new CSVRecordReader();
    rrTest.initialize(new FileSplit(new File("src/main/resources/classification/saturn_data_eval.csv")));
    DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest, batchSize, 0, 2);

    //log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(1)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(learningRate)
            .updater(Updater.NESTEROVS).momentum(0.9)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes)
                    .weightInit(WeightInit.XAVIER).activation("relu").build())
            .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD)
                    .weightInit(WeightInit.XAVIER).activation("softmax")
                    .nIn(numHiddenNodes).nOut(numOutputs).build())
            .pretrain(false).backprop(true).build();

    String[] inputFeatureNames = { "x (-1.52 .. 2.54)", "y (-1.06 .. 1.58)" };
    String[] outputLabelNames = { "planet", "ring" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    //model.setListeners(new ScoreIterationListener(100)); //Print score every 100 parameter updates
    model.setListeners(new ModelListener(100, webSocketSession));

    for (int n = 0; n < nEpochs; n++) {
        model.fit(trainIter);
    }

    System.out.println("Evaluate model....");
    Evaluation eval = new Evaluation(numOutputs);
    while (testIter.hasNext()) {
        DataSet t = testIter.next();
        INDArray features = t.getFeatureMatrix();
        INDArray labels = t.getLabels();
        INDArray predicted = model.output(features, false);
        eval.eval(labels, predicted);
    }

    //Print the evaluation statistics
    System.out.println(eval.stats());

    // Make a prediction for the input (9.8520, -1.9809)
    INDArray example = Nd4j.zeros(1, 2);
    example.putScalar(new int[] { 0, 0 }, 9.8520);
    example.putScalar(new int[] { 0, 1 }, -1.9809);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 9.8520, -1.9809: " + prediction[0]);

    System.out.println("****************Example finished********************");
    return model;
}
From source file:com.javafxpert.neuralnetviz.scenario.SpeedDating.java
License:Apache License
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    //First: get the dataset using the record reader. CSVRecordReader handles loading/parsing
    int numLinesToSkip = 0;
    String delimiter = ",";
    org.datavec.api.records.reader.RecordReader recordReader =
            new org.datavec.api.records.reader.impl.csv.CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new org.datavec.api.split.FileSplit(
            new File("src/main/resources/classification/speed_dating_all.csv")));

    //Second: the RecordReaderDataSetIterator handles conversion to DataSet objects, ready for use in a neural network
    int labelIndex = 0; //The class label is the first value (index 0) in each row
    int numClasses = 2; //2 classes: "No second date" and "Date again"
    int batchSize = 8378; //8378 examples total. We are loading all of them into one DataSet (not recommended for large data sets)
    DataSetIterator iterator = new org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator(recordReader,
            batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); //Use 65% of data for training
    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    //We need to normalize our data. We'll use NormalizerStandardize (which gives us mean 0, unit variance):
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData); //Collect the statistics (mean/stdev) from the training data. This does not modify the input data
    normalizer.transform(trainingData); //Apply normalization to the training data
    normalizer.transform(testData); //Apply normalization to the test data. This is using statistics calculated from the *training* set

    final int numInputs = 3;
    int outputNum = 2;
    int iterations = 300;
    long seed = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed)
            .iterations(iterations)
            .activation("tanh")
            .weightInit(WeightInit.XAVIER)
            .learningRate(0.1)
            .regularization(true).l2(1e-4)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(4).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .activation("softmax").nIn(4).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    String[] inputFeatureNames = { "Attractive (1-10)", "Intelligent (1-10)", "Fun (1-10)" };
    String[] outputLabelNames = { "No second date", "Date again" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    //model.setListeners(new ScoreIterationListener(100)); //Print score every 100 parameter updates
    model.setListeners(new ModelListener(10, webSocketSession));
    model.setDataNormalization(normalizer);

    model.fit(trainingData);

    //evaluate the model on the test set
    Evaluation eval = new Evaluation(outputNum);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    System.out.println(eval.stats());

    // Make prediction
    // Input: 7, 8, 9  Expected output: ?
    INDArray example = Nd4j.zeros(1, 3);
    example.putScalar(new int[] { 0, 0 }, 7);
    example.putScalar(new int[] { 0, 1 }, 8);
    example.putScalar(new int[] { 0, 2 }, 9);
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds); // ds wraps 'example', so this normalizes 'example' in place
    int[] prediction = model.predict(example);
    System.out.println("prediction for 7 (attractive), 8 (intelligent), 9 (fun): " + prediction[0]);

    System.out.println("****************Example finished********************");
    return model;
}