List of usage examples for org.deeplearning4j.eval.Evaluation
public Evaluation(int numClasses)
From source file:aiLogicImplementation.RNNBasic.java
License:Apache License
/** Scores the trained network against the stored labels/input and prints the statistics. */
@Override
public void evaluate() {
    // Four output classes for this RNN task.
    final Evaluation eval = new Evaluation(4);
    eval.eval(labels, input, net);
    System.out.println(eval.stats());
}
From source file:com.example.android.displayingbitmaps.ui.ImageGridActivity.java (NOTE: this attribution appears incorrect — the snippet below is a DL4J MNIST MLP trainer, not an Android image-grid activity)
License:Apache License
/**
 * Trains a two-layer MLP (1000 ReLU hidden units, softmax output) on a 10k-example
 * MNIST subset, accumulating held-out batches along the way, then evaluates and
 * logs the classification statistics.
 *
 * Fix over the original: the identical {@code setListeners} call was made twice;
 * the second call merely replaced the first, so the duplicate is removed.
 *
 * @throws Exception if the MNIST data cannot be loaded
 */
public void trainMLP() throws Exception {
    Nd4j.ENFORCE_NUMERICAL_STABILITY = true;
    final int numRows = 28;
    final int numColumns = 28;
    int outputNum = 10;
    int numSamples = 10000;
    int batchSize = 500;
    int iterations = 10;
    int seed = 123;
    int listenerFreq = iterations / 5;
    // 80/20 train/test split within each mini-batch.
    int splitTrainNum = (int) (batchSize * .8);
    DataSet mnist;
    SplitTestAndTrain trainTest;
    DataSet trainInput;
    List<INDArray> testInput = new ArrayList<>();
    List<INDArray> testLabels = new ArrayList<>();

    log.info("Load data....");
    DataSetIterator mnistIter = new MnistDataSetIterator(batchSize, numSamples, true);

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(iterations)
            .gradientNormalization(GradientNormalization.RenormalizeL2PerLayer).learningRate(1e-1f)
            // Momentum schedule: 0.5 initially, bumped to 0.9 from iteration 3.
            .momentum(0.5).momentumAfter(Collections.singletonMap(3, 0.9)).useDropConnect(true).list(2)
            .layer(0, new DenseLayer.Builder().nIn(numRows * numColumns).nOut(1000).activation("relu")
                    .weightInit(WeightInit.XAVIER).build())
            .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).nIn(1000).nOut(outputNum)
                    .activation("softmax").weightInit(WeightInit.XAVIER).build())
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    // Register the score listener once (originally registered twice; the second
    // registration simply overwrote the first).
    model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(listenerFreq)));

    log.info("Train model....");
    while (mnistIter.hasNext()) {
        mnist = mnistIter.next();
        trainTest = mnist.splitTestAndTrain(splitTrainNum, new Random(seed));
        // Train on the train portion; stash the test portion for evaluation below.
        trainInput = trainTest.getTrain();
        testInput.add(trainTest.getTest().getFeatureMatrix());
        testLabels.add(trainTest.getTest().getLabels());
        model.fit(trainInput);
    }

    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputNum);
    for (int i = 0; i < testInput.size(); i++) {
        INDArray output = model.output(testInput.get(i));
        eval.eval(testLabels.get(i), output);
    }
    log.info(eval.stats());
    log.info("****************Example finished********************");
}
From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsBackprop.java
License:Apache License
/** * The main method./*from ww w . j a v a 2 s. co m*/ * @param args Not used. */ public static void main(String[] args) { try { int seed = 43; double learningRate = 1e-2; int nEpochs = 50; int batchSize = 500; // Setup training data. System.out.println("Please wait, reading MNIST training data."); String dir = System.getProperty("user.dir"); MNISTReader trainingReader = MNIST.loadMNIST(dir, true); MNISTReader validationReader = MNIST.loadMNIST(dir, false); DataSet trainingSet = trainingReader.getData(); DataSet validationSet = validationReader.getData(); DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize); DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(), validationReader.getNumRows()); System.out.println("Training set size: " + trainingReader.getNumImages()); System.out.println("Validation set size: " + validationReader.getNumImages()); System.out.println(trainingSet.get(0).getFeatures().size(1)); System.out.println(validationSet.get(0).getFeatures().size(1)); int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows(); int numOutputs = 10; int numHiddenNodes = 200; // Create neural network. MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate) .updater(Updater.NESTEROVS).momentum(0.9).regularization(true).dropOut(0.50).list(2) .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes) .weightInit(WeightInit.XAVIER).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD) .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes) .nOut(numOutputs).build()) .pretrain(false).backprop(true).build(); MultiLayerNetwork model = new MultiLayerNetwork(conf); model.init(); model.setListeners(new ScoreIterationListener(1)); // Define when we want to stop training. 
EarlyStoppingModelSaver saver = new InMemoryModelSaver(); EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder() //.epochTerminationConditions(new MaxEpochsTerminationCondition(10)) .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5)) .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score .modelSaver(saver).build(); EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator); // Train and display result. EarlyStoppingResult result = trainer.fit(); System.out.println("Termination reason: " + result.getTerminationReason()); System.out.println("Termination details: " + result.getTerminationDetails()); System.out.println("Total epochs: " + result.getTotalEpochs()); System.out.println("Best epoch number: " + result.getBestModelEpoch()); System.out.println("Score at best epoch: " + result.getBestModelScore()); model = saver.getBestModel(); // Evaluate Evaluation eval = new Evaluation(numOutputs); validationSetIterator.reset(); for (int i = 0; i < validationSet.numExamples(); i++) { DataSet t = validationSet.get(i); INDArray features = t.getFeatureMatrix(); INDArray labels = t.getLabels(); INDArray predicted = model.output(features, false); eval.eval(labels, predicted); } //Print the evaluation statistics System.out.println(eval.stats()); } catch (Exception ex) { ex.printStackTrace(); } }
From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsConv.java
License:Apache License
/** * The main method./*from ww w .j ava2 s. co m*/ * @param args Not used. */ public static void main(String[] args) { try { int seed = 43; double learningRate = 1e-2; int nEpochs = 50; int batchSize = 500; int channels = 1; // Setup training data. System.out.println("Please wait, reading MNIST training data."); String dir = System.getProperty("user.dir"); MNISTReader trainingReader = MNIST.loadMNIST(dir, true); MNISTReader validationReader = MNIST.loadMNIST(dir, false); DataSet trainingSet = trainingReader.getData(); DataSet validationSet = validationReader.getData(); DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize); DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(), validationReader.getNumRows()); System.out.println("Training set size: " + trainingReader.getNumImages()); System.out.println("Validation set size: " + validationReader.getNumImages()); int numOutputs = 10; // Create neural network. MultiLayerConfiguration.Builder builder = new NeuralNetConfiguration.Builder().seed(seed).iterations(1) .regularization(true).l2(0.0005).learningRate(0.01).weightInit(WeightInit.XAVIER) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).updater(Updater.NESTEROVS) .momentum(0.9).list(4) .layer(0, new ConvolutionLayer.Builder(5, 5).nIn(channels).stride(1, 1).nOut(20).dropOut(0.5) .activation("relu").build()) .layer(1, new SubsamplingLayer.Builder(SubsamplingLayer.PoolingType.MAX).kernelSize(2, 2) .stride(2, 2).build()) .layer(2, new DenseLayer.Builder().activation("relu").nOut(500).build()) .layer(3, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).nOut(10) .activation("softmax").build()) .backprop(true).pretrain(false); new ConvolutionLayerSetup(builder, 28, 28, 1); MultiLayerConfiguration conf = builder.build(); MultiLayerNetwork model = new MultiLayerNetwork(conf); model.init(); model.setListeners(new ScoreIterationListener(1)); // Define when we 
want to stop training. EarlyStoppingModelSaver saver = new InMemoryModelSaver(); EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder() //.epochTerminationConditions(new MaxEpochsTerminationCondition(10)) .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5)) .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score .modelSaver(saver).build(); EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator); // Train and display result. EarlyStoppingResult result = trainer.fit(); System.out.println("Termination reason: " + result.getTerminationReason()); System.out.println("Termination details: " + result.getTerminationDetails()); System.out.println("Total epochs: " + result.getTotalEpochs()); System.out.println("Best epoch number: " + result.getBestModelEpoch()); System.out.println("Score at best epoch: " + result.getBestModelScore()); model = saver.getBestModel(); // Evaluate Evaluation eval = new Evaluation(numOutputs); validationSetIterator.reset(); for (int i = 0; i < validationSet.numExamples(); i++) { DataSet t = validationSet.get(i); INDArray features = t.getFeatureMatrix(); INDArray labels = t.getLabels(); INDArray predicted = model.output(features, false); eval.eval(labels, predicted); } //Print the evaluation statistics System.out.println(eval.stats()); } catch (Exception ex) { ex.printStackTrace(); } }
From source file:com.heatonresearch.aifh.examples.ann.LearnDigitsDropout.java
License:Apache License
/** * The main method./*from w w w .j a va2s. c o m*/ * @param args Not used. */ public static void main(String[] args) { try { int seed = 43; double learningRate = 1e-2; int nEpochs = 50; int batchSize = 500; // Setup training data. System.out.println("Please wait, reading MNIST training data."); String dir = System.getProperty("user.dir"); MNISTReader trainingReader = MNIST.loadMNIST(dir, true); MNISTReader validationReader = MNIST.loadMNIST(dir, false); DataSet trainingSet = trainingReader.getData(); DataSet validationSet = validationReader.getData(); DataSetIterator trainSetIterator = new ListDataSetIterator(trainingSet.asList(), batchSize); DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(), validationReader.getNumRows()); System.out.println("Training set size: " + trainingReader.getNumImages()); System.out.println("Validation set size: " + validationReader.getNumImages()); System.out.println(trainingSet.get(0).getFeatures().size(1)); System.out.println(validationSet.get(0).getFeatures().size(1)); int numInputs = trainingReader.getNumCols() * trainingReader.getNumRows(); int numOutputs = 10; int numHiddenNodes = 100; // Create neural network. MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate) .updater(Updater.NESTEROVS).momentum(0.9).list(2) .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes) .weightInit(WeightInit.XAVIER).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD) .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes) .nOut(numOutputs).build()) .pretrain(false).backprop(true).build(); MultiLayerNetwork model = new MultiLayerNetwork(conf); model.init(); model.setListeners(new ScoreIterationListener(1)); // Define when we want to stop training. 
EarlyStoppingModelSaver saver = new InMemoryModelSaver(); EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder() //.epochTerminationConditions(new MaxEpochsTerminationCondition(10)) .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(5)) .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score .modelSaver(saver).build(); EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator); // Train and display result. EarlyStoppingResult result = trainer.fit(); System.out.println("Termination reason: " + result.getTerminationReason()); System.out.println("Termination details: " + result.getTerminationDetails()); System.out.println("Total epochs: " + result.getTotalEpochs()); System.out.println("Best epoch number: " + result.getBestModelEpoch()); System.out.println("Score at best epoch: " + result.getBestModelScore()); model = saver.getBestModel(); // Evaluate Evaluation eval = new Evaluation(numOutputs); validationSetIterator.reset(); for (int i = 0; i < validationSet.numExamples(); i++) { DataSet t = validationSet.get(i); INDArray features = t.getFeatureMatrix(); INDArray labels = t.getLabels(); INDArray predicted = model.output(features, false); eval.eval(labels, predicted); } //Print the evaluation statistics System.out.println(eval.stats()); } catch (Exception ex) { ex.printStackTrace(); } }
From source file:com.heatonresearch.aifh.examples.ann.LearnIrisBackprop.java
License:Apache License
/** * The main method.//from w ww . j a va2 s .c om * @param args Not used. */ public static void main(String[] args) { try { int seed = 43; double learningRate = 0.1; int splitTrainNum = (int) (150 * .75); int numInputs = 4; int numOutputs = 3; int numHiddenNodes = 50; // Setup training data. final InputStream istream = LearnIrisBackprop.class.getResourceAsStream("/iris.csv"); if (istream == null) { System.out.println("Cannot access data set, make sure the resources are available."); System.exit(1); } final NormalizeDataSet ds = NormalizeDataSet.load(istream); final CategoryMap species = ds.encodeOneOfN(4); // species is column 4 istream.close(); DataSet next = ds.extractSupervised(0, 4, 4, 3); next.shuffle(); // Training and validation data split SplitTestAndTrain testAndTrain = next.splitTestAndTrain(splitTrainNum, new Random(seed)); DataSet trainSet = testAndTrain.getTrain(); DataSet validationSet = testAndTrain.getTest(); DataSetIterator trainSetIterator = new ListDataSetIterator(trainSet.asList(), trainSet.numExamples()); DataSetIterator validationSetIterator = new ListDataSetIterator(validationSet.asList(), validationSet.numExamples()); // Create neural network. MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1) .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate) .updater(Updater.NESTEROVS).momentum(0.9).list(2) .layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes) .weightInit(WeightInit.XAVIER).activation("relu").build()) .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD) .weightInit(WeightInit.XAVIER).activation("softmax").nIn(numHiddenNodes) .nOut(numOutputs).build()) .pretrain(false).backprop(true).build(); MultiLayerNetwork model = new MultiLayerNetwork(conf); model.init(); model.setListeners(new ScoreIterationListener(1)); // Define when we want to stop training. 
EarlyStoppingModelSaver saver = new InMemoryModelSaver(); EarlyStoppingConfiguration esConf = new EarlyStoppingConfiguration.Builder() .epochTerminationConditions(new MaxEpochsTerminationCondition(500)) //Max of 50 epochs .epochTerminationConditions(new ScoreImprovementEpochTerminationCondition(25)) .evaluateEveryNEpochs(1).scoreCalculator(new DataSetLossCalculator(validationSetIterator, true)) //Calculate test set score .modelSaver(saver).build(); EarlyStoppingTrainer trainer = new EarlyStoppingTrainer(esConf, conf, trainSetIterator); // Train and display result. EarlyStoppingResult result = trainer.fit(); System.out.println("Termination reason: " + result.getTerminationReason()); System.out.println("Termination details: " + result.getTerminationDetails()); System.out.println("Total epochs: " + result.getTotalEpochs()); System.out.println("Best epoch number: " + result.getBestModelEpoch()); System.out.println("Score at best epoch: " + result.getBestModelScore()); model = saver.getBestModel(); // Evaluate Evaluation eval = new Evaluation(numOutputs); validationSetIterator.reset(); for (int i = 0; i < validationSet.numExamples(); i++) { DataSet t = validationSet.get(i); INDArray features = t.getFeatureMatrix(); INDArray labels = t.getLabels(); INDArray predicted = model.output(features, false); System.out.println(features + ":Prediction(" + findSpecies(labels, species) + "):Actual(" + findSpecies(predicted, species) + ")" + predicted); eval.eval(labels, predicted); } //Print the evaluation statistics System.out.println(eval.stats()); } catch (Exception ex) { ex.printStackTrace(); } }
From source file:com.javafxpert.neuralnetviz.scenario.CSVExample.java
License:Apache License
/**
 * Builds, trains, and evaluates a 3-layer tanh network on the iris CSV data set,
 * streaming training progress to the given WebSocket session.
 *
 * @param webSocketSession destination for ModelListener progress updates
 * @return the trained network (with its normalizer attached)
 * @throws Exception if the CSV resource cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    // First: get the dataset using the record reader. CSVRecordReader handles loading/parsing.
    int numLinesToSkip = 0;
    String delimiter = ",";
    RecordReader recordReader = new CSVRecordReader(numLinesToSkip, delimiter);
    recordReader.initialize(new FileSplit(new ClassPathResource("iris.txt").getFile()));

    // Second: the RecordReaderDataSetIterator handles conversion to DataSet objects.
    int labelIndex = 4; // 5 values per row: 4 input features, then the class index in column 4
    int numClasses = 3; // 3 iris species, encoded as integer values 0, 1, 2
    int batchSize = 150; // 150 examples total, loaded as one DataSet (not advisable for large data)
    DataSetIterator iterator = new RecordReaderDataSetIterator(recordReader, batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); // Use 65% of data for training

    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    // Normalize to zero mean, unit variance. fit() only collects statistics from
    // the training data; both splits are then transformed with those statistics.
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);
    normalizer.transform(trainingData);
    normalizer.transform(testData); // test data uses *training*-set statistics

    final int numInputs = 4;
    int outputNum = 3;
    int iterations = 1000;
    long seed = 6;

    log.info("Build model....");
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
            .activation("tanh").weightInit(WeightInit.XAVIER).learningRate(0.1).regularization(true).l2(1e-4)
            .list().layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(3).build())
            .layer(1, new DenseLayer.Builder().nIn(3).nOut(3).build())
            .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .activation("softmax").nIn(3).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    // Run the model, labeling features/outputs for the visualization layer.
    String[] inputFeatureNames = { "Sepal length (4.3-7.9)", "Sepal width (2.0-4.4)",
            "Petal length (1.0-6.9)", "Petal width (0.1-2.5)" };
    String[] outputLabelNames = { "Iris setosa", "Iris versicolor", "Iris virginica" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.setDataNormalization(normalizer);
    model.init();
    model.setListeners(new ModelListener(100, webSocketSession)); // progress every 100 updates
    model.fit(trainingData);

    // Evaluate the model on the test set.
    Evaluation eval = new Evaluation(3);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    log.info(eval.stats());

    // Make prediction for a setosa-like sample: expecting class 0.
    INDArray example = Nd4j.zeros(1, 4);
    example.putScalar(new int[] { 0, 0 }, 5.1);
    example.putScalar(new int[] { 0, 1 }, 3.5);
    example.putScalar(new int[] { 0, 2 }, 1.4);
    example.putScalar(new int[] { 0, 3 }, 0.2);
    // Wrapping in a DataSet lets the normalizer transform `example` in place.
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 5.1,3.5,1.4,0.2: " + prediction[0]);
    return model;
}
From source file:com.javafxpert.neuralnetviz.scenario.MLPClassifierMoon.java
License:Apache License
/**
 * Builds, trains, and evaluates a single-hidden-layer MLP binary classifier on
 * the saturn CSV data set, streaming training progress to the WebSocket session.
 * NOTE(review): the class name says "Moon" but the data files loaded are
 * saturn_data_train/eval.csv — confirm which data set is intended.
 *
 * @param webSocketSession destination for ModelListener progress updates
 * @return the trained network
 * @throws Exception if the CSV files cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    int seed = 123;
    double learningRate = 0.005;
    int batchSize = 50;
    int nEpochs = 100;

    int numInputs = 2;
    int numOutputs = 2;
    int numHiddenNodes = 8;

    // Load the training data: label in column 0, 2 classes.
    RecordReader rr = new CSVRecordReader();
    rr.initialize(new FileSplit(new File("src/main/resources/classification/saturn_data_train.csv")));
    DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, batchSize, 0, 2);

    // Load the test/evaluation data:
    RecordReader rrTest = new CSVRecordReader();
    rrTest.initialize(new FileSplit(new File("src/main/resources/classification/saturn_data_eval.csv")));
    DataSetIterator testIter = new RecordReaderDataSetIterator(rrTest, batchSize, 0, 2);

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(1)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).learningRate(learningRate)
            .updater(Updater.NESTEROVS).momentum(0.9).list()
            .layer(0,
                    new DenseLayer.Builder().nIn(numInputs).nOut(numHiddenNodes).weightInit(WeightInit.XAVIER)
                            .activation("relu").build())
            .layer(1,
                    new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).weightInit(WeightInit.XAVIER)
                            .activation("softmax").nIn(numHiddenNodes).nOut(numOutputs).build())
            .pretrain(false).backprop(true).build();

    String[] inputFeatureNames = { "x (-1.52 .. 2.54)", "y (-1.06 .. 1.58)" };
    String[] outputLabelNames = { "planet", "ring" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    model.setListeners(new ModelListener(100, webSocketSession)); // progress every 100 parameter updates

    for (int n = 0; n < nEpochs; n++) {
        model.fit(trainIter);
    }

    System.out.println("Evaluate model....");
    Evaluation eval = new Evaluation(numOutputs);
    while (testIter.hasNext()) {
        DataSet t = testIter.next();
        INDArray features = t.getFeatureMatrix();
        INDArray labels = t.getLabels();
        INDArray predicted = model.output(features, false);
        eval.eval(labels, predicted);
    }
    // Print the evaluation statistics.
    System.out.println(eval.stats());

    // Make a prediction for input (9.8520, -1.9809).
    // NOTE(review): this input lies outside the documented feature ranges above —
    // confirm it is intentional.
    INDArray example = Nd4j.zeros(1, 2);
    example.putScalar(new int[] { 0, 0 }, 9.8520);
    example.putScalar(new int[] { 0, 1 }, -1.9809);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 9.8520, -1.9809: " + prediction[0]);
    System.out.println("****************Example finished********************");
    return model;
}
From source file:com.javafxpert.neuralnetviz.scenario.SpeedDating.java
License:Apache License
/**
 * Builds, trains, and evaluates a single-hidden-layer tanh network that predicts
 * whether a speed date leads to a second date, streaming progress to the
 * WebSocket session.
 *
 * @param webSocketSession destination for ModelListener progress updates
 * @return the trained network (with its normalizer attached)
 * @throws Exception if the CSV file cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    // First: get the dataset using the record reader. CSVRecordReader handles loading/parsing.
    int numLinesToSkip = 0;
    String delimiter = ",";
    org.datavec.api.records.reader.RecordReader recordReader = new org.datavec.api.records.reader.impl.csv.CSVRecordReader(
            numLinesToSkip, delimiter);
    recordReader.initialize(new org.datavec.api.split.FileSplit(
            new File("src/main/resources/classification/speed_dating_all.csv")));

    // Second: the RecordReaderDataSetIterator handles conversion to DataSet objects.
    int labelIndex = 0; // class label is in column 0; the remaining columns are features
    int numClasses = 2; // binary outcome: second date or not
    int batchSize = 8378; // all examples loaded as one DataSet (not advisable for large data)
    DataSetIterator iterator = new org.deeplearning4j.datasets.datavec.RecordReaderDataSetIterator(recordReader,
            batchSize, labelIndex, numClasses);
    DataSet allData = iterator.next();
    allData.shuffle();
    SplitTestAndTrain testAndTrain = allData.splitTestAndTrain(0.65); // Use 65% of data for training

    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    // Normalize to zero mean, unit variance. fit() only collects statistics from
    // the training data; both splits are then transformed with those statistics.
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);
    normalizer.transform(trainingData);
    normalizer.transform(testData); // test data uses *training*-set statistics

    final int numInputs = 3;
    int outputNum = 2;
    int iterations = 300;
    long seed = 6;

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(seed).iterations(iterations)
            .activation("tanh").weightInit(WeightInit.XAVIER).learningRate(0.1).regularization(true).l2(1e-4)
            .list().layer(0, new DenseLayer.Builder().nIn(numInputs).nOut(4).build())
            .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD)
                    .activation("softmax").nIn(4).nOut(outputNum).build())
            .backprop(true).pretrain(false).build();

    String[] inputFeatureNames = { "Attractive (1-10)", "Intelligent (1-10)", "Fun (1-10)" };
    String[] outputLabelNames = { "No second date", "Date again" };
    MultiLayerNetworkEnhanced model = new MultiLayerNetworkEnhanced(conf, inputFeatureNames, outputLabelNames);
    model.init();
    model.setListeners(new ModelListener(10, webSocketSession)); // progress every 10 parameter updates
    model.setDataNormalization(normalizer);
    model.fit(trainingData);

    // Evaluate the model on the test set.
    Evaluation eval = new Evaluation(outputNum);
    INDArray output = model.output(testData.getFeatureMatrix());
    eval.eval(testData.getLabels(), output);
    System.out.println(eval.stats());

    // Make prediction for ratings attractive=7, intelligent=8, fun=9.
    INDArray example = Nd4j.zeros(1, 3);
    example.putScalar(new int[] { 0, 0 }, 7);
    example.putScalar(new int[] { 0, 1 }, 8);
    example.putScalar(new int[] { 0, 2 }, 9);
    // Wrapping in a DataSet lets the normalizer transform `example` in place.
    DataSet ds = new DataSet(example, null);
    normalizer.transform(ds);
    int[] prediction = model.predict(example);
    System.out.println("prediction for 7 (attractive), 8 (intelligent), 9 (fun): " + prediction[0]);
    System.out.println("****************Example finished********************");
    return model;
}
From source file:com.javafxpert.neuralnetviz.scenario.WineClassifier.java
License:Apache License
/**
 * Builds, trains, and evaluates a single-hidden-layer wine-cultivar classifier
 * on the UCI wine data set, streaming training progress to the WebSocket session.
 * Hyperparameters come from class-level constants (SEED, ITERATIONS, EPSILON,
 * ALPHA, EPOCHS, layer sizes, activations).
 *
 * @param webSocketSession destination for ModelListener progress updates
 * @return the trained network (with its normalizer attached)
 * @throws Exception if the data file cannot be read
 */
public static MultiLayerNetworkEnhanced buildNetwork(WebSocketSession webSocketSession) throws Exception {
    // Load the training data:
    RecordReader rr = new CSVRecordReader();
    rr.initialize(new FileSplit((new File("src/main/resources/classification/wine.data"))));
    DataSetIterator iterator = new RecordReaderDataSetIterator(rr, FILE_SIZE, CLASS_INDEX, NUM_OF_CLASSES);
    DataSet wineData = iterator.next();
    wineData.shuffle();
    SplitTestAndTrain testAndTrain = wineData.splitTestAndTrain(DATA_SPLIT_TRAIN_TEST);

    DataSet trainingData = testAndTrain.getTrain();
    DataSet testData = testAndTrain.getTest();

    // A standard normalizer giving zero mean, unit variance. fit() only collects
    // statistics from the training data; both splits are transformed with them.
    DataNormalization normalizer = new NormalizerStandardize();
    normalizer.fit(trainingData);
    normalizer.transform(trainingData);
    normalizer.transform(testData); // test data uses *training*-set statistics

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(SEED).iterations(ITERATIONS)
            .weightInit(WeightInit.RELU).optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
            .learningRate(EPSILON).regularization(true).l2(1e-4).updater(Updater.NESTEROVS).momentum(ALPHA)
            .list()
            .layer(0, new DenseLayer.Builder().nIn(INPUT_NEURONS).nOut(HIDDEN_NEURONS)
                    .activation(HIDDEN_LAYER_ACTIVATION).build())
            .layer(1, new OutputLayer.Builder(LossFunction.NEGATIVELOGLIKELIHOOD).nIn(HIDDEN_NEURONS)
                    .nOut(OUTPUT_NEURONS).activation(OUTPUT_LAYER_ACTIVATION).build())
            .pretrain(false).backprop(true).build();

    // Feature/label names (with observed value ranges) for the visualization layer.
    String[] inputFeatureNames = { "Alcohol (11.0-14.9)", "Malic acid (0.7-5.8)", "Ash (1.3-3.3)",
            "Alcalinity of ash (10.6-30.0)", "Magnesium (70-162)", "Total phenols (0.9-3.9)",
            "Flavanoids (0.30-5.1)", "Nonflavanoid phenols (0.1-0.7)", "Proanthocyanins (0.4-3.6)",
            "Color intensity (1.2-13.0)", "Hue (0.4-1.8)", "OD280/OD315 of diluted (1.2-4.0)",
            "Proline (278-1680)" };
    String[] outputLabelNames = { "Cultivar A", "Cultivar B", "Cultivar C" };
    MultiLayerNetworkEnhanced networkModel = new MultiLayerNetworkEnhanced(conf, inputFeatureNames,
            outputLabelNames);
    networkModel.init();
    networkModel.setListeners(new ModelListener(10, webSocketSession)); // progress every 10 parameter updates
    networkModel.setDataNormalization(normalizer);

    for (int n = 0; n < EPOCHS; n++) {
        networkModel.fit(trainingData);
    }

    System.out.println("Evaluate model....");
    Evaluation eval = new Evaluation(OUTPUT_NEURONS);
    INDArray output = networkModel.output(testData.getFeatureMatrix());
    // Debug output: dump the normalized test features and raw network outputs.
    System.out.println(testData.getFeatureMatrix());
    System.out.println(output);
    eval.eval(testData.getLabels(), output);

    // Print the evaluation statistics.
    System.out.println(eval.stats());
    return networkModel;
}