Example usage for org.deeplearning4j.util ModelSerializer restoreMultiLayerNetwork

List of usage examples for org.deeplearning4j.util ModelSerializer restoreMultiLayerNetwork

Introduction

In this page you can find the example usage for org.deeplearning4j.util ModelSerializer restoreMultiLayerNetwork.

Prototype

public static MultiLayerNetwork restoreMultiLayerNetwork(@NonNull String path) throws IOException 

Source Link

Document

Load a MultiLayerNetwork model from a file.

Usage

From source file:com.circle_technologies.cnn4j.predictive.network.Network.java

License:Apache License

/**
 * Restores the network from a file. This file is most commonly called cnn4j.net
 *
 * @param file The network file.
 * @return <code>true - </code> if the network was restored successfully <br>
 * <code>false - </code> if not (IOException)
 * @see #save(File)
 */
public boolean restore(File file) {
    try {
        // Deserialize the network, then (re)initialize it before use.
        mMultiLayerNetwork = ModelSerializer.restoreMultiLayerNetwork(file);
        mMultiLayerNetwork.init();
        return true;
    } catch (IOException e) {
        // Include the failure cause so it is not silently swallowed.
        Log.debug("ERROR", "Failed restoring model: " + e.getMessage());
        return false;
    }
}

From source file:com.sliit.neuralnetwork.RecurrentNN.java

/**
 * Saves the current model to, or loads a model from, the upload directory.
 * The model file is matched by its base name (extension ignored).
 *
 * @param name base name (without extension) of the network file.
 * @param save {@code true} to write the in-memory model to disk,
 *             {@code false} to restore it from disk.
 */
private void loadSaveNN(String name, boolean save) {
    System.out.println("recNN loadSaveNN");

    File directory = new File(uploadDirectory);
    File[] allNN = directory.listFiles();
    boolean found = false;
    try {
        // Lazily build the model before the first save.
        if (model == null && save) {
            buildModel();
        }
        // listFiles() returns null when the directory does not exist.
        if (allNN != null) {
            for (File candidate : allNN) {
                String baseName = FilenameUtils.removeExtension(candidate.getName());
                if (name.equals(baseName)) {
                    found = true;
                    if (save) {
                        // Overwrite the existing file, including updater state.
                        ModelSerializer.writeModel(model, candidate, true);
                        System.out.println("Model Saved With Weights Successfully");
                    } else {
                        model = ModelSerializer.restoreMultiLayerNetwork(candidate);
                    }
                    break;
                }
            }
        }
        // Saving a model that has no existing file: write a new zip.
        // writeModel creates the file itself, so no explicit createNewFile()
        // (whose return value was previously ignored) is needed.
        if (!found && save) {
            File targetFile = new File(directory, name + ".zip");
            ModelSerializer.writeModel(model, targetFile, true);
        }
    } catch (IOException e) {
        // Print the full stack trace so the cause is not reduced to a message.
        System.out.println("Error occurred:" + e.getMessage());
        e.printStackTrace();
    }
}

From source file:examples.cnn.EnsambleAvg.java

License:Apache License

/**
 * Ensemble classification of image vectors: three pre-trained networks are
 * loaded from the "models" resource directory and their averaged outputs
 * are evaluated against the held-out test split of a local Spark job.
 */
public static void main(String[] args) {

    SparkConf conf = new SparkConf();
    conf.setMaster("local[*]");
    conf.setAppName("Images CNN Classification Ensamble");
    try (JavaSparkContext sc = new JavaSparkContext(conf)) {

        JavaRDD<String> raw = sc.textFile("data/images-data-rgb.csv");
        String first = raw.first();

        // Skip the header row, then split each "label;pixels" line.
        JavaPairRDD<String, String> labelData = raw.filter(f -> !f.equals(first)).mapToPair(r -> {
            String[] tab = r.split(";");
            return new Tuple2<>(tab[0], tab[1]);
        });

        // Assign each distinct label a stable numeric index.
        Map<String, Long> labels = labelData.map(t -> t._1).distinct().zipWithIndex()
                .mapToPair(t -> new Tuple2<>(t._1, t._2)).collectAsMap();

        int numLabels = labels.size();
        log.info("Number of labels {}", numLabels);
        labels.forEach((a, b) -> log.info("{}: {}", a, b));

        // One-hot label vector plus normalized pixel values per image.
        JavaRDD<Tuple2<INDArray, double[]>> labelsWithData = labelData.map(t -> {
            INDArray label = FeatureUtil.toOutcomeVector(labels.get(t._1).intValue(), labels.size());
            double[] arr = Arrays.stream(t._2.split(" ")).map(normalize1).mapToDouble(Double::doubleValue)
                    .toArray();
            return new Tuple2<>(label, arr);
        });

        // 80/20 split; only the 20% test partition is used here.
        JavaRDD<Tuple2<INDArray, double[]>>[] splited = labelsWithData.randomSplit(new double[] { .8, .2 },
                seed);

        JavaRDD<DataSet> test = splited[1].map(t -> {
            INDArray features = Nd4j.create(t._2, new int[] { 1, t._2.length });
            return new DataSet(features, t._1);
        });
        log.info("Number of test images {}", test.count());

        // Ensemble members live under resources/models, named by their accuracy.
        String dir = EnsambleAvg.class.getClassLoader().getResource("models").getFile();
        MultiLayerNetwork n1 = ModelSerializer.restoreMultiLayerNetwork(new File(dir, "0.7596314907872697"));
        MultiLayerNetwork n2 = ModelSerializer.restoreMultiLayerNetwork(new File(dir, "0.7763819095477387"));
        MultiLayerNetwork n3 = ModelSerializer.restoreMultiLayerNetwork(new File(dir, "0.7646566164154104"));

        // Log every misclassified test example.
        test.filter(ds -> label(predictAvg(numLabels, ds, n1, n2, n3)) != label(ds.getLabels()))
                .foreach(ds -> log.info("predicted {}, label {}",
                        asString(predictAvg(numLabels, ds, n1, n2, n3)), label(ds.getLabels())));

        JavaPairRDD<Object, Object> predictionsAndLabels = test.mapToPair(
                ds -> new Tuple2<>(label(predictAvg(numLabels, ds, n1, n2, n3)), label(ds.getLabels())));

        MulticlassMetrics metrics = new MulticlassMetrics(predictionsAndLabels.rdd());
        double accuracy = 1.0 * predictionsAndLabels.filter(x -> x._1.equals(x._2)).count() / test.count();
        log.info("accuracy {} ", accuracy);
        predictionsAndLabels.take(10).forEach(t -> log.info("predicted {}, label {}", t._1, t._2));
        log.info("confusionMatrix {}", metrics.confusionMatrix());

    } catch (IOException e) {
        log.error(e.getLocalizedMessage(), e);
    }

}

From source file:examples.cnn.EnsambleVote.java

License:Apache License

/**
 * Ensemble classification of image vectors by majority vote: every network
 * found in the "models" resource directory votes on each test example.
 */
public static void main(String[] args) {

    SparkConf conf = new SparkConf();
    conf.setMaster("local[*]");
    conf.setAppName("Images CNN Classification voting Ensamble");

    try (JavaSparkContext sc = new JavaSparkContext(conf)) {

        JavaRDD<String> raw = sc.textFile("data/images-data-rgb.csv");
        String first = raw.first();

        // Skip the header row, then split each "label;pixels" line.
        JavaPairRDD<String, String> labelData = raw.filter(f -> !f.equals(first)).mapToPair(r -> {
            String[] tab = r.split(";");
            return new Tuple2<>(tab[0], tab[1]);
        });

        // Assign each distinct label a stable numeric index.
        Map<String, Long> labels = labelData.map(t -> t._1).distinct().zipWithIndex()
                .mapToPair(t -> new Tuple2<>(t._1, t._2)).collectAsMap();

        int numLabels = labels.size();
        log.info("Number of labels {}", numLabels);
        labels.forEach((a, b) -> log.info("{}: {}", a, b));

        // One-hot label vector plus normalized pixel values per image.
        JavaRDD<Tuple2<INDArray, double[]>> labelsWithData = labelData.map(t -> {
            INDArray label = FeatureUtil.toOutcomeVector(labels.get(t._1).intValue(), labels.size());
            double[] arr = Arrays.stream(t._2.split(" ")).map(normalize1).mapToDouble(Double::doubleValue)
                    .toArray();
            return new Tuple2<>(label, arr);
        });

        // 80/20 split; only the 20% test partition is used here.
        JavaRDD<Tuple2<INDArray, double[]>>[] splited = labelsWithData.randomSplit(new double[] { .8, .2 },
                seed);

        JavaRDD<DataSet> test = splited[1].map(t -> {
            INDArray features = Nd4j.create(t._2, new int[] { 1, t._2.length });
            return new DataSet(features, t._1);
        });
        log.info("Number of test images {}", test.count());

        // Load every serialized model found under resources/models.
        List<MultiLayerNetwork> nets = Arrays.stream(
                new File(EnsambleVote.class.getClassLoader().getResource("models").getFile()).listFiles())
                .map(f -> {
                    MultiLayerNetwork n = null;
                    try {
                        n = ModelSerializer.restoreMultiLayerNetwork(f);
                    } catch (Exception e) {
                        throw new RuntimeException(e);
                    }
                    return n;
                }).collect(Collectors.toList());

        // Convert once up front instead of re-allocating the array in every
        // per-record lambda invocation.
        MultiLayerNetwork[] members = nets.toArray(new MultiLayerNetwork[0]);

        // Log every misclassified test example.
        test.filter(ds -> label(predictByVote(numLabels, ds, members)) != label(ds.getLabels()))
                .foreach(ds -> log.info("predicted {}, label {}",
                        asString(predictByVote(numLabels, ds, members)), label(ds.getLabels())));

        JavaPairRDD<Object, Object> predictionsAndLabels = test.mapToPair(
                ds -> new Tuple2<>(label(predictByVote(numLabels, ds, members)), label(ds.getLabels())));

        MulticlassMetrics metrics = new MulticlassMetrics(predictionsAndLabels.rdd());
        double accuracy = 1.0 * predictionsAndLabels.filter(x -> x._1.equals(x._2)).count() / test.count();
        log.info("accuracy {} ", accuracy);
        predictionsAndLabels.take(10).forEach(t -> log.info("predicted {}, label {}", t._1, t._2));
        log.info("confusionMatrix {}", metrics.confusionMatrix());
    }
}

From source file:examples.utils.ModelLoader.java

License:Apache License

/**
 * Loads the networks identified by the given accuracy scores and returns the
 * element-wise average of their parameter vectors.
 *
 * @param workingDir directory containing the serialized models; each file is
 *                   named by its accuracy score (e.g. "0.7596314907872697").
 * @param models     accuracy scores identifying the model files to load.
 * @return the averaged parameter vector.
 * @throws IllegalArgumentException if no model scores are supplied.
 * @throws RuntimeException wrapping any IOException raised while loading.
 */
public static INDArray load(String workingDir, double... models) {
    if (models.length == 0) {
        // Previously this fell through to an unchecked Optional.get(),
        // failing with an uninformative NoSuchElementException.
        throw new IllegalArgumentException("At least one model accuracy score must be supplied");
    }
    double size = models.length;
    List<INDArray> params = Arrays.stream(models).mapToObj(acc -> {
        try {
            return ModelSerializer.restoreMultiLayerNetwork(new File(workingDir, Double.toString(acc)))
                    .params();
        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }).collect(Collectors.toList());
    // params is non-empty here, so the orElseThrow can never fire.
    return params.stream().reduce(INDArray::add).orElseThrow(IllegalStateException::new).div(size);
}

From source file:NeuralNetwork.NeuralNetwork.java

/**
 * Creates the neural-network wrapper, restoring the pre-trained network
 * from {@code src/NeuralNetwork/network.zip}.
 *
 * @param main  owning application object.
 * @param setup attacker setup arguments.
 * @throws IOException if the serialized network cannot be read.
 */
public NeuralNetwork(Main main, String[] setup) throws IOException {
    this.attackersetup = setup;
    this.main = main;
    // Pre-trained model shipped with the sources; path is relative to the working directory.
    this.network = ModelSerializer.restoreMultiLayerNetwork("src/NeuralNetwork/network.zip");
}

From source file:org.ensor.fftmusings.autoencoder.DeepAutoencoder.java

/**
 * Loads a deep auto-encoder from the given file, or builds a fresh
 * (untrained) symmetric RBM stack when the file does not exist.
 *
 * The built network is an hourglass: 1024 -> 1200 -> 800 -> 400 -> 200 ->
 * 100 -> 200 -> 400 -> 800 -> 1200 -> 1024, with an identity/L2 output
 * layer so the network reconstructs its input.
 *
 * @param filename     path to the serialized model file.
 * @param learningRate learning rate used only when a new network is built;
 *                     a model loaded from disk keeps its saved configuration.
 * @return a ready-to-train (or previously trained) network.
 * @throws IOException if the existing model file cannot be read.
 */
public static MultiLayerNetwork readAutoencoder(String filename, double learningRate) throws IOException {
    MultiLayerNetwork model;

    File modelFile = new File(filename);
    if (!modelFile.exists()) {
        // No saved model: build a new configuration. Note the time-based
        // seed makes each freshly built network non-reproducible.
        NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
                .seed(System.currentTimeMillis()).iterations(1)
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .weightInit(WeightInit.XAVIER).updater(Updater.NESTEROVS).regularization(true).l1(0.001);

        builder = builder.learningRate(learningRate);

        // Encoder layers 0-4 shrink 1024 -> 100; decoder layers 5-8 mirror
        // them back up; layer 9 reconstructs the 1024-wide input.
        NeuralNetConfiguration.ListBuilder listBuilder = builder.list()
                .layer(0,
                        new RBM.Builder().nIn(1024).nOut(1200).activation(Activation.RELU).sparsity(0.1)
                                .build())
                .layer(1,
                        new RBM.Builder().nIn(1200).nOut(800).activation(Activation.RELU).sparsity(0.1).build())
                .layer(2,
                        new RBM.Builder().nIn(800).nOut(400).activation(Activation.RELU).sparsity(0.1).build())
                .layer(3,
                        new RBM.Builder().nIn(400).nOut(200).activation(Activation.RELU).sparsity(0.1).build())
                .layer(4,
                        new RBM.Builder().nIn(200).nOut(100).activation(Activation.RELU).sparsity(0.1).build())
                .layer(5,
                        new RBM.Builder().nIn(100).nOut(200).activation(Activation.RELU).sparsity(0.1).build())
                .layer(6,
                        new RBM.Builder().nIn(200).nOut(400).activation(Activation.RELU).sparsity(0.1).build())
                .layer(7,
                        new RBM.Builder().nIn(400).nOut(800).activation(Activation.RELU).sparsity(0.1).build())
                .layer(8,
                        new RBM.Builder().nIn(800).nOut(1200).activation(Activation.RELU).sparsity(0.1).build())
                .layer(9,
                        new OutputLayer.Builder().nIn(1200).nOut(1024).activation(Activation.IDENTITY)
                                .lossFunction(LossFunctions.LossFunction.L2).build())
                .pretrain(false).backprop(true);

        MultiLayerConfiguration conf = listBuilder.build();

        model = new MultiLayerNetwork(conf);

        model.init();
        model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(System.out)));
    } else {
        // Existing model on disk: restore it as-is (learningRate is ignored).
        model = ModelSerializer.restoreMultiLayerNetwork(modelFile);
    }

    return model;
}

From source file:org.ensor.fftmusings.autoencoder.FFTAutoEncoderTest.java

/**
 * Round-trips a DCT file through a previously trained auto-encoder and
 * writes the reconstruction out as a stereo WAV file (plus a PNG of the
 * magnitude spectrum) to inspect what the network has learned.
 */
public static void main(String[] args) throws IOException {
    // Trained auto-encoder snapshot (epoch 130 of the 1024-1500 run).
    MultiLayerNetwork network = ModelSerializer
            .restoreMultiLayerNetwork("data/autoencoders/1024-1500/model-130.faa");

    String dctPath = "data/dct/20.dct";
    String wavPath = "sample.wav";

    // Feed the DCT frames through the auto-encoder, dump the magnitude
    // image, invert the transform, duplicate to two channels, and write WAV.
    try (DCT.Reader dctReader = DCT.createReader(dctPath)) {
        new Pipeline(new DCTAutoEncoderProcessor(network))
                .add(new DCT.ToPNG("fft-magnitude-daa2.png"))
                .add(new DCT.Reverse(false))
                .add(new ChannelDuplicator(AudioSample.class, 2))
                .add(WAVFileWriter.create(wavPath))
                .execute(dctReader);
    } catch (Exception ex) {
        throw new RuntimeException("Could not process file " + dctPath, ex);
    }
}

From source file:org.ensor.fftmusings.autoencoder.GenericAutoencoder.java

/**
 * Builds an {@link Autoencoder} from a JSON metadata file. The metadata
 * determines both the network topology and where its weights live: if the
 * referenced model file exists it is restored, otherwise a new single-hidden-
 * layer network is configured from the metadata.
 *
 * When the metadata declares its input source as "autoencoded", the source
 * auto-encoder is loaded recursively and chained in front of this one.
 *
 * @param filename path to the JSON metadata file.
 * @return the assembled auto-encoder (model may be untrained if newly built).
 * @throws IOException if the metadata or an existing model file cannot be read.
 */
public static Autoencoder readAutoencoder(String filename) throws IOException {
    ObjectMapper m = new ObjectMapper();
    AutoencoderMetadata autoEncoderData = m.readValue(new File(filename), AutoencoderMetadata.class);
    Autoencoder ae = new Autoencoder(autoEncoderData);

    // Chain onto an upstream auto-encoder when the input is itself encoded.
    if ("autoencoded".equals(autoEncoderData.getInputSource())) {
        Autoencoder sourceAutoencoder = readAutoencoder(autoEncoderData.getSourceAutoencoder());
        ae.setSourceAutoencoder(sourceAutoencoder);
    } else if ("iterator".equals(autoEncoderData.getInputSource())) {
        // "iterator" input source needs no extra wiring here.
    }

    // Fall back to a default model filename (and record it in the metadata
    // object) when none is configured.
    String modelFilename = autoEncoderData.getFilename();
    if (modelFilename == null) {
        modelFilename = "newmodel.faa";
        autoEncoderData.setFilename(modelFilename);
    }

    File modelFile = new File(modelFilename);
    if (!modelFile.exists()) {
        // No saved weights: configure a fresh network from the metadata.
        // Note the time-based seed makes newly built networks non-reproducible.
        // NOTE(review): getL2Regularization() is fed into l1() — this looks
        // like a naming mismatch; confirm whether an L1 or L2 penalty is intended.
        NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
                .seed(System.currentTimeMillis()).iterations(autoEncoderData.getIterations())
                .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
                .weightInit(WeightInit.XAVIER).updater(Updater.NESTEROVS)
                .regularization(autoEncoderData.getRegularization()).l1(autoEncoderData.getL2Regularization());

        // Either a single learning rate, or a per-iteration schedule whose
        // initial rate is the first entry of the LR array.
        if (autoEncoderData.getLRArray().isEmpty()) {
            builder = builder.learningRate(autoEncoderData.getLearningRate());
        } else {
            Map<Integer, Double> lrSchedule = new HashMap<>();
            builder = builder.learningRate(autoEncoderData.getLRArray().get(0).getLR());
            for (LRSchedule schedule : autoEncoderData.getLRArray()) {
                lrSchedule.put(schedule.getIteration(), schedule.getLR());
            }
            builder = builder.learningRateDecayPolicy(LearningRatePolicy.Schedule)
                    .learningRateSchedule(lrSchedule);
        }

        // Layer 0 encodes input -> hidden; layer 1 reconstructs hidden -> input.
        NeuralNetConfiguration.ListBuilder listBuilder = builder.list()
                .layer(0,
                        new RBM.Builder().nIn(autoEncoderData.getInput()).nOut(autoEncoderData.getHidden())
                                .activation(Activation.fromString(autoEncoderData.getIntermediateActivation()))
                                .sparsity(autoEncoderData.getSparsity()).build())
                .layer(1, new OutputLayer.Builder().nIn(autoEncoderData.getHidden())
                        .nOut(autoEncoderData.getInput()).lossFunction(autoEncoderData.getLossFunction())
                        .activation(Activation.fromString(autoEncoderData.getFinalActivation())).build())
                .pretrain(false).backprop(true);

        MultiLayerConfiguration conf = listBuilder.build();

        MultiLayerNetwork model = new MultiLayerNetwork(conf);

        model.init();
        model.setListeners(Arrays.asList((IterationListener) new ScoreIterationListener(System.out)));
        ae.setModel(model);
    } else {
        // Saved weights exist: restore them (metadata hyper-parameters are
        // not re-applied to a restored model).
        MultiLayerNetwork model = ModelSerializer.restoreMultiLayerNetwork(modelFile);
        ae.setModel(model);
    }

    System.out.println("Autoencoder: " + autoEncoderData);
    return ae;
}

From source file:org.ensor.fftmusings.autoencoder.RNNTrainer.java

public static void main(String[] args) throws Exception {

    MultiLayerNetwork stackedAutoencoder = ModelSerializer.restoreMultiLayerNetwork("stack.rnn");

    Random rng = new Random();

    RNNIterator iter = new RNNIterator(stackedAutoencoder, rng, 100, 100, System.out);

    double learningRate = 0.0001;
    if (args.length != 0) {
        learningRate = Double.parseDouble(args[0]);
    }//  ww  w.  j ava 2s. com

    int nGaussians = 8;
    int labelWidth = iter.totalOutcomes();
    int inputWidth = iter.inputColumns();
    int lstmLayerSize = 400;
    int bttLength = 50;

    LossMixtureDensity costFunction = LossMixtureDensity.builder().gaussians(nGaussians).labelWidth(inputWidth)
            .build();

    //Set up network configuration:
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
            .learningRate(learningRate).rmsDecay(0.95).seed(12345).iterations(1).regularization(true).l2(0.001)
            .weightInit(WeightInit.XAVIER).list()
            .layer(0,
                    new GravesLSTM.Builder().nIn(inputWidth).nOut(lstmLayerSize).updater(Updater.RMSPROP)
                            .activation(Activation.TANH).build())
            .layer(1,
                    new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize).updater(Updater.RMSPROP)
                            .activation(Activation.TANH).build())
            //                .layer(2, new RnnOutputLayer.Builder()
            //                        .nIn(lstmLayerSize)
            //                        .nOut((labelWidth + 2) * nGaussians)
            //                        .activation(Activation.IDENTITY)
            //                        //.lossFunction(LossFunctions.LossFunction.MSE)
            //                        .lossFunction(LossMixtureDensity.builder()
            //                            .gaussians(nGaussians)
            //                            .labelWidth(inputWidth)
            //                            .build())
            //                        .updater(Updater.RMSPROP)
            //                        .weightInit(WeightInit.DISTRIBUTION)
            //                        .dist(new UniformDistribution(-0.08, 0.08)).build())
            .layer(2,
                    new MixtureDensityRNNOutputLayer.Builder().gaussians(nGaussians).nIn(lstmLayerSize)
                            .nOut(labelWidth).updater(Updater.RMSPROP).build())
            .pretrain(false).backprop(true).backpropType(BackpropType.TruncatedBPTT)
            .tBPTTForwardLength(bttLength).tBPTTBackwardLength(bttLength).build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(new ScoreIterationListener(System.out));

    for (int epoch = 0; epoch < 300; epoch++) {
        model.fit(iter);
        iter.reset();
        evaluateModel(model, costFunction, stackedAutoencoder, rng, epoch);
        ModelSerializer.writeModel(model, "stack-timeseries.rnn", true);
    }
}