Example usage for org.deeplearning4j.nn.multilayer MultiLayerNetwork getnLayers

Introduction

On this page you can find example usage of org.deeplearning4j.nn.multilayer MultiLayerNetwork getnLayers.

Prototype

public int getnLayers() 

Document

Get the number of layers in the network
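
As a minimal sketch of the call itself (the tiny two-layer configuration below is an illustrative assumption, not taken from the examples on this page), getnLayers() returns the number of layers held by an initialized network:

// Illustrative sketch: build a small two-layer network and query its layer count.
// Layer types and sizes here are arbitrary assumptions.
MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .list()
        .layer(0, new DenseLayer.Builder().nIn(4).nOut(3).build())
        .layer(1, new OutputLayer.Builder().nIn(3).nOut(2)
                .lossFunction(LossFunctions.LossFunction.MSE).build())
        .build();

MultiLayerNetwork net = new MultiLayerNetwork(conf);
net.init();
System.out.println(net.getnLayers()); // prints 2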

Usage

From source file:org.ensor.fftmusings.autoencoder.StackTrainer.java

public static void main(String[] args) throws Exception {

    // One pretrained two-layer autoencoder per stacked encoder/decoder pair
    MultiLayerNetwork[] pretrainedLayers = new MultiLayerNetwork[5];

    pretrainedLayers[0] = ModelSerializer.restoreMultiLayerNetwork("data/daa/model-1024-1200sparse0.01.nn");
    pretrainedLayers[1] = ModelSerializer.restoreMultiLayerNetwork("data/daa/model-1200-800sparse0.01.nn");
    pretrainedLayers[2] = ModelSerializer.restoreMultiLayerNetwork("data/daa/model-800-400sparse0.01.nn");
    pretrainedLayers[3] = ModelSerializer.restoreMultiLayerNetwork("data/daa/model-400-200sparse0.01.nn");
    pretrainedLayers[4] = ModelSerializer.restoreMultiLayerNetwork("data/daa/model-200-100sparse0.01.nn");

    NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
            .seed(System.currentTimeMillis()).iterations(1)
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).weightInit(WeightInit.XAVIER)
            .updater(Updater.NESTEROVS).regularization(false).l1(0.000).learningRate(0.0001);
    //.learningRate(Double.parseDouble(args[0]));
    int layerNo = 0;

    NeuralNetConfiguration.ListBuilder listBuilder = builder.list()
            .layer(layerNo++, new RBM.Builder().nIn(1024).nOut(1200).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(1200).nOut(800).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(800).nOut(400).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(400).nOut(200).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(200).nOut(100).activation(Activation.SIGMOID).build())
            //                .layer(layerNo++, new RBM.Builder()
            //                        .nIn(100)
            //                        .nOut(50)
            //                        .activation(Activation.SIGMOID)
            //                        .build())
            //                .layer(layerNo++, new RBM.Builder()
            //                        .nIn(50)
            //                        .nOut(100)
            //                        .activation(Activation.SIGMOID)
            //                        .build())
            .layer(layerNo++, new RBM.Builder().nIn(100).nOut(200).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(200).nOut(400).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(400).nOut(800).activation(Activation.SIGMOID).build())
            .layer(layerNo++, new RBM.Builder().nIn(800).nOut(1200).activation(Activation.SIGMOID).build())
            .layer(layerNo++,
                    new OutputLayer.Builder().nIn(1200).nOut(1024).activation(Activation.IDENTITY)
                            .lossFunction(LossFunctions.LossFunction.L2).build())
            .pretrain(false).backprop(true);

    MultiLayerConfiguration conf = listBuilder.build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(new ScoreIterationListener(1)); // print the score every iteration

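    // Seed the stacked network with the pretrained autoencoder parameters:
    // layer 0 of each two-layer autoencoder becomes encoder layer i, and its
    // layer 1 (the decoder) goes to the mirrored position from the end of the
    // stack, which getnLayers() locates.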
    for (layerNo = 0; layerNo < 5; layerNo++) {
        model.getLayer(layerNo).setParam(PretrainParamInitializer.BIAS_KEY,
                pretrainedLayers[layerNo].getLayer(0).getParam(PretrainParamInitializer.BIAS_KEY));
        model.getLayer(layerNo).setParam(PretrainParamInitializer.WEIGHT_KEY,
                pretrainedLayers[layerNo].getLayer(0).getParam(PretrainParamInitializer.WEIGHT_KEY));

        model.getLayer(model.getnLayers() - layerNo - 1).setParam(PretrainParamInitializer.BIAS_KEY,
                pretrainedLayers[layerNo].getLayer(1).getParam(PretrainParamInitializer.BIAS_KEY));
        model.getLayer(model.getnLayers() - layerNo - 1).setParam(PretrainParamInitializer.WEIGHT_KEY,
                pretrainedLayers[layerNo].getLayer(1).getParam(PretrainParamInitializer.WEIGHT_KEY));
    }

    DataSetIterator iter = new FFTDataIterator(new Random(), 100, 1250, System.out);

    for (int epoch = 0; epoch < 300; epoch++) {
        model.fit(iter);
        iter.reset();
        evaluateModel(model, epoch);
        ModelSerializer.writeModel(model, "stack.rnn", true);
    }
}

From source file:org.knime.ext.dl4j.base.nodes.learn.AbstractDLLearnerNodeModel.java

License:Open Source License

/**
 * Performs one epoch of pretraining of the specified {@link MultiLayerNetwork}. Checks the
 * {@link LearningMonitor} of this learner to determine whether learning should be stopped early.
 *
 * @param mln the network to train
 * @param data the data to train on
 * @param exec used to check for cancelled execution and stop learning
 * @throws CanceledExecutionException if execution is cancelled by the user
 */
protected void pretrainOneEpoch(final MultiLayerNetwork mln, final DataSetIterator data,
        final ExecutionContext exec) throws CanceledExecutionException {
    for (int i = 0; i < mln.getnLayers(); i++) {
        exec.setMessage("Performing Pretraining on Layer: " + (i + 1));
        while (data.hasNext()) {
            exec.checkCanceled();
            if (m_learningMonitor.checkStopLearning()) {
                break;
            }
            mln.pretrainLayer(i, data.next().getFeatureMatrix());
        }
        data.reset();
    }
}
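
Here getnLayers() bounds a greedy layer-wise pretraining loop: each layer is pretrained on the full dataset in turn, with the iterator reset before moving to the next layer. Note that a stop request from the LearningMonitor only breaks out of the inner loop over batches; the outer loop still visits the remaining layers, re-checking the monitor for each.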