Example usage for org.deeplearning4j.nn.multilayer MultiLayerNetwork getLayerWiseConfigurations

Introduction

On this page you can find example usages of org.deeplearning4j.nn.multilayer.MultiLayerNetwork.getLayerWiseConfigurations().

Prototype

public MultiLayerConfiguration getLayerWiseConfigurations() 

Document

Get the configuration for the network
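
A minimal, self-contained sketch of the call (layer sizes, activations, and the loss function below are arbitrary choices for illustration):

import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
import org.deeplearning4j.nn.conf.layers.DenseLayer;
import org.deeplearning4j.nn.conf.layers.OutputLayer;
import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.nd4j.linalg.activations.Activation;
import org.nd4j.linalg.lossfunctions.LossFunctions;

public class GetConfigurationSketch {
    public static void main(String[] args) {
        // build a small two-layer configuration
        MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().list()
                .layer(0, new DenseLayer.Builder().nIn(4).nOut(8).activation(Activation.RELU).build())
                .layer(1, new OutputLayer.Builder(LossFunctions.LossFunction.MCXENT).nIn(8).nOut(3)
                        .activation(Activation.SOFTMAX).build())
                .build();

        MultiLayerNetwork net = new MultiLayerNetwork(conf);
        net.init();

        // getLayerWiseConfigurations() hands back the configuration the network was constructed with
        MultiLayerConfiguration retrieved = net.getLayerWiseConfigurations();
        System.out.println(retrieved.toJson());
    }
}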

Usage

From source file: org.knime.ext.dl4j.base.nodes.learn.AbstractDLLearnerNodeModel.java

License: Open Source License

/**
 * Performs one epoch of backpropagation with gradient descent of the specified {@link MultiLayerNetwork}. Checks
 * {@link LearningMonitor} of this learner if learning should be prematurely stopped.
 *
 * @param mln the network to train
 * @param data the data to train on
 * @param exec used to check for cancelled execution and stop learning
 * @throws CanceledExecutionException if the execution is cancelled by the user
 */
protected void backpropOneEpoch(final MultiLayerNetwork mln, final DataSetIterator data,
        final ExecutionContext exec) throws CanceledExecutionException {
    exec.setMessage("Performing Backpropagation");
    final boolean isPretrain = mln.getLayerWiseConfigurations().isPretrain();
    /*
     * Set pretrain to false here because mln.fit() on a DataSetIterator performs pretraining and
     * finetuning if pretrain is set to true. This is a workaround: the backprop procedure can't be
     * extracted from the MultiLayerNetwork.fit(DataSetIterator) implementation because it depends
     * on protected methods.
     */
    if (isPretrain) {
        mln.getLayerWiseConfigurations().setPretrain(false);
    }
    while (data.hasNext()) {
        exec.checkCanceled();
        if (m_learningMonitor.checkStopLearning()) {
            break;
        }

        mln.fit(data.next());
    }
    mln.getLayerWiseConfigurations().setPretrain(isPretrain);
}
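
If mln.fit() or exec.checkCanceled() throws, the code above never restores the original pretrain flag. A variant of the same loop (a sketch derived from the code above, not part of the KNIME source) restores it in a finally block:

    final boolean isPretrain = mln.getLayerWiseConfigurations().isPretrain();
    mln.getLayerWiseConfigurations().setPretrain(false);
    try {
        while (data.hasNext()) {
            exec.checkCanceled();
            if (m_learningMonitor.checkStopLearning()) {
                break;
            }
            mln.fit(data.next());
        }
    } finally {
        // restore the original flag even on cancellation or failure
        mln.getLayerWiseConfigurations().setPretrain(isPretrain);
    }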

From source file: org.knime.ext.dl4j.base.nodes.learn.feedforward.classification.FeedforwardClassificationLearnerNodeModel.java

License: Open Source License

/**
 * Performs training of the specified {@link MultiLayerNetwork} (whether to do backprop or finetuning or pretraining
 * is set in model configuration) for the specified number of epochs using the specified {@link DataSetIterator} and
 * specified {@link ExecutionContext} for progress reporting and execution cancelling.
 *
 * @param mln the network to train
 * @param epochs the number of epochs to train
 * @param data the data to train on
 * @param exec used for progress reporting and execution cancelling
 * @throws Exception
 */
private void trainNetwork(final MultiLayerNetwork mln, final int epochs, final DataSetIterator data,
        final ExecutionContext exec) throws Exception {

    // only do backprop for classification
    mln.getLayerWiseConfigurations().setBackprop(true);
    final boolean isFinetune = m_learnerParameterSettings.getBoolean(LearnerParameter.USE_FINETUNE);

    exec.setProgress(0.0);

    //calculate progress relative to number of epochs and what to train
    double maxProgress = epochs;

    if (isFinetune) {
        logger.info("Finetune Model for " + epochs + " epochs.");
        for (int i = 0; i < epochs; i++) {
            exec.checkCanceled();
            if (getLearningMonitor().checkStopLearning()) {
                break;
            }

            updateView(i + 1, epochs, "Finetune");
            finetuneOneEpoch(mln, data, exec);

            logEpochScore(mln, (i + 1));
            data.reset();
            exec.setProgress((i + 1) / maxProgress);
        }
    } else {
        logger.info("Do Backpropagation for " + epochs + " epochs.");
        for (int i = 0; i < epochs; i++) {
            exec.checkCanceled();
            if (getLearningMonitor().checkStopLearning()) {
                break;
            }

            updateView(i + 1, epochs, "Backprop");
            backpropOneEpoch(mln, data, exec);

            logEpochScore(mln, (i + 1));
            data.reset();
            exec.setProgress((i + 1) / maxProgress);
        }
    }
}
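
The two branches above differ only in the phase label and the per-epoch call. An illustrative refactor (the EpochStep interface and runEpochs helper are hypothetical names introduced for this sketch, not part of the KNIME source) removes the duplication:

    // hypothetical functional interface for one epoch of training
    interface EpochStep {
        void run(MultiLayerNetwork mln, DataSetIterator data, ExecutionContext exec) throws Exception;
    }

    private void runEpochs(final MultiLayerNetwork mln, final int epochs, final DataSetIterator data,
            final ExecutionContext exec, final String phase, final EpochStep step) throws Exception {
        for (int i = 0; i < epochs; i++) {
            exec.checkCanceled();
            if (getLearningMonitor().checkStopLearning()) {
                break;
            }
            updateView(i + 1, epochs, phase);
            step.run(mln, data, exec);
            logEpochScore(mln, i + 1);
            data.reset();
            exec.setProgress((i + 1) / (double) epochs);
        }
    }

    // usage: runEpochs(mln, epochs, data, exec, "Finetune", this::finetuneOneEpoch);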

From source file: org.knime.ext.dl4j.base.nodes.learn.feedforward.FeedforwardLearnerNodeModel.java

License: Open Source License

/**
 * Performs training of the specified {@link MultiLayerNetwork} (whether to do backprop or finetuning or pretraining
 * is set in model configuration) for the specified number of epochs using the specified {@link DataSetIterator} and
 * specified {@link ExecutionContext} for progress reporting and execution cancelling.
 *
 * @param mln the network to train
 * @param epochs the number of epochs to train
 * @param data the data to train on
 * @param exec used for progress reporting and execution cancelling
 * @throws Exception
 */
private void trainNetwork(final MultiLayerNetwork mln, final int epochs, final DataSetIterator data,
        final ExecutionContext exec) throws Exception {
    final boolean isPretrain = mln.getLayerWiseConfigurations().isPretrain();
    final boolean isBackprop = mln.getLayerWiseConfigurations().isBackprop();
    final boolean isFinetune = m_learnerParameterSettings.getUseFinetune().getBooleanValue();

    exec.setProgress(0.0);

    //calculate progress relative to number of epochs and what to train
    double maxProgress = 0.0;
    if (isBackprop) {
        maxProgress += epochs;
    }
    if (isFinetune) {
        maxProgress += epochs;
    }
    if (isPretrain) {
        maxProgress += epochs;
    }

    if (isPretrain) {
        logger.info("Pretrain Model for " + epochs + " epochs.");
        for (int i = 0; i < epochs; i++) {
            exec.checkCanceled();
            if (getLearningMonitor().checkStopLearning()) {
                break;
            }
            logger.info("Pretrain epoch: " + (i + 1) + " of: " + epochs);

            updateView(i + 1, epochs, "Pretrain");
            pretrainOneEpoch(mln, data, exec);

            logEpochScore(mln, (i + 1));
            data.reset();
            exec.setProgress((i + 1) / maxProgress);
        }
    }
    if (isFinetune) {
        logger.info("Finetune Model for " + epochs + " epochs.");
        for (int i = 0; i < epochs; i++) {
            exec.checkCanceled();
            if (getLearningMonitor().checkStopLearning()) {
                break;
            }
            logger.info("Finetune epoch: " + (i + 1) + " of: " + epochs);

            updateView(i + 1, epochs, "Finetune");
            finetuneOneEpoch(mln, data, exec);

            logEpochScore(mln, (i + 1));
            data.reset();
            exec.setProgress((i + 1) / maxProgress);
        }
    }
    if (isBackprop) {
        logger.info("Do Backpropagation for " + epochs + " epochs.");
        for (int i = 0; i < epochs; i++) {
            exec.checkCanceled();
            if (getLearningMonitor().checkStopLearning()) {
                break;
            }
            logger.info("Backprop epoch: " + (i + 1) + " of: " + epochs);

            updateView(i + 1, epochs, "Backprop");
            backpropOneEpoch(mln, data, exec);

            logEpochScore(mln, (i + 1));
            data.reset();
            exec.setProgress((i + 1) / maxProgress);
        }
    }
}
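
Note that each phase above restarts the reported progress at (i + 1) / maxProgress, so the progress bar drops back when a new phase begins. A sketch of cumulative reporting (the completed counter is an illustrative addition; cancellation checks, view updates, and logging are elided here for brevity):

    double completed = 0.0; // epochs finished in earlier phases
    if (isPretrain) {
        for (int i = 0; i < epochs; i++) {
            pretrainOneEpoch(mln, data, exec);
            data.reset();
            exec.setProgress((completed + i + 1) / maxProgress);
        }
        completed += epochs;
    }
    if (isFinetune) {
        for (int i = 0; i < epochs; i++) {
            finetuneOneEpoch(mln, data, exec);
            data.reset();
            exec.setProgress((completed + i + 1) / maxProgress);
        }
        completed += epochs;
    }
    if (isBackprop) {
        for (int i = 0; i < epochs; i++) {
            backpropOneEpoch(mln, data, exec);
            data.reset();
            exec.setProgress((completed + i + 1) / maxProgress);
        }
    }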

From source file: org.knime.ext.dl4j.base.nodes.learn.feedforward.pretraining.FeedforwardPretrainingLearnerNodeModel.java

License: Open Source License

/**
 * Performs pretraining of the specified {@link MultiLayerNetwork} for the specified number of epochs using the
 * specified {@link DataSetIterator} and the specified {@link ExecutionContext} for progress reporting and execution
 * cancelling.
 *
 * @param mln the network to train
 * @param epochs the number of epochs to train
 * @param data the data to train on
 * @param exec used for progress reporting and execution cancelling
 * @throws Exception
 */
private void trainNetwork(final MultiLayerNetwork mln, final int epochs, final DataSetIterator data,
        final ExecutionContext exec) throws Exception {
    // this learner only performs pretraining
    mln.getLayerWiseConfigurations().setPretrain(true);

    exec.setProgress(0.0);

    //calculate progress relative to number of epochs and what to train
    double maxProgress = epochs;

    logger.info("Pretrain Model for " + epochs + " epochs.");
    for (int i = 0; i < epochs; i++) {
        exec.checkCanceled();
        if (getLearningMonitor().checkStopLearning()) {
            break;
        }

        updateView(i + 1, epochs, "Pretrain");
        pretrainOneEpoch(mln, data, exec);

        logEpochScore(mln, (i + 1));
        data.reset();
        exec.setProgress((i + 1) / maxProgress);
    }
}

From source file: org.knime.ext.dl4j.base.nodes.learn.feedforward.regression.FeedforwardRegressionLearnerNodeModel.java

License: Open Source License

/**
 * Performs training of the specified {@link MultiLayerNetwork} for the specified number of epochs using the
 * specified {@link DataSetIterator} and specified {@link ExecutionContext} for progress reporting and execution
 * cancelling.
 *
 * @param mln the network to train
 * @param epochs the number of epochs to train
 * @param data the data to train on
 * @param exec used for progress reporting and execution cancelling
 * @throws Exception
 */
private void trainNetwork(final MultiLayerNetwork mln, final int epochs, final DataSetIterator data,
        final ExecutionContext exec) throws Exception {

    // only do backprop for regression
    mln.getLayerWiseConfigurations().setBackprop(true);

    exec.setProgress(0.0);

    //calculate progress relative to number of epochs and what to train
    double maxProgress = epochs;

    logger.info("Do Backpropagation for " + epochs + " epochs.");
    for (int i = 0; i < epochs; i++) {
        exec.checkCanceled();
        if (getLearningMonitor().checkStopLearning()) {
            break;
        }

        updateView(i + 1, epochs, "Backprop");
        backpropOneEpoch(mln, data, exec);

        logEpochScore(mln, (i + 1));
        data.reset();
        exec.setProgress((i + 1) / maxProgress);

    }
}

From source file: org.knime.ext.dl4j.base.nodes.predict.AbstractDLPredictorNodeModel.java

License: Open Source License

/**
 * Determines the number of outputs of a {@link MultiLayerNetwork}, i.e. the number of outputs of the last layer,
 * which must be an {@link OutputLayer}.
 *
 * @param mln the network to use
 * @return number of outputs of the network
 * @throws RuntimeException if the last layer is not an output layer
 */
protected int getNumberOfOutputs(final MultiLayerNetwork mln) {
    final int numberOfLayers = mln.getLayerWiseConfigurations().getConfs().size();

    final Layer l = mln.getLayerWiseConfigurations().getConf(numberOfLayers - 1).getLayer();
    if (l instanceof OutputLayer) {
        return ((OutputLayer) l).getNOut();
    } else {
        throw new RuntimeException("Last layer is not a Output Layer");
    }
}

From source file: org.knime.ext.dl4j.base.nodes.predict.AbstractDLPredictorNodeModel.java

License: Open Source License

/**
 * Determines the number of outputs of the layer with specified index contained in the specified network.
 *
 * @param mln the network to use
 * @param layerIndex the index of the layer to get the number of outputs from
 * @return the number of output neurons of the specified layer
 */
protected int getNumberOfOutputs(final MultiLayerNetwork mln, final int layerIndex) {
    final int numberOfLayers = mln.getLayerWiseConfigurations().getConfs().size();
    if (layerIndex > numberOfLayers - 1) {
        throw new ArrayIndexOutOfBoundsException("No layer with index " + layerIndex + " available!");
    }
    Layer l = mln.getLayerWiseConfigurations().getConf(layerIndex).getLayer();
    if (l instanceof FeedForwardLayer) {
        return ((FeedForwardLayer) l).getNOut();
    } else {
        throw new RuntimeException("Can't get number of outputs from non FeedforwardLayer.");
    }
}
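
Both helpers read per-layer metadata through getConf(i).getLayer(). The same accessors can enumerate the whole topology, for example (a sketch using the Layer and FeedForwardLayer types already used above):

    final MultiLayerConfiguration conf = mln.getLayerWiseConfigurations();
    for (int i = 0; i < conf.getConfs().size(); i++) {
        final Layer l = conf.getConf(i).getLayer();
        if (l instanceof FeedForwardLayer) {
            final FeedForwardLayer ffl = (FeedForwardLayer) l;
            System.out.println("Layer " + i + ": nIn=" + ffl.getNIn() + ", nOut=" + ffl.getNOut());
        } else {
            System.out.println("Layer " + i + ": " + l.getClass().getSimpleName());
        }
    }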

From source file: stratego.neural.net.StrategoNeuralNet.java

/**
 * @param args the command line arguments
 */
public static void main(String[] args) throws Exception {

    /********************************************
    INPUT DATASETS HERE        
    ********************************************/

    String data1 = "src/Data/dataPoint_201.csv"; // location of the first dataset
    int labelIndex1 = 12; // label index: the position within a row where the label (the correct classification for the data point) is stored
    double ratio1 = 0.9; // the fraction of the data used for training (the remainder is used for testing)
    int batchSize1 = 35; // sets the size of the mini-batch
    int numClasses1 = 7; // sets the total number of possible classifications

    /***************************************
     * LOADING OF DATASETS HAPPENS HERE
     ***************************************/
    DataSet dataSet1 = readCSVDataset(data1, batchSize1, labelIndex1, numClasses1); // storing the raw CSV data in a DataSet object

    /**************************************
     * BUILDING NETWORKS HAPPENS HERE
     **************************************/

    // FIRST NETWORK
    int numInput1 = 12; // sets the number of input neurons (always equal to the number of training variables in a data point)
    int numHidden1 = 50; // set the number of neurons in the hidden layer
    int iterations1 = 10; // sets the number of iterations to be performed during each epoch
    int scoreListener1 = 1; // sets after how many iterations the score should be listed on the output terminal

    // Note: no need to set numOutput here since we have set numClasses for the dataset; they are the same

    int numEpochs1 = 50; // sets the number of epochs to run the training for

    String name1 = "One Layer, 201 datapoints, batchsize " + batchSize1 + " ratio " + ratio1 + " epochs: "
            + numEpochs1; // setting the name for identification
    //SECOND NETWORK
    // Right now I'm interested in the difference in performance, so I'm just going to copy all the settings from the first network and only change the name
    String name2 = "Two Layer, 201 datapoints, batchsize " + batchSize1 + " ratio " + ratio1 + " epochs: "
            + numEpochs1;

    //THIRD NETWORK
    String name3 = "Three Layer, 201 datapoints, batchsize " + batchSize1 + " ratio " + ratio1 + " epochs: "
            + numEpochs1;
    /*
            OneLayerNetwork oneLayerNetwork = new OneLayerNetwork(numInput1, numHidden1, numClasses1, iterations1, scoreListener1, name1);
            List<NamedDataSet> plotData1 = oneLayerNetwork.train(dataSet1, ratio1, numEpochs1); // trains the network and returns a List containing overfitting data for the plot.
            plotDataSet(plotData1, oneLayerNetwork.getName());
            */
    TwoLayerNetwork twoLayerNetwork = new TwoLayerNetwork(numInput1, numHidden1, numClasses1, iterations1,
            scoreListener1, name2);
    List<NamedDataSet> plotData2 = twoLayerNetwork.train(dataSet1, ratio1, numEpochs1);
    plotDataSet(plotData2, twoLayerNetwork.getName());
    /*
    ThreeLayerNetwork threeLayerNetwork = new ThreeLayerNetwork(numInput1, numHidden1, numClasses1, iterations1, scoreListener1, name3);
    List<NamedDataSet> plotData3 = threeLayerNetwork.train(dataSet1, ratio1, numEpochs1);
    plotDataSet(plotData3, threeLayerNetwork.getName());
    */

    /***********************************************************
     * CONSOLE OUTPUT HAPPENS HERE
     ***********************************************************/
    //oneLayerNetwork.evaluation();
    twoLayerNetwork.evaluation();
    // threeLayerNetwork.evaluation();

    twoLayerNetwork.storeNetwork("network");

    MultiLayerNetwork twoLayerTest = ModelSerializer.restoreMultiLayerNetwork("src/NetworkFiles/network.zip");

    System.out.println("Original and restored networks: configs are equal: " + twoLayerNetwork.getNetwork()
            .getLayerWiseConfigurations().equals(twoLayerTest.getLayerWiseConfigurations()));
    System.out.println("Original and restored networks: parameters are equal: "
            + twoLayerNetwork.getNetwork().params().equals(twoLayerTest.params()));

}
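
storeNetwork("network") is a helper of this project; with plain DL4J the same save/restore round trip can be written directly against ModelSerializer (a minimal sketch, assuming the standard DL4J serialization API):

import java.io.File;
import java.io.IOException;

import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
import org.deeplearning4j.util.ModelSerializer;

public class SaveRestoreSketch {
    // round-trips a trained network through a zip file and returns the restored copy
    static MultiLayerNetwork saveAndRestore(MultiLayerNetwork net, File file) throws IOException {
        ModelSerializer.writeModel(net, file, true); // true = also save the updater state
        return ModelSerializer.restoreMultiLayerNetwork(file);
    }
}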