Example usage for org.deeplearning4j.nn.api OptimizationAlgorithm CONJUGATE_GRADIENT

Introduction

On this page you can find example usage of org.deeplearning4j.nn.api.OptimizationAlgorithm.CONJUGATE_GRADIENT.

Prototype

OptimizationAlgorithm CONJUGATE_GRADIENT

CONJUGATE_GRADIENT is a constant of the org.deeplearning4j.nn.api.OptimizationAlgorithm enum. Passing it to a network configuration selects conjugate gradient as the training optimizer.
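A minimal sketch of configuring a network with this constant, mirroring the builder calls in the examples below (the 0.4.x-era deeplearning4j API; this builder usage is illustrative and not taken from either source file):

import org.deeplearning4j.nn.api.OptimizationAlgorithm;
import org.deeplearning4j.nn.conf.NeuralNetConfiguration;

// Select conjugate gradient as the training optimizer for a network configuration.
NeuralNetConfiguration.Builder builder = new NeuralNetConfiguration.Builder()
        .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT);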

Usage

From source file: org.wso2.carbon.ml.rest.api.neuralNetworks.FeedForwardNetwork.java

License: Open Source License

/**
 * Maps the user-selected optimization algorithm name to the corresponding
 * OptimizationAlgorithm constant.
 * @param optimizationAlgorithms the algorithm name selected by the user
 * @return the matching OptimizationAlgorithm, or null if the name is not recognized
 */

OptimizationAlgorithm mapOptimizationAlgorithm(String optimizationAlgorithms) {

    OptimizationAlgorithm optimizationAlgo = null;

    switch (optimizationAlgorithms) {
    case "Line_Gradient_Descent":
        optimizationAlgo = OptimizationAlgorithm.LINE_GRADIENT_DESCENT;
        break;

    case "Conjugate_Gradient":
        optimizationAlgo = OptimizationAlgorithm.CONJUGATE_GRADIENT;
        break;

    case "Hessian_Free":
        optimizationAlgo = OptimizationAlgorithm.HESSIAN_FREE;
        break;

    case "LBFGS":
        optimizationAlgo = OptimizationAlgorithm.LBFGS;
        break;

    case "Stochastic_Gradient_Descent":
        optimizationAlgo = OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT;
        break;

    default:
        optimizationAlgo = null;
        break;
    }

    return optimizationAlgo;
}
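For illustration, a hypothetical call to the helper above (the method and the accepted name strings come from the snippet; anything else falls through to the default branch):

// Maps the UI string to the corresponding enum constant.
OptimizationAlgorithm algo = mapOptimizationAlgorithm("Conjugate_Gradient");
// algo == OptimizationAlgorithm.CONJUGATE_GRADIENT

// Unrecognized names return null, so callers should null-check the result.
OptimizationAlgorithm unknown = mapOptimizationAlgorithm("Newton");
// unknown == null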

From source file: vectorizer.Doc2Vec.java

MultiLayerNetwork buildAutoEncoder(int vocabSize, int iterations) {
    int[] numInputs = { vocabSize, 100, 25 }; // layer sizes: input, first hidden, second hidden
    int seed = 100;

    /*
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
        .seed(seed)
        .iterations(iterations)
        .momentum(0.1)
        .momentumAfter(Collections.singletonMap(10, 0.01))
        .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT)
        .list(4)
        .layer(0, new RBM.Builder().nIn(vocabSize).nOut(numInputs[0])
                .lossFunction(LossFunctions.LossFunction.RMSE_XENT).build())
        .layer(1, new RBM.Builder().nIn(numInputs[0]).nOut(numInputs[1])
                .lossFunction(LossFunctions.LossFunction.RMSE_XENT).build())
        .layer(2, new RBM.Builder().lossFunction(LossFunctions.LossFunction.RMSE_XENT)
                .nIn(numInputs[1]).nOut(numInputs[2]).build())
        .layer(3, new RBM.Builder().lossFunction(LossFunctions.LossFunction.RMSE_XENT)
                .nIn(numInputs[2]).nOut(numInputs[3]).build())
        .pretrain(false)
        .build();
    */

    /*
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
    .seed(seed)
    .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue)
    .gradientNormalizationThreshold(1.0)
    .iterations(iterations)
    .momentum(0.5)
    .momentumAfter(Collections.singletonMap(3, 0.9))
    .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT)
    .list(3)
    .layer(0, new AutoEncoder.Builder().nIn(numInputs[0]).nOut(numInputs[1])
            .weightInit(WeightInit.XAVIER).lossFunction(LossFunctions.LossFunction.RMSE_XENT)
            .corruptionLevel(0.3)
            .build())
    .layer(1, new AutoEncoder.Builder().nIn(numInputs[1]).nOut(numInputs[2])
            .weightInit(WeightInit.XAVIER).lossFunction(LossFunctions.LossFunction.RMSE_XENT)
            .corruptionLevel(0.3)
            .build())
    //.layer(2, new AutoEncoder.Builder().nIn(numInputs[2]).nOut(numInputs[3])
    //        .weightInit(WeightInit.XAVIER).lossFunction(LossFunctions.LossFunction.RMSE_XENT)
    //        .corruptionLevel(0.3)
    //        .build())
    //.layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.NEGATIVELOGLIKELIHOOD).activation("softmax")
    //        .nIn(numInputs[2]).nOut(numInputs[3]).build())
    .layer(2, new OutputLayer.Builder(LossFunctions.LossFunction.RMSE_XENT)
            .nIn(numInputs[2]).nOut(numInputs[3]).build())
    .pretrain(true).backprop(false)
    .build();        
    */

    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
            .seed(seed) // locks in weight initialization for reproducible tuning
            .iterations(iterations) // number of training iterations
            .learningRate(1e-6f) // optimization step size
            .optimizationAlgo(OptimizationAlgorithm.CONJUGATE_GRADIENT) // conjugate-gradient optimizer (gradients still computed via backprop)
            .l1(1e-1).regularization(true).l2(2e-4) // L1/L2 regularization
            .useDropConnect(true)
            .list(2) // number of layers (input layer not counted)
            .layer(0, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN)
                    .nIn(numInputs[0]) // input nodes
                    .nOut(numInputs[1]) // hidden nodes in this layer
                    .weightInit(WeightInit.XAVIER) // weight initialization
                    .k(1) // contrastive divergence iterations
                    .activation("relu") // activation function
                    .lossFunction(LossFunctions.LossFunction.RMSE_XENT) // loss function
                    .updater(Updater.ADAGRAD)
                    .dropOut(0.5)
                    .build())
            .layer(1, new RBM.Builder(RBM.HiddenUnit.RECTIFIED, RBM.VisibleUnit.GAUSSIAN)
                    .nIn(numInputs[1])
                    .nOut(numInputs[2])
                    .weightInit(WeightInit.XAVIER)
                    .k(1)
                    .activation("relu")
                    .lossFunction(LossFunctions.LossFunction.RMSE_XENT)
                    .updater(Updater.ADAGRAD)
                    .dropOut(0.5)
                    .build())
            .build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    return model;
}
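For illustration, a hypothetical way to train the returned model; the vocabulary size of 2000, the iteration count of 5, and the trainIter iterator are placeholder assumptions standing in for whatever supplies the document vectors in Doc2Vec:

// Build a 2000 -> 100 -> 25 RBM autoencoder, training 5 iterations per fit.
MultiLayerNetwork autoEncoder = buildAutoEncoder(2000, 5);

// trainIter is a hypothetical org.nd4j.linalg.dataset.api.iterator.DataSetIterator
// over the bag-of-words training vectors.
autoEncoder.fit(trainIter);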