Example usage for org.deeplearning4j.eval Evaluation f1

List of usage examples for org.deeplearning4j.eval Evaluation f1

Introduction

In this page you can find the example usage for org.deeplearning4j.eval Evaluation f1.

Prototype

public double f1() 

Source Link

Document

Calculate the F1 score
F1 score is defined as:
TP: true positive
FP: False Positive
FN: False Negative
F1 score: 2 * TP / (2TP + FP + FN)

Note: the value returned will differ depending on the number of classes and the evaluation settings.

Usage

From source file: org.wso2.carbon.ml.rest.api.neuralNetworks.FeedForwardNetwork.java

License:Open Source License

/**
 * Builds, trains and evaluates a feed-forward network over the dataset
 * identified by {@code datasetId}/{@code versionID}, using per-layer settings
 * supplied by the caller.
 *
 * @param seed                   RNG seed for reproducible weight initialization
 * @param learningRate           learning rate for the configured updater
 * @param bachSize               batch size (currently unused; the whole file is read as one batch)
 * @param nepoches               number of training epochs
 * @param iterations             optimizer iterations per fit call
 * @param optimizationAlgorithms name of the optimization algorithm (mapped via {@code mapOptimizationAlgorithm})
 * @param updater                name of the updater (mapped via {@code mapUpdater})
 * @param momentum               momentum term for the updater
 * @param pretrain               whether to run layer-wise pretraining
 * @param backprop               whether to run backpropagation
 * @param noHiddenLayers         number of hidden layers
 * @param inputLayerNodes        number of input features
 * @param datasetId              dataset identifier used to resolve the CSV path
 * @param versionID              dataset version identifier
 * @param analysisID             analysis identifier (fraction / response variable lookup)
 * @param hiddenList             per-hidden-layer settings; must have {@code noHiddenLayers} entries
 * @param outputList             output-layer settings; only index 0 is used
 * @return a JSON string with Accuracy, Precision, Recall and F1Score, or
 *         {@code null} if the analysis fraction is 0 (nothing to train on)
 * @throws IOException          if the dataset file cannot be read
 * @throws InterruptedException if record-reader initialization is interrupted
 */
public String createFeedForwardNetwork(long seed, double learningRate, int bachSize, double nepoches,
        int iterations, String optimizationAlgorithms, String updater, double momentum, boolean pretrain,
        boolean backprop, int noHiddenLayers, int inputLayerNodes, int datasetId, int versionID, int analysisID,
        List<HiddenLayerDetails> hiddenList, List<OutputLayerDetails> outputList)
        throws IOException, InterruptedException {

    String evaluationDetails = null;
    int numLinesToSkip = 0;
    String delimiter = ",";
    mlDataSet = getDatasetPath(datasetId, versionID);
    analysisFraction = getAnalysisFraction(analysisID);
    analysisResponceVariable = getAnalysisResponseVariable(analysisID);
    responseIndex = getAnalysisResponseVariableIndex(analysisID);
    SplitTestAndTrain splitTestAndTrain;
    DataSet currentDataset;
    DataSet trainingSet = null;
    DataSet testingSet = null;
    INDArray features = null;
    INDArray labels = null;
    INDArray predicted = null;
    Random rnd = new Random();
    int labelIndex = responseIndex;
    int numClasses = outputList.get(0).outputNodes;
    int fraction = 0;

    //Avoid a division-by-zero style degenerate split: a 0 fraction means no training data
    if (analysisFraction == 0) {
        return null;
    }

    //Count the total number of lines in the dataset file.
    //try-with-resources: the original leaked the FileReader/LineNumberReader on every call.
    int lines;
    try (LineNumberReader lineNumberReader = new LineNumberReader(new FileReader(mlDataSet))) {
        lineNumberReader.skip(Long.MAX_VALUE);
        lines = lineNumberReader.getLineNumber();
    }

    //Floor value gives the number of examples held for training
    fraction = ((int) Math.floor(lines * analysisFraction));

    //Initialize RecordReader and read the whole dataset as a single batch
    RecordReader rr = new CSVRecordReader(numLinesToSkip, delimiter);
    rr.initialize(new FileSplit(new File(mlDataSet)));

    org.nd4j.linalg.dataset.api.iterator.DataSetIterator trainIter = new RecordReaderDataSetIterator(rr, lines,
            labelIndex, numClasses);

    //Create NeuralNetConfiguration object having basic settings.
    NeuralNetConfiguration.ListBuilder neuralNetConfiguration = new NeuralNetConfiguration.Builder().seed(seed)
            .iterations(iterations).optimizationAlgo(mapOptimizationAlgorithm(optimizationAlgorithms))
            .learningRate(learningRate).updater(mapUpdater(updater)).momentum(momentum)
            .list(noHiddenLayers + 1);

    //Add hidden layers; the first layer takes the raw inputs, later layers chain
    //from the previous hidden layer's node count.
    for (int i = 0; i < noHiddenLayers; i++) {
        int nInput = (i == 0) ? inputLayerNodes : hiddenList.get(i - 1).hiddenNodes;

        neuralNetConfiguration.layer(i,
                new DenseLayer.Builder().nIn(nInput).nOut(hiddenList.get(i).hiddenNodes)
                        .weightInit(mapWeightInit(hiddenList.get(i).weightInit))
                        .activation(hiddenList.get(i).activationAlgo).build());
    }

    //Output layer: feed from the last hidden layer, or directly from the inputs
    //when no hidden layers were requested (the original threw IndexOutOfBounds for 0).
    int outputNIn = (noHiddenLayers > 0) ? hiddenList.get(noHiddenLayers - 1).hiddenNodes : inputLayerNodes;
    neuralNetConfiguration.layer(noHiddenLayers,
            new OutputLayer.Builder(mapLossFunction(outputList.get(0).lossFunction))
                    .nIn(outputNIn).nOut(outputList.get(0).outputNodes)
                    .weightInit(mapWeightInit(outputList.get(0).weightInit))
                    .activation(outputList.get(0).activationAlgo).build());

    //Create MultiLayerConfiguration network
    MultiLayerConfiguration conf = neuralNetConfiguration.pretrain(pretrain).backprop(backprop).build();

    MultiLayerNetwork model = new MultiLayerNetwork(conf);
    model.init();
    model.setListeners(Collections.singletonList((IterationListener) new ScoreIterationListener(1)));

    //Batch size equals the file length, so this loop runs once; it splits the
    //single batch into training and held-out test portions.
    while (trainIter.hasNext()) {
        currentDataset = trainIter.next();
        splitTestAndTrain = currentDataset.splitTestAndTrain(fraction, rnd);
        trainingSet = splitTestAndTrain.getTrain();
        testingSet = splitTestAndTrain.getTest();
        features = testingSet.getFeatureMatrix();
        labels = testingSet.getLabels();
    }

    //Train the model with the training data
    for (int n = 0; n < nepoches; n++) {
        model.fit(trainingSet);
    }

    //Do the evaluations of the model including the Accuracy, F1 score etc.
    log.info("Evaluate model....");
    Evaluation eval = new Evaluation(outputList.get(0).outputNodes);
    predicted = model.output(features, false);

    eval.eval(labels, predicted);

    //Bug fix: the original emitted the misspelled JSON key "Pecision"
    evaluationDetails = "{\"Accuracy\":\"" + eval.accuracy() + "\", \"Precision\":\"" + eval.precision()
            + "\",\"Recall\":\"" + eval.recall() + "\",\"F1Score\":\"" + eval.f1() + "\"}";
    return evaluationDetails;

}

From source file:seqtest.Pair.java

/**
 * Trains a single-feature LSTM sequence classifier on the UCI synthetic-control
 * data set and prints test-set accuracy and F1 after every training epoch.
 */
public static void main(String[] args) throws Exception {
    downloadUCIData();

    final int miniBatchSize = 10;
    final int numLabelClasses = 6;

    // ----- Training data: features in train/features/0.csv .. 449.csv, labels likewise -----
    SequenceRecordReader trainFeatureReader = new CSVSequenceRecordReader();
    trainFeatureReader
            .initialize(new NumberedFileInputSplit(featuresDirTrain.getAbsolutePath() + "/%d.csv", 0, 449));
    SequenceRecordReader trainLabelReader = new CSVSequenceRecordReader();
    trainLabelReader.initialize(new NumberedFileInputSplit(labelsDirTrain.getAbsolutePath() + "/%d.csv", 0, 449));

    DataSetIterator trainData = new SequenceRecordReaderDataSetIterator(trainFeatureReader, trainLabelReader,
            miniBatchSize, numLabelClasses, false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);

    // Normalization is intentionally disabled for this example; a
    // NormalizerStandardize fitted on trainData could be set as a pre-processor
    // on both iterators to normalize each DataSet on the fly.

    // ----- Test data: files 0.csv .. 149.csv, loaded the same way -----
    SequenceRecordReader testFeatureReader = new CSVSequenceRecordReader();
    testFeatureReader.initialize(new NumberedFileInputSplit(featuresDirTest.getAbsolutePath() + "/%d.csv", 0, 149));
    SequenceRecordReader testLabelReader = new CSVSequenceRecordReader();
    testLabelReader.initialize(new NumberedFileInputSplit(labelsDirTest.getAbsolutePath() + "/%d.csv", 0, 149));

    DataSetIterator testData = new SequenceRecordReaderDataSetIterator(testFeatureReader, testLabelReader,
            miniBatchSize, numLabelClasses, false, SequenceRecordReaderDataSetIterator.AlignmentMode.ALIGN_END);

    // ----- Network: one GravesLSTM layer feeding a softmax RNN output layer -----
    MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder().seed(123) //Fixed seed for repeatability
            .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
            .weightInit(WeightInit.XAVIER).updater(Updater.NESTEROVS).momentum(0.9).learningRate(0.005)
            .gradientNormalization(GradientNormalization.ClipElementWiseAbsoluteValue) //Helps with this data set
            .gradientNormalizationThreshold(0.5).list(2)
            .layer(0, new GravesLSTM.Builder().activation("tanh").nIn(1).nOut(10).build())
            .layer(1, new RnnOutputLayer.Builder(LossFunctions.LossFunction.MCXENT).activation("softmax")
                    .nIn(10).nOut(numLabelClasses).build())
            .pretrain(false).backprop(true).build();

    MultiLayerNetwork net = new MultiLayerNetwork(conf);
    net.init();
    net.setListeners(new ScoreIterationListener(20)); //Log the loss every 20 iterations

    // ----- Train, evaluating the full test set after each epoch -----
    final int nEpochs = 40;
    String evalFormat = "Test set evaluation at epoch %d: Accuracy = %.2f, F1 = %.2f";
    for (int epoch = 0; epoch < nEpochs; epoch++) {
        net.fit(trainData);

        Evaluation evaluation = new Evaluation();
        while (testData.hasNext()) {
            DataSet batch = testData.next();
            INDArray features = batch.getFeatureMatrix();
            INDArray labels = batch.getLabels();
            INDArray featuresMask = batch.getFeaturesMaskArray();
            INDArray labelsMask = batch.getLabelsMaskArray();

            //Masked time-series evaluation: only positions valid per the label mask count
            INDArray predicted = net.output(features, false, featuresMask, labelsMask);
            evaluation.evalTimeSeries(labels, predicted, labelsMask);
        }

        System.out.println(String.format(evalFormat, epoch, evaluation.accuracy(), evaluation.f1()));

        testData.reset();
        trainData.reset();
    }

    System.out.println("----- Example Complete -----");
}