List of usage examples for org.deeplearning4j.nn.multilayer MultiLayerNetwork score
double score
To view the source code for org.deeplearning4j.nn.multilayer.MultiLayerNetwork#score, click the Source Link below.
From source file:org.knime.ext.dl4j.base.nodes.learn.AbstractDLLearnerNodeModel.java
License:Open Source License
/** * Logs score of specified model at specified epoch in the view and adds the information to the history. * * @param m the model to get score from/* w w w . j a v a 2s .co m*/ * @param epoch the epoch number to print into log message */ protected void logEpochScore(final MultiLayerNetwork m, final int epoch) { HistoryEntry entry = new HistoryEntry(m.score(), epoch); m_history.add(entry); notifyViews(entry); }
From source file:org.knime.ext.dl4j.base.nodes.predict.feedforward.FeedforwardPredictorNodeModel.java
License:Open Source License
@Override protected PortObject[] execute(final PortObject[] inObjects, final ExecutionContext exec) throws Exception { final DLModelPortObject port = (DLModelPortObject) inObjects[0]; final DLModelPortObjectSpec portSpec = (DLModelPortObjectSpec) port.getSpec(); final BufferedDataTable table = (BufferedDataTable) inObjects[1]; //select feature columns from table used for prediction final String[] predictCols = DLModelPortObjectUtils.getFirsts(portSpec.getLearnedColumns(), String.class); final BufferedDataTable filteredTable = exec .createBufferedDataTable(new FilterColumnTable(table, predictCols), exec); //create iterator and prediction TableUtils.checkForEmptyTable(filteredTable); final DataSetIterator input = new ClassificationBufferedDataTableDataSetIterator(filteredTable, 1); final MultiLayerNetwork mln = port.getMultilayerLayerNetwork(); //set flag if last layer activation is softmax final boolean outputActivationIsSoftmax = isOutActivation(port.getLayers(), DL4JActivationFunction.softmax); final boolean appendPrediction = m_predictorParameter.getAppendPrediction().getBooleanValue(); final boolean appendScore = m_predictorParameter.getAppendScore().getBooleanValue(); final List<String> labels = portSpec.getLabels(); //write output to table final BufferedDataContainer container = exec.createDataContainer(m_outputSpec); final CloseableRowIterator tabelIter = table.iterator(); int i = 0;//from w w w .ja va 2 s . 
com while (tabelIter.hasNext()) { exec.setProgress((double) (i + 1) / (double) (table.size())); exec.checkCanceled(); final DataRow row = tabelIter.next(); final List<DataCell> cells = TableUtils.toListOfCells(row); final DataSet next = input.next(); final INDArray prediction = predict(mln, next.getFeatureMatrix()); final ListCell outputVector = CollectionCellFactory .createListCell(NDArrayUtils.toListOfDoubleCells(prediction)); cells.add(outputVector); if (appendScore) { final double score = mln.score(new DataSet(next.getFeatureMatrix(), prediction), false); cells.add(new DoubleCell(score)); } if (appendPrediction && outputActivationIsSoftmax && containsLabels()) { final String winningLabel = NDArrayUtils.softmaxActivationToLabel(labels, prediction); cells.add(new StringCell(winningLabel)); } else if (appendPrediction && containsLabels()) { cells.add(new MissingCell("Output Layer activation is not softmax")); } else if (appendPrediction && !containsLabels()) { cells.add(new MissingCell("Model contains no labels")); } container.addRowToTable(new DefaultRow(row.getKey(), cells)); i++; } if (appendPrediction && !outputActivationIsSoftmax) { logger.warn("Output Layer activation is not softmax. Label prediction column will be empty."); } if (appendPrediction && outputActivationIsSoftmax && !containsLabels()) { logger.warn( "Model contains no labels. May be trained unsupervised. Label prediction column will be empty."); } container.close(); final BufferedDataTable outputTable = container.getTable(); return new PortObject[] { outputTable }; }