Example usage for org.apache.commons.io FilenameUtils concat

List of usage examples for org.apache.commons.io FilenameUtils concat

Introduction

On this page you can find an example usage for org.apache.commons.io FilenameUtils concat.

Prototype

public static String concat(String basePath, String fullFilenameToAdd) 

Source Link

Document

Concatenates a filename to a base path using normal command line style rules.

Usage

From source file:org.avantssar.aslanpp.GraphvizGenerator.java

@Override
public void translationOver(ASLanPPSpecification aslanPPspec, IASLanSpec aslanSpec, ASLanBuilder builder) {
    // Render one Graphviz ".dot" file per entity in the translated spec, then run dot on it.
    for (Entity e : builder.getFirstNodes().keySet()) {
        TransitionsRecorder rec = builder.getRecorders().get(e);
        INode firstNode = builder.getFirstNodes().get(e);
        // Output file lives in the configured output directory, named after the entity.
        String gvFile = FilenameUtils.concat(outputDir.getAbsolutePath(), e.getOriginalName() + ".dot");
        try {
            // Fix: the stream was previously leaked when rendering threw before close().
            // try-with-resources closes (and flushes) it before dot reads the file.
            try (PrintStream out = new PrintStream(gvFile)) {
                out.println("digraph " + e.getOriginalName() + " {");
                firstNode.clearVisited();
                firstNode.renderGraphviz(out);
                // One labeled, colored edge per recorded lumped transition.
                for (TransitionIndexes tri : rec.getRecordedTransitions()) {
                    out.println(tri.start + " -> " + tri.end + " [label=\"step_" + tri.index + "\", color="
                            + LUMPED_COLOR + "];");
                }
                out.println("}");
            }
            DotUtil.runDot(new File(gvFile));
        } catch (IOException ex) {
            // Best-effort: a failed render for one entity must not abort the others.
            System.err.println("Failed to render Graphviz file '" + gvFile + "' for entity "
                    + e.getOriginalName() + ": " + ex.getMessage());
            // Fix: message said "Graphiv" and was missing the closing quote around the file name.
            Debug.logger.error(
                    "Failed to render Graphviz file '" + gvFile + "' for entity " + e.getOriginalName() + ".",
                    ex);
        }
    }
}

From source file:org.axonframework.ext.chronicle.store.IndexedChronicleDomainEventStore.java

/**
 * Opens the backing indexed Chronicle queue located at {@code <basePath>/<storageId>}.
 * Construction failures are logged at WARN level and swallowed.
 */
@Override
public void init() {
    // Resolve the queue's on-disk location relative to the configured base path.
    final String queuePath = FilenameUtils.concat(getBasePath(), getStorageId());
    LOGGER.debug("IndexedChronicle => BasePath: {}, DataPath: {}", getBasePath(), queuePath);

    try {
        // Build the indexed queue at that location and delegate to init(queue).
        init(ChronicleQueueBuilder.indexed(queuePath).build());
    } catch (Exception e) {
        // Best-effort startup: do not propagate construction errors.
        LOGGER.warn("Exception", e);
    }
}

From source file:org.axonframework.ext.chronicle.store.VanillaChronicleDomainEventStore.java

/**
 * Opens the backing vanilla Chronicle queue located at {@code <basePath>/<storageId>}.
 * Construction failures are logged at WARN level and swallowed.
 */
@Override
public void init() {
    // Resolve the queue's on-disk location relative to the configured base path.
    final String queuePath = FilenameUtils.concat(getBasePath(), getStorageId());
    LOGGER.debug("VanillaChronicle => BasePath: {}, DataPath: {}", getBasePath(), queuePath);

    try {
        // Build the vanilla queue at that location and delegate to init(queue).
        init(ChronicleQueueBuilder.vanilla(queuePath).build());
    } catch (Exception e) {
        // Best-effort startup: do not propagate construction errors.
        LOGGER.warn("Exception", e);
    }
}

From source file:org.bdval.ConsensusBDVModel.java

/**
 * Loads the juror models used for consensus, either from the consensus model's
 * own zip file (embedded mode) or from sibling model files on disk.
 * @param options specific options to use when loading the model
 * @throws IOException if there is a problem accessing the model
 * @throws ClassNotFoundException if the type of the model is not recognized
 */
private void loadJurorModels(final DAVOptions options) throws IOException, ClassNotFoundException {
    jurorModels.clear();

    final String pathToModel = FilenameUtils.getFullPath(modelFilename);
    // The endpoint name is the last directory component of the model's path.
    final String endpointName = FilenameUtils.getBaseName(FilenameUtils.getPathNoEndSeparator(pathToModel));

    if (properties.getBoolean("bdval.consensus.jurors.embedded", false)) {
        // NOTE(review): createTempFile/delete/mkdir is inherently racy; kept for
        // compatibility since no import block is visible to bring in java.nio.file.
        final File tmpdir = File.createTempFile("juror-models", "");
        tmpdir.delete();
        tmpdir.mkdir();

        // Fix: the ZipFile was never closed; try-with-resources releases its file handle.
        try (ZipFile zipFile = new ZipFile(zipFilename)) {
            // load juror models from the zip file
            for (final String jurorPrefix : jurorModelFilenamePrefixes) {
                // zip files should always use "/" as a separator
                final String jurorFilename = "models/" + endpointName + "/" + jurorPrefix + ".zip";
                LOG.debug("Loading juror model " + jurorFilename);

                final File jurorFile = new File(FilenameUtils.concat(tmpdir.getPath(), jurorFilename));

                // put the juror model to disk so it can be loaded with existing code.
                // Fix: IOUtils.copy does not close its streams; both are now closed.
                try (InputStream jurorStream = zipFile.getInputStream(zipFile.getEntry(jurorFilename));
                        java.io.OutputStream jurorOut = FileUtils.openOutputStream(jurorFile)) {
                    IOUtils.copy(jurorStream, jurorOut);
                }

                final BDVModel jurorModel = new BDVModel(jurorFile.getPath());
                jurorModel.load(options);
                jurorModels.add(jurorModel);
            }
        } finally {
            // Clean up the unpacked juror models when the JVM exits.
            FileUtils.forceDeleteOnExit(tmpdir);
        }
    } else {
        // load juror models from disk
        final File finalModelPath = new File(pathToModel);
        final File finalModelParentPath = new File(finalModelPath.getParent());
        // assume the model is under a directory "models" at the same level as a models
        // directory which contains the model components.
        for (final String jurorPrefix : jurorModelFilenamePrefixes) {
            final String modelComponentFilename = finalModelParentPath.getParent() + SystemUtils.FILE_SEPARATOR
                    + "models" + SystemUtils.FILE_SEPARATOR + endpointName + SystemUtils.FILE_SEPARATOR
                    + jurorPrefix;
            LOG.debug("Loading model component " + modelComponentFilename);
            final BDVModel jurorModel = new BDVModel(modelComponentFilename);
            jurorModel.load(options);
            jurorModels.add(jurorModel);
        }
    }

    if (jurorModels.size() < 1) {
        throw new IllegalStateException("No juror models could be found");
    }

    jurorModelsAreLoaded = true;
}

From source file:org.bdval.MakeSyntheticDataset.java

/**
 * Generates a synthetic dataset plus the cids/tasks files for the full,
 * training and testing sample sets, and writes a README summary.
 * @param arguments parsed command-line arguments controlling the dataset shape
 * @throws IOException if any of the output files cannot be written
 */
private void process(final JSAPResult arguments) throws IOException {
    outputDirectory = arguments.getString("output-directory");
    FileUtils.forceMkdir(new File(outputDirectory));

    final int numProbesets = arguments.getInt("probeset-number");
    final int numSamples = arguments.getInt("sample-number");
    final int numPositiveSamples = arguments.getInt("positive-sample-number");

    final int numInformativeProbesets = arguments.getInt("number-informative-probesets");
    final String outputFilenamePrefix = arguments.getString("dataset-name");

    scalePositiveLabel = arguments.getDouble("scale-positive-labels");
    meanPositiveLabel = arguments.getDouble("mean-positive-labels");

    scaleNegativeLabel = arguments.getDouble("scale-negative-labels");
    meanNegativeLabel = arguments.getDouble("mean-negative-labels");

    scaleNonInformativeFeature = arguments.getDouble("scale-non-informative");
    meanNonInformativeFeature = arguments.getDouble("mean-non-informative");

    final double[][] data = new double[numSamples][numProbesets];

    final RandomEngine random = new MersenneTwister();
    randomAdapter = new RandomAdapter(random);

    printStats(numProbesets, numSamples, numPositiveSamples, numInformativeProbesets, outputFilenamePrefix,
            new PrintWriter(System.out));

    // pick informative probeset indices, making sure indices are not picked more than once.
    final IntSet informativeProbesetIndices = generateRandomIndices(numInformativeProbesets, numProbesets,
            randomAdapter);
    final IntSet positiveSampleIndices = generateRandomIndices(numPositiveSamples, numSamples, randomAdapter);

    // Fill every feature of every sample with non-informative noise first.
    for (final double[] sample : data) { // for each sample:
        for (int probesetIndex = 0; probesetIndex < sample.length; probesetIndex++) { // for each probeset:
            sample[probesetIndex] = generateNonInformativeFeatureValue();
        }
    }

    // Overwrite the informative probesets with label-dependent values.
    int sampleIndex = 0;
    for (final double[] sample : data) { // for each sample:
        for (final int informativeProbesetIndex : informativeProbesetIndices) { // for each informative probeset:
            sample[informativeProbesetIndex] = generateInformativeFeatureValue(
                    positiveSampleIndices.contains(sampleIndex));
        }
        sampleIndex++;
    }

    final String datasetFilename = FilenameUtils.concat(outputDirectory,
            FilenameUtils.concat("norm-data", outputFilenamePrefix + ".tmm"));
    // Fix: getPath() excludes the path prefix (drive/root), so forceMkdir could target
    // the wrong directory for absolute paths; getFullPath() keeps the prefix.
    FileUtils.forceMkdir(new File(FilenameUtils.getFullPath(datasetFilename)));
    PrintWriter datasetWriter = null;
    try {
        datasetWriter = new PrintWriter(datasetFilename);
        outputDataset(data, informativeProbesetIndices, positiveSampleIndices, datasetWriter);
    } finally {
        IOUtils.closeQuietly(datasetWriter);
    }

    final IntList sampleIndicesList = outputCids(numSamples, positiveSampleIndices, outputFilenamePrefix);
    outputTrainingAndTestingCids(positiveSampleIndices, outputFilenamePrefix, sampleIndicesList);
    // Split the shuffled samples into training/testing partitions at the configured ratio.
    Collections.shuffle(sampleIndicesList);
    final int trainingSetSize = (int) (sampleIndicesList.size() * trainingVsTestingSizeRatio);
    final IntList trainingSetSampleList = sampleIndicesList.subList(0, trainingSetSize);
    // Fix: was subList(trainingSetSize + 1, size), which silently dropped the sample
    // at index trainingSetSize from both partitions (off-by-one).
    final IntList testingSetSampleList = sampleIndicesList.subList(trainingSetSize, sampleIndicesList.size());
    outputCids(positiveSampleIndices, outputFilenamePrefix + "Training", trainingSetSampleList);
    outputCids(positiveSampleIndices, outputFilenamePrefix + "Testing", testingSetSampleList);

    final IntSet positiveInCompleteSet = new IntLinkedOpenHashSet();
    positiveInCompleteSet.addAll(sampleIndicesList);
    positiveInCompleteSet.retainAll(positiveSampleIndices);
    System.out.println("positiveInCompleteSet: " + positiveInCompleteSet.size());
    // task for full training set:
    outputTasks(outputFilenamePrefix, numSamples, positiveInCompleteSet, outputFilenamePrefix + ".tasks");

    // task for training set only:
    final IntSet positiveInTrainingSet = new IntLinkedOpenHashSet();
    positiveInTrainingSet.addAll(trainingSetSampleList);
    positiveInTrainingSet.retainAll(positiveSampleIndices);
    System.out.println("positiveInTrainingSet: " + positiveInTrainingSet.size());

    outputTasks(outputFilenamePrefix + "_Training", trainingSetSampleList.size(), positiveInTrainingSet,
            outputFilenamePrefix + "_Training" + ".tasks");
    //task for test set only:
    final IntSet positiveInTestingSet = new IntLinkedOpenHashSet();
    positiveInTestingSet.addAll(testingSetSampleList);
    positiveInTestingSet.retainAll(positiveSampleIndices);
    System.out.println("positiveInTestingSet: " + positiveInTestingSet.size());
    outputTasks(outputFilenamePrefix + "_Testing", testingSetSampleList.size(), positiveInTestingSet,
            outputFilenamePrefix + "_Testing" + ".tasks");

    final String summaryFilename = FilenameUtils.concat(outputDirectory, outputFilenamePrefix + "-README.txt");
    PrintWriter summaryWriter = null;
    try {
        summaryWriter = new PrintWriter(summaryFilename);
        printStats(numProbesets, numSamples, numPositiveSamples, numInformativeProbesets, outputFilenamePrefix,
                summaryWriter);
    } finally {
        IOUtils.closeQuietly(summaryWriter);
    }
}

From source file:org.bdval.MakeSyntheticDataset.java

/**
 * Writes a single-line .tasks file describing the negative/positive sample counts.
 * @param outputFilenamePrefix dataset name written in the first column
 * @param numSamples total number of samples in this set
 * @param positiveSampleIndices indices of the positive samples
 * @param outputFilename file name (under the "tasks" directory) to write
 * @throws IOException if the tasks file cannot be written
 */
private void outputTasks(final String outputFilenamePrefix, final int numSamples,
        final IntSet positiveSampleIndices, final String outputFilename) throws IOException {
    final String tasksFilename = FilenameUtils.concat(outputDirectory,
            FilenameUtils.concat("tasks", outputFilename));
    // Fix: getPath() excludes the path prefix (drive/root); getFullPath() keeps it,
    // so the directory is created in the right place for absolute output paths.
    FileUtils.forceMkdir(new File(FilenameUtils.getFullPath(tasksFilename)));

    PrintWriter tasksWriter = null;
    try {
        tasksWriter = new PrintWriter(tasksFilename);

        // Columns: name, negative label, positive label, negative count, positive count.
        tasksWriter.println(String.format("%s\tnegative\tpositive\t%d\t%d", outputFilenamePrefix,
                numSamples - positiveSampleIndices.size(), positiveSampleIndices.size()));
    } finally {
        IOUtils.closeQuietly(tasksWriter);
    }
}

From source file:org.bdval.MakeSyntheticDataset.java

/**
 * Writes a .cids file mapping each sample id to its "positive"/"negative" label.
 * @param positiveSampleIndices indices of the positive samples
 * @param outputFilenamePrefix base name of the .cids file (under the "cids" directory)
 * @param sampleIndices the sample indices to write, in order
 * @throws IOException if the cids file cannot be written
 */
private void outputCids(final IntSet positiveSampleIndices, final String outputFilenamePrefix,
        final IntList sampleIndices) throws IOException {
    final String cidsFilename = FilenameUtils.concat(outputDirectory,
            FilenameUtils.concat("cids", outputFilenamePrefix + ".cids"));
    // Fix: getPath() excludes the path prefix (drive/root); getFullPath() keeps it,
    // so the directory is created in the right place for absolute output paths.
    FileUtils.forceMkdir(new File(FilenameUtils.getFullPath(cidsFilename)));
    PrintWriter cidsWriter = null;
    try {
        cidsWriter = new PrintWriter(cidsFilename);

        // One tab-separated line per sample: <label> <sample id>.
        for (final int sampleIndex : sampleIndices) {
            cidsWriter.print(positiveSampleIndices.contains(sampleIndex) ? "positive" : "negative");
            cidsWriter.print("\t");
            cidsWriter.print(sampleId(sampleIndex, positiveSampleIndices));
            cidsWriter.println();
        }
    } finally {
        IOUtils.closeQuietly(cidsWriter);
    }
}

From source file:org.bdval.TestDAVMode.java

/**
 * Gets the name of a directory to use for a cache during the tests.  The directory
 * itself is not created.
 * @return The full path of a directory that can be used for testing.
 * @throws IOException if no valid directory name can be created
 */
private String getTempCacheDirectory() throws IOException {
    // Reserve a unique name via a temp file, then remove the file so only the
    // (not-yet-created) path remains.  NOTE(review): this is inherently racy.
    final File tmpFile = File.createTempFile("davMode", "test");
    final String tmpDirName = tmpFile.getAbsolutePath();
    // Fix: the delete() result was ignored; a failed delete would make the path
    // unusable, so honor the documented IOException contract instead.
    if (!tmpFile.delete()) {
        throw new IOException("Unable to delete temporary file: " + tmpDirName);
    }
    return FilenameUtils.concat(tmpDirName, "cache");
}

From source file:org.bdval.TestDAVMode.java

/**
 * Validates that the cache is created properly in DAVMode when enabled.
 * @throws JSAPException If there was a problem setting up the test
 * @throws IOException If there was a problem with the cache
 * @throws ColumnTypeException If there was a problem reading the tables
 * @throws InvalidColumnException If there was a problem reading the tables
 * @throws TypeMismatchException If there was a problem reading the tables
 */
@Test
public void testTableCacheEnabled()
        throws IOException, JSAPException, InvalidColumnException, ColumnTypeException, TypeMismatchException {
    final String cacheDirName = getTempCacheDirectory();
    final String inputFile = "test-data/test.tmm.gz";

    // Simulated command line enabling the table cache for the test input.
    final String[] cliArgs = { "--enable-cache", "--cache-dir", cacheDirName, "--input", inputFile };

    final DAVMode mode = new DAVMode();
    final DAVOptions options = new DAVOptions();
    final JSAP parser = new JSAP();
    mode.defineOptions(parser);

    final JSAPResult parsed = parser.parse(cliArgs);
    mode.setupTableCache(parsed, options);

    final File cacheDir = new File(cacheDirName);
    assertTrue("Cache directory should have been created", cacheDir.exists());

    // create the actual cache directory
    mode.setupPathwayOptions(parsed, options);
    mode.setupInput(parsed, options);
    mode.processTable(new FullGeneList(null), options.inputTable, options, new ArrayList<Set<String>>(),
            true);

    final File cachedTableFile = new File(FilenameUtils.concat(cacheDirName,
            "pathways=false" + SystemUtils.FILE_SEPARATOR + "cached-table-null-complete-0.bin"));
    assertTrue("Cache file should have been created", cachedTableFile.exists());

    final TableCache cache = new TableCache(
            new File(FilenameUtils.concat(cacheDirName, "pathways=false")));
    assertTrue("Table should be cached at this point", cache.isTableCached(0, null, null));

    // note that the cached table is not the same as the input dataset
    Table cachedTable = cache.getCachedTable(0, null, null);
    assertNotNull("The table from the cache should not be null", cachedTable);
    assertEquals("There should be 6 columns", 6, cachedTable.getColumnNumber());
    assertEquals("There should be 2 rows", 2, cachedTable.getRowNumber());

    // Removing the entry must delete the file and empty the cache.
    mode.removeFromCache(0, null, null);
    assertFalse("Cache file should have been deleted", cachedTableFile.exists());
    assertFalse("Table should no longer be cached at this point", cache.isTableCached(0, null, null));
    cachedTable = cache.getCachedTable(0, null, null);
    assertNull("The table from the cache should be null", cachedTable);
}

From source file:org.bimserver.charting.Charts.Chart.java

/**
 * Renders the chart to an SVG file in a per-user location: the Desktop on
 * Windows, otherwise the user's home directory itself.
 * @param rawData the chart's raw data rows
 */
public void saveToSVGInUserDirectory(ArrayList<LinkedHashMap<String, Object>> rawData) {
    final String home = System.getProperty("user.home");
    final String targetDirectory;
    if (System.getProperty("os.name").startsWith("Windows")) {
        targetDirectory = FilenameUtils.concat(home, "Desktop");
    } else {
        targetDirectory = home;
    }
    saveToSVG(rawData, targetDirectory);
}