Example usage for org.apache.commons.lang3 SystemUtils FILE_SEPARATOR

List of usage examples for org.apache.commons.lang3 SystemUtils FILE_SEPARATOR

Introduction

In this page you can find the example usage for org.apache.commons.lang3 SystemUtils FILE_SEPARATOR.

Prototype

public static final String FILE_SEPARATOR

To view the source code for org.apache.commons.lang3 SystemUtils FILE_SEPARATOR, click the Source Link below.

Click Source Link

Document

The value of the file.separator system property: the character that separates components of a file path (e.g. "/" on Unix, "\" on Windows).

Usage

From source file:io.github.blindio.prospero.core.browserdrivers.phantomjs.PhantomJSInstaller.java

/**
 * Builds the absolute path of the PhantomJS installation directory,
 * located directly under the current user directory. The returned path
 * always ends with a trailing file separator.
 */
public static String getPhantomJSInstallDirPath() {
    StringBuilder path = new StringBuilder(SystemUtils.getUserDir().getAbsolutePath());
    path.append(SystemUtils.FILE_SEPARATOR)
            .append(PHANTOMJS_INSTALL_DIR)
            .append(SystemUtils.FILE_SEPARATOR);
    return path.toString();
}

From source file:com.norconex.commons.wicket.markup.html.filesystem.FileSystemTreeProvider.java

/**
 * Tells whether the given file denotes a Windows filesystem root, i.e. its
 * path is exactly the platform file separator. Always {@code false} on
 * non-Windows operating systems.
 */
public static boolean isWindowsRoot(File file) {
    if (!SystemUtils.IS_OS_WINDOWS) {
        return false;
    }
    return SystemUtils.FILE_SEPARATOR.equals(file.getPath());
}

From source file:de.mirkosertic.desktopsearch.QueryResult.java

/**
 * Returns the portion of the given path after the last file separator,
 * or the input unchanged when no separator is present.
 *
 * @param aFileName the (possibly {@code null}) path to strip
 * @return the simple file name, or {@code null} if {@code aFileName} is {@code null}
 */
public String getSimpleFileName(String aFileName) {
    if (aFileName == null) {
        return null;
    }
    int separatorIndex = aFileName.lastIndexOf(SystemUtils.FILE_SEPARATOR);
    // ">= 0" so a path with a leading separator and no other separators
    // (e.g. "/foo") is stripped too; the original "> 0" returned it unchanged.
    if (separatorIndex >= 0) {
        return aFileName.substring(separatorIndex + 1);
    }
    return aFileName;
}

From source file:io.github.blindio.prospero.core.browserdrivers.phantomjs.AbstractUnarchiver.java

/**
 * Extracts all entries of the given archive stream into the destination
 * directory returned by {@code getDestDirectory()}.
 *
 * @param arcInStream the archive stream to read; closed on success
 * @throws IOException if an entry escapes the destination directory or any I/O operation fails
 */
protected void extract(ArchiveInputStream arcInStream) throws IOException {
    ArchiveEntry entry;
    File destDir = new File(getDestDirectory());
    String canonicalDestPath = destDir.getCanonicalPath();

    /** Read the entries using the getNextEntry method **/
    while ((entry = (ArchiveEntry) arcInStream.getNextEntry()) != null) {

        System.out.println("Extracting: " + entry.getName());

        File target = new File(destDir, entry.getName());

        // Guard against "zip slip": an entry named e.g. "../../evil" must not
        // be written outside the destination directory.
        String canonicalTargetPath = target.getCanonicalPath();
        if (!canonicalTargetPath.equals(canonicalDestPath)
                && !canonicalTargetPath.startsWith(canonicalDestPath + File.separator)) {
            throw new IOException("Archive entry is outside of the destination directory: " + entry.getName());
        }

        if (entry.isDirectory()) {
            // Directory entry: just create the directory tree.
            target.mkdirs();
        } else {
            // Some archives contain file entries without preceding directory
            // entries, so ensure the parent directory exists first.
            File parent = target.getParentFile();
            if (parent != null) {
                parent.mkdirs();
            }

            // try-with-resources closes the output streams even when a write
            // fails; the original leaked them on exception.
            try (FileOutputStream fos = new FileOutputStream(target);
                    BufferedOutputStream dest = new BufferedOutputStream(fos, BUFFER)) {
                int count;
                byte[] data = new byte[BUFFER];
                while ((count = arcInStream.read(data, 0, BUFFER)) != -1) {
                    dest.write(data, 0, count);
                }
            }
        }
    }

    /** Close the input stream **/
    arcInStream.close();
    System.out.println("untar completed successfully!!");
}

From source file:com.mgmtp.perfload.perfalyzer.normalization.Normalizer.java

/**
 * Normalizes a single log file: reads it line by line, lets the configured
 * {@code normalizingStrategy} transform each line into channel-keyed data,
 * and appends the results to per-channel CSV files under {@code destDir},
 * mirroring the source path minus its second path element.
 *
 * @param file the file to normalize, relative to {@code sourceDir}
 * @throws IOException if reading the source or writing a destination fails
 */
public void normalize(final File file) throws IOException {
    checkState(!file.isAbsolute(), "'file' must be relative");

    String filePath = file.getPath();
    String[] pathElements = split(getPath(filePath), SystemUtils.FILE_SEPARATOR); // strip out dir

    // Rebuild the path without element 1 (the category directory).
    StrBuilder sb = new StrBuilder();
    for (int i = 0; i < pathElements.length; ++i) {
        if (i == 1) {
            continue; // strip out dir, e. g. perfmon-logs, measuring-logs
        }
        sb.appendSeparator(SystemUtils.FILE_SEPARATOR);
        sb.append(pathElements[i]);
    }
    String dirPath = sb.toString();

    // One open channel per channel key; outputStreams tracked separately so
    // they can all be closed quietly in the finally block.
    Map<String, FileChannel> channels = newHashMap();
    List<OutputStream> outputStreams = newArrayList();
    FileInputStream fis = null;
    try {
        fis = new FileInputStream(new File(sourceDir, filePath)); //relative to source dir
        for (Scanner scanner = new Scanner(fis.getChannel(), Charsets.UTF_8.name()); scanner.hasNext();) {
            String line = scanner.nextLine();
            // Skip blank lines and comment lines.
            if (trimToNull(line) == null || line.startsWith("#")) {
                continue;
            }
            List<ChannelData> channelDataList = normalizingStrategy.normalizeLine(line);
            for (ChannelData channelData : channelDataList) {

                // Lazily open the destination channel the first time a
                // channel key is seen.
                FileChannel channel = channels.get(channelData.getChannelKey());
                if (channel == null) {
                    String baseName = channelData.getChannelBaseName();
                    String key = channelData.getChannelKey();
                    String fileName = new File(dirPath, String.format("[%s][%s].csv", baseName, key)).getPath();
                    File destFile = new File(destDir, fileName);
                    destFile.getParentFile().mkdirs();
                    FileOutputStream fos = new FileOutputStream(destFile);
                    outputStreams.add(fos);
                    channel = fos.getChannel();
                    channels.put(channelData.getChannelKey(), channel);
                }

                writeLineToChannel(channel, channelData.getValue(), Charsets.UTF_8);
            }
        }
    } finally {
        // Closing the streams also closes their channels.
        outputStreams.forEach(IOUtils::closeQuietly);
        closeQuietly(fis);
    }
}

From source file:com.mgmtp.perfload.perfalyzer.workflow.GcLogWorkflow.java

@Override
public List<Runnable> getNormalizationTasks(final File inputDir, final File outputDir) {
    List<File> inputFiles = listFiles(inputDir);
    return inputFiles.stream().filter(fileNameStartsWith("gclog")).map(file -> {
        Runnable task = () -> {
            String filePath = file.getPath();
            String[] pathElements = split(getPath(filePath), SystemUtils.FILE_SEPARATOR); // strip out dir

            StrBuilder sb = new StrBuilder();
            for (int i = 0; i < pathElements.length; ++i) {
                if (i == 1) {
                    continue; // strip out dir, e. g. perfmon-logs, measuring-logs
                }//from  www .  j  a v  a2 s .c o  m
                sb.appendSeparator(SystemUtils.FILE_SEPARATOR);
                sb.append(pathElements[i]);
            }
            String dirPath = sb.toString();

            String s = trimToNull(substringAfter(getBaseName(filePath), "gclog"));
            File destFile = new File(outputDir, dirPath + SystemUtils.FILE_SEPARATOR + "[gclog]"
                    + (s != null ? "[" + s + "]." : ".") + getExtension(filePath));

            try {
                copyFile(new File(inputDir, file.getPath()), destFile);
            } catch (IOException ex) {
                throw new PerfAlyzerException("Error copying file: " + file, ex);
            }
        };
        return task;
    }).collect(toList());
}

From source file:ch.vorburger.mariadb4j.DBConfigurationBuilder.java

/**
 * Returns the effective data directory: the configured one if it is set and
 * not the default, otherwise a port-specific subdirectory of the default
 * data directory.
 */
protected String _getDataDir() {
    String dataDir = getDataDir();
    if (!isNull(dataDir) && !dataDir.equals(DEFAULT_DATA_DIR)) {
        return dataDir;
    }
    return DEFAULT_DATA_DIR + SystemUtils.FILE_SEPARATOR + getPort();
}

From source file:com.mgmtp.perfload.perfalyzer.reportpreparation.PerfMonReportPreparationStrategy.java

/**
 * Merges the aggregated per-type perfMon CSV files grouped by host into one
 * CSV per host (with a leading "type" column) and additionally one global
 * CSV per measurement key (with leading "host" and "type" columns) under
 * the "global" subdirectory.
 *
 * @param sourceDir directory containing the aggregated input files
 * @param destDir directory the merged CSV files are written to
 * @param byTypeAndHostMultimap input files keyed by "&lt;host&gt;&lt;sep&gt;&lt;key&gt;"
 * @throws IOException if reading an input file or writing an output file fails
 */
private void createCsvFiles(final File sourceDir, final File destDir,
        final ListMultimap<String, PerfAlyzerFile> byTypeAndHostMultimap) throws IOException {

    ListMultimap<String, String> globalContentListMultimap = LinkedListMultimap.create();
    StrTokenizer tokenizer = StrTokenizer.getCSVInstance();
    tokenizer.setDelimiterChar(DELIMITER);

    for (String key : byTypeAndHostMultimap.keySet()) {
        List<PerfAlyzerFile> filesByType = byTypeAndHostMultimap.get(key);
        File destFile = new File(destDir, key);

        // The key has the form "<host><separator><rest>"; split on the first separator only.
        String[] split = split(key, SystemUtils.FILE_SEPARATOR, 2);
        String host = split[0];
        String keyWithoutHost = split[1];

        List<String> contentList = newLinkedList();

        for (PerfAlyzerFile f : filesByType) {
            String type = f.getFileNameParts().get(1);

            // aggregated files always have two lines, a header and a data line
            List<String> lines = readLines(new File(sourceDir, f.getFile().getPath()), Charsets.UTF_8);
            if (lines.size() < 2) {
                // needs at least header and one content line
                continue;
            }
            if (contentList.isEmpty()) {
                // write header
                contentList.add(0, "\"type\"" + DELIMITER + lines.get(0));
            }
            String line = lines.get(1);
            tokenizer.reset(line);

            String[] columns = tokenizer.getTokenArray();

            // Re-quote every column and prepend the type column.
            StrBuilder sb = new StrBuilder(10 + line.length());
            appendEscapedAndQuoted(sb, DELIMITER, type);
            for (String column : columns) {
                appendEscapedAndQuoted(sb, DELIMITER, column);
            }

            line = sb.toString();
            contentList.add(line);

            List<String> globalContentList = globalContentListMultimap.get(keyWithoutHost);
            if (globalContentList.isEmpty()) {
                globalContentList.add("\"host\"" + DELIMITER + "\"type\"" + DELIMITER + lines.get(0));
            }
            globalContentList.add("\"" + host + "\"" + DELIMITER + line);
        }

        // Guard: if every input file was too short, contentList is empty and
        // subList(1, 0) below would throw IndexOutOfBoundsException. Nothing
        // to write in that case.
        if (contentList.isEmpty()) {
            continue;
        }

        // exclude header line from sorting
        Collections.sort(contentList.subList(1, contentList.size()));

        writeLines(destFile, Charsets.UTF_8.name(), contentList);
    }

    for (String key : globalContentListMultimap.keySet()) {
        List<String> globalContentList = globalContentListMultimap.get(key);

        // exclude header line from sorting; lists in the multimap are never
        // empty because the header is added before the first data line
        Collections.sort(globalContentList.subList(1, globalContentList.size()));

        writeLines(new File(destDir, "global" + SystemUtils.FILE_SEPARATOR + key), Charsets.UTF_8.name(),
                globalContentList);
    }
}

From source file:com.gatf.executor.report.ReportHandler.java

/**
 * Renders the final load-test report ("index.html", optionally prefixed) by
 * merging the suite statistics and resources into the index-load Velocity
 * template and writing the result below the configured output directory.
 * Errors are logged to stderr and swallowed, matching the original
 * best-effort behavior (callers do not expect this method to throw).
 *
 * @param prefix optional file-name prefix for the report; treated as "" when null
 * @param testSuiteStats statistics serialized to JSON for the template
 * @param acontext execution context supplying the configuration
 * @param nodes distributed node names, or null for a single-node run
 * @param nodeurls report URLs of the distributed nodes
 * @param loadTestResources resources serialized to JSON when running single-node
 */
public static void doFinalLoadTestReport(String prefix, TestSuiteStats testSuiteStats,
        AcceptanceTestContext acontext, List<String> nodes, List<String> nodeurls,
        List<LoadTestResource> loadTestResources) {
    GatfExecutorConfig config = acontext.getGatfExecutorConfig();
    VelocityContext context = new VelocityContext();

    try {
        String reportingJson = new ObjectMapper().writeValueAsString(testSuiteStats);
        context.put("suiteStats", reportingJson);

        if (nodes == null) {
            // Single-node run: embed the resources directly.
            reportingJson = new ObjectMapper().writeValueAsString(loadTestResources);
            context.put("loadTestResources", reportingJson);
        } else {
            // Distributed run: the template links to the per-node reports instead.
            context.put("loadTestResources", "{}");
            context.put("nodes", nodes);
            context.put("nodeurls", nodeurls);
        }
    } catch (Exception e) {
        e.printStackTrace();
    }

    try {
        File basePath;
        if (config.getOutFilesBasePath() != null) {
            basePath = new File(config.getOutFilesBasePath());
        } else {
            // Fall back to the classpath root when no base path is configured.
            URL url = Thread.currentThread().getContextClassLoader().getResource(".");
            basePath = new File(url.getPath());
        }
        File resource = new File(basePath, config.getOutFilesDir());

        VelocityEngine engine = new VelocityEngine();
        engine.setProperty(RuntimeConstants.RESOURCE_LOADER, "classpath");
        engine.setProperty("classpath.resource.loader.class", ClasspathResourceLoader.class.getName());
        engine.init();

        StringWriter writer = new StringWriter();
        engine.mergeTemplate("/gatf-templates/index-load.vm", context, writer);

        if (prefix == null) {
            prefix = "";
        }

        // try-with-resources: the original leaked the writer when write() threw.
        File reportFile = new File(resource.getAbsolutePath() + SystemUtils.FILE_SEPARATOR + prefix + "index.html");
        try (BufferedWriter fwriter = new BufferedWriter(new FileWriter(reportFile))) {
            fwriter.write(writer.toString());
        }
    } catch (Exception e) {
        e.printStackTrace();
    }
}

From source file:com.mgmtp.perfload.perfalyzer.reportpreparation.PerfMonReportPreparationStrategy.java

/**
 * Creates one line plot per host (all measurement types of that host as
 * separate series) and one global plot per measurement key combining the
 * series of all hosts under the "global" subdirectory.
 *
 * @param sourceDir directory containing the normalized data files
 * @param destDir directory the plot files are written to
 * @param byTypeAndHostMultimap input files keyed by "&lt;host&gt;&lt;sep&gt;&lt;key&gt;"
 * @throws IOException if reading a data file or writing a plot fails
 */
private void createPlots(final File sourceDir, final File destDir,
        final ListMultimap<String, PerfAlyzerFile> byTypeAndHostMultimap) throws IOException {

    Map<String, NumberDataSet> globalDataSets = newHashMap();

    for (String key : byTypeAndHostMultimap.keySet()) {
        List<PerfAlyzerFile> filesByType = byTypeAndHostMultimap.get(key);
        File destFile = new File(destDir, key);

        // The key has the form "<host><separator><rest>"; split on the first separator only.
        String[] split = split(key, SystemUtils.FILE_SEPARATOR, 2);
        String host = split[0];
        String keyWithoutHost = split[1];

        NumberDataSet dataSet = new NumberDataSet();

        for (PerfAlyzerFile f : filesByType) {
            String type = f.getFileNameParts().get(1);
            List<SeriesPoint> dataList = readDataFile(new File(sourceDir, f.getFile().getPath()),
                    Charsets.UTF_8, intNumberFormat);
            dataSet.addSeries(type, dataList);

            // computeIfAbsent replaces the original get/null-check/put dance.
            NumberDataSet globalDataSet = globalDataSets.computeIfAbsent(keyWithoutHost,
                    k -> new NumberDataSet());
            globalDataSet.addSeries(host + ":" + type, dataList);
        }

        plotCreator.writePlotFile(destFile, AxisType.LINEAR, AxisType.LINEAR, RendererType.LINES,
                ChartDimensions.DEFAULT, dataRange, false, dataSet);
    }

    for (Entry<String, NumberDataSet> entry : globalDataSets.entrySet()) {
        NumberDataSet dataSet = entry.getValue();
        File destFile = new File(destDir, "global" + SystemUtils.FILE_SEPARATOR + entry.getKey());
        plotCreator.writePlotFile(destFile, AxisType.LINEAR, AxisType.LINEAR, RendererType.LINES,
                ChartDimensions.DEFAULT, dataRange, false, dataSet);
    }
}