Example usage for java.io.File.pathSeparator

List of usage examples for java.io.File.pathSeparator

Introduction

This page collects example usages of java.io.File.pathSeparator.

Prototype

public static final String pathSeparator

Document

The system-dependent path-separator character, represented as a string for convenience.
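A minimal sketch of the most common use of this field: splitting the JVM classpath (the java.class.path system property) into individual entries. The class name below is illustrative only.

import java.io.File;

public class PathSeparatorDemo {
    public static void main(String[] args) {
        // File.pathSeparator is ":" on Unix-like systems and ";" on Windows.
        String classpath = System.getProperty("java.class.path");
        for (String entry : classpath.split(File.pathSeparator)) {
            System.out.println(entry);
        }
    }
}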

Usage

From source file: com.openkm.util.FormatUtil.java

/**
 * Replaces path separators (File.pathSeparator) and commas in the given string with HTML line breaks.
 */
public static String splitBySeparator(String str) {
    String ret = str.replace(File.pathSeparator, "<br/>");
    ret = ret.replace(",", "<br/>");
    return ret;
}
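A short usage sketch; the input string and the Unix-style ":" separator are illustrative assumptions:

// On a platform where File.pathSeparator is ":":
String html = FormatUtil.splitBySeparator("/opt/a.jar:/opt/b.jar,/opt/c.jar");
// html == "/opt/a.jar<br/>/opt/b.jar<br/>/opt/c.jar"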

From source file: org.apache.hadoop.sqoop.orm.CompilationManager.java

/**
 * Compile the .java files into .class files via embedded javac call.
 */
public void compile() throws IOException {
    List<String> args = new ArrayList<String>();

    // ensure that the jar output dir exists.
    String jarOutDir = options.getJarOutputDir();
    boolean mkdirSuccess = new File(jarOutDir).mkdirs();
    if (!mkdirSuccess) {
        LOG.debug("Warning: Could not make directories for " + jarOutDir);
    }

    // find hadoop-*-core.jar for classpath.
    String coreJar = findHadoopCoreJar();
    if (null == coreJar) {
        // Couldn't find a core jar to insert into the CP for compilation.
        // If, however, we're running this from a unit test, then the path
        // to the .class files might be set via the hadoop.alt.classpath property
        // instead. Check there first.
        String coreClassesPath = System.getProperty("hadoop.alt.classpath");
        if (null == coreClassesPath) {
            // no -- we're out of options. Fail.
            throw new IOException("Could not find hadoop core jar!");
        } else {
            coreJar = coreClassesPath;
        }
    }

    // find sqoop jar for compilation classpath
    String sqoopJar = findThisJar();
    if (null != sqoopJar) {
        sqoopJar = File.pathSeparator + sqoopJar;
    } else {
        LOG.warn("Could not find sqoop jar; child compilation may fail");
        sqoopJar = "";
    }

    String curClasspath = System.getProperty("java.class.path");

    args.add("-sourcepath");
    String srcOutDir = options.getCodeOutputDir();
    args.add(srcOutDir);

    args.add("-d");
    args.add(jarOutDir);

    args.add("-classpath");
    args.add(curClasspath + File.pathSeparator + coreJar + sqoopJar);

    // add all the source files
    for (String srcfile : sources) {
        args.add(srcOutDir + srcfile);
    }

    StringBuilder sb = new StringBuilder();
    for (String arg : args) {
        sb.append(arg + " ");
    }

    // NOTE(aaron): Usage is at http://java.sun.com/j2se/1.5.0/docs/tooldocs/solaris/javac.html
    LOG.info("Invoking javac with args: " + sb.toString());
    int javacRet = com.sun.tools.javac.Main.compile(args.toArray(new String[0]));
    if (javacRet != 0) {
        throw new IOException("javac exited with status " + javacRet);
    }
}
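On current JDKs, the same compilation step is usually performed through the javax.tools API rather than the internal com.sun.tools.javac.Main class. A minimal sketch, assuming the same args list built above:

import javax.tools.JavaCompiler;
import javax.tools.ToolProvider;

JavaCompiler compiler = ToolProvider.getSystemJavaCompiler();  // null when running on a JRE without a compiler
int ret = compiler.run(null, null, null, args.toArray(new String[0]));
if (ret != 0) {
    throw new IOException("javac exited with status " + ret);
}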

From source file: org.kgi.mybatis.scala.generator.GenerateDaoMojo.java

public void execute() throws MojoExecutionException {
    String scaladocParamFileName = project.getBuild().getOutputDirectory() + File.separator + "myb-doclet.txt";

    try {
        File f = outputDirectory;
        getLog().info("writing generated files to directory:" + outputDirectory.getAbsolutePath());
        if (!f.exists()) {
            f.mkdirs();
        }

        File sourcesDir = new File(project.getBasedir(), "src" + File.separator + "main");
        getLog().info("sources located in:" + sourcesDir.getAbsolutePath());
        Collection<File> sourceFiles = FileUtils.listFiles(sourcesDir, new String[] { "scala", "java" }, true);

        PrintWriter scaladocParamFileWriter = new PrintWriter(new FileWriter(scaladocParamFileName));
        scaladocParamFileWriter.println("-d");
        scaladocParamFileWriter.println("src");
        scaladocParamFileWriter.println("-doc-generator");
        scaladocParamFileWriter.println("org.kgi.mybatis.scala.generator.doclet.MyBatisMappingDoclet");
        for (File sourceFile : sourceFiles) {
            scaladocParamFileWriter.println(sourceFile.getAbsolutePath());
        }
        scaladocParamFileWriter.flush();
        scaladocParamFileWriter.close();

        DependencyNode depTree = dependencyGraphBuilder.buildDependencyGraph(project, new ArtifactFilter() {
            public boolean include(Artifact artifact) {
                return "jar".equals(artifact.getType());
            }
        });

        List deps = collectDependencies(depTree);

        Iterator depIterator = deps.iterator();
        StringBuilder cpBuilder = new StringBuilder();
        String docletPath = null;

        while (depIterator.hasNext()) {
            Artifact dep = (Artifact) depIterator.next();

            String path = System.getProperty("user.home") + File.separator + ".m2" + File.separator
                    + "repository" + File.separator + dep.getGroupId().replace('.', File.separatorChar)
                    + File.separator + dep.getArtifactId() + File.separator + dep.getVersion() + File.separator
                    + dep.getArtifactId() + "-" + dep.getVersion() + "." + dep.getType();
            if (cpBuilder.length() > 0) {
                cpBuilder.append(File.pathSeparator);
            }
            cpBuilder.append(path);
            if ("mybatis-scala-gen-doclet".equals(dep.getArtifactId())) {
                docletPath = path;
            }
        }
        CommandLine cmdl = new CommandLine("scaladoc");
        cmdl.addArgument("-Dmyb-gen-destination=" + outputDirectory.getAbsolutePath());
        cmdl.addArgument("-Dmyb-gen-destination-package=" + destinationPackage);
        cmdl.addArgument("-classpath");
        cmdl.addArgument(cpBuilder.toString());
        cmdl.addArgument("-toolcp");
        cmdl.addArgument(docletPath);
        cmdl.addArgument("@" + scaladocParamFileName);

        getLog().info("generation command:\n" + cmdl.toString());
        DefaultExecutor executor = new DefaultExecutor();
        executor.setExitValue(0);
        executor.execute(cmdl);
    } catch (Exception e) {
        getLog().error(e);
        throw new MojoExecutionException("Problems generating DAO sources" + scaladocParamFileName);
    }
}

From source file: com.asakusafw.compiler.bootstrap.BatchCompilerDriver.java

private static boolean start(String[] args) throws Exception {
    CommandLineParser parser = new BasicParser();
    CommandLine cmd = parser.parse(OPTIONS, args);
    String output = cmd.getOptionValue(OPT_OUTPUT.getOpt());
    String className = cmd.getOptionValue(OPT_CLASS.getOpt());
    String packageName = cmd.getOptionValue(OPT_PACKAGE.getOpt());
    String hadoopWork = cmd.getOptionValue(OPT_HADOOPWORK.getOpt());
    String compilerWork = cmd.getOptionValue(OPT_COMPILERWORK.getOpt());
    String link = cmd.getOptionValue(OPT_LINK.getOpt());
    String plugin = cmd.getOptionValue(OPT_PLUGIN.getOpt());

    File outputDirectory = new File(output);
    Location hadoopWorkLocation = Location.fromPath(hadoopWork, '/');
    File compilerWorkDirectory = new File(compilerWork);
    List<File> linkingResources = new ArrayList<>();
    if (link != null) {
        for (String s : link.split(File.pathSeparator)) {
            linkingResources.add(new File(s));
        }
    }
    List<URL> pluginLocations = new ArrayList<>();
    if (plugin != null) {
        for (String s : plugin.split(File.pathSeparator)) {
            if (s.trim().isEmpty()) {
                continue;
            }
            try {
                File file = new File(s);
                if (file.exists() == false) {
                    throw new FileNotFoundException(file.getAbsolutePath());
                }
                URL url = file.toURI().toURL();
                pluginLocations.add(url);
            } catch (IOException e) {
                LOG.warn(MessageFormat.format(Messages.getString("BatchCompilerDriver.warnFailedToLoadPlugin"), //$NON-NLS-1$
                        s), e);
            }
        }
    }

    Class<? extends BatchDescription> batchDescription = Class.forName(className)
            .asSubclass(BatchDescription.class);
    boolean succeeded = compile(outputDirectory, batchDescription, packageName, hadoopWorkLocation,
            compilerWorkDirectory, linkingResources, pluginLocations);

    if (succeeded) {
        LOG.info(MessageFormat.format(Messages.getString("BatchCompilerDriver.infoComplete"), //$NON-NLS-1$
                batchDescription.getName()));
    }
    return succeeded;
}

From source file: averroes.options.AverroesOptions.java

/**
 * The list of the library JAR files, separated by {@link File#pathSeparator}.
 *
 * @return the library JAR file paths
 */
public static List<String> getLibraryJarFiles() {
    return Arrays.asList(cmd.getOptionValue(libraryJars.getOpt(), "").split(File.pathSeparator));
}
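Note that when the option is absent, the default value "" still yields a one-element list containing the empty string, because String.split applied to an empty string returns [""]. A defensive variant (a sketch, not the project's code) filters such entries:

public static List<String> getLibraryJarFiles() {
    List<String> jars = new ArrayList<>();
    for (String s : cmd.getOptionValue(libraryJars.getOpt(), "").split(File.pathSeparator)) {
        // skip blank entries produced by an empty or malformed option value
        if (!s.trim().isEmpty()) {
            jars.add(s);
        }
    }
    return jars;
}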

From source file: org.apache.zeppelin.spark.DepInterpreter.java

private void createIMain() {
    Settings settings = new Settings();
    URL[] urls = getClassloaderUrls();

    // set classpath for scala compiler
    PathSetting pathSettings = settings.classpath();
    String classpath = "";
    List<File> paths = currentClassPath();
    for (File f : paths) {
        if (classpath.length() > 0) {
            classpath += File.pathSeparator;
        }
        classpath += f.getAbsolutePath();
    }

    if (urls != null) {
        for (URL u : urls) {
            if (classpath.length() > 0) {
                classpath += File.pathSeparator;
            }
            classpath += u.getFile();
        }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));

    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
    interpreter.settings_$eq(settings);

    interpreter.createInterpreter();

    intp = Utils.invokeMethod(interpreter, "intp");

    if (Utils.isScala2_10()) {
        Utils.invokeMethod(intp, "setContextClassLoader");
        Utils.invokeMethod(intp, "initializeSynchronous");
    }

    depc = new SparkDependencyContext(getProperty("zeppelin.dep.localrepo"),
            getProperty("zeppelin.dep.additionalRemoteRepository"));
    if (Utils.isScala2_10()) {
        completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
                new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") }, new Object[] { intp });
    }
    interpret("@transient var _binder = new java.util.HashMap[String, Object]()");
    Map<String, Object> binder;
    if (Utils.isScala2_10()) {
        binder = (Map<String, Object>) getValue("_binder");
    } else {
        binder = (Map<String, Object>) getLastObject();
    }
    binder.put("depc", depc);

    interpret("@transient val z = " + "_binder.get(\"depc\")"
            + ".asInstanceOf[org.apache.zeppelin.spark.dep.SparkDependencyContext]");

}

From source file: nl.b3p.catalog.arcgis.ArcObjectsSynchronizerForker.java

private static String buildClasspath(ServletContext context) {
    StringBuilder cp = new StringBuilder();
    cp.append("classes");

    File lib = new File(context.getRealPath("/WEB-INF/lib"));
    File[] jarFiles = lib.listFiles(new FilenameFilter() {
        public boolean accept(File dir, String name) {
            return name.endsWith(".jar");
        }
    });

    for (File f : jarFiles) {
        cp.append(File.pathSeparator);
        cp.append("lib");
        cp.append(File.separatorChar);
        cp.append(f.getName());
    }
    return cp.toString();
}
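File.listFiles returns null when the directory is missing or unreadable, so the loop above would throw a NullPointerException in that case. A defensive variant of the same loop (a sketch) guards against it:

// listFiles() returns null if /WEB-INF/lib cannot be read
File[] jarFiles = lib.listFiles((dir, name) -> name.endsWith(".jar"));
if (jarFiles != null) {
    for (File f : jarFiles) {
        cp.append(File.pathSeparator);
        cp.append("lib");
        cp.append(File.separatorChar);
        cp.append(f.getName());
    }
}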

From source file: com.twinsoft.convertigo.engine.localbuild.BuildLocally.java

private String runCommand(File launchDir, String command, List<String> parameters, boolean mergeError)
        throws Throwable {
    if (is(OS.win32)) {
        // Works for cordova and npm
        command += ".cmd";
    }

    String shellFullpath = command;
    String paths = getLocalBuildAdditionalPath();
    paths = (paths.length() > 0 ? paths + File.pathSeparator : "") + System.getenv("PATH");

    String defaultPaths = null;
    if (is(OS.mac) || is(OS.linux)) {
        defaultPaths = "/usr/local/bin";
    } else if (is(OS.win32)) {
        String programFiles = System.getenv("ProgramW6432");
        if (programFiles != null && programFiles.length() > 0) {
            defaultPaths = programFiles + File.separator + "nodejs";
        }

        programFiles = System.getenv("ProgramFiles");
        if (programFiles != null && programFiles.length() > 0) {
            defaultPaths = (defaultPaths == null ? "" : defaultPaths + File.pathSeparator) + programFiles
                    + File.separator + "nodejs";
        }

        String appData = System.getenv("APPDATA");
        if (appData != null && appData.length() > 0) {
            defaultPaths = (defaultPaths == null ? "" : defaultPaths + File.pathSeparator) + appData
                    + File.separator + "npm";
        }
    }
    paths += File.pathSeparator + defaultPaths;

    // Checks if the command is already full path 
    if (!(new File(shellFullpath).exists())) {
        // Else search where the "exec" is and build the absolute path for this "exec"
        shellFullpath = getFullPath(paths, command);

        // If the "exec" is not found then it search it elsewhere
        if (shellFullpath == null) {
            shellFullpath = command;
        }
    }

    // Prepares the command
    parameters.add(0, shellFullpath);
    ProcessBuilder pb = new ProcessBuilder(parameters);
    // Set the directory from where the command will be executed
    pb.directory(launchDir.getCanonicalFile());

    Map<String, String> pbEnv = pb.environment();
    // must set "Path" for Windows 8.1 64
    pbEnv.put(pbEnv.get("PATH") == null ? "Path" : "PATH", paths);

    // Specific to npm command
    if (shellFullpath.endsWith("npm") || shellFullpath.endsWith("npm.cmd")) {

        // Set the proxy for npm
        String proxyMode = EnginePropertiesManager
                .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_MODE);
        if (proxyMode.equals(ProxyMode.manual.getValue())) {
            String proxyAuthMethod = EnginePropertiesManager
                    .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_METHOD);

            if (proxyAuthMethod.equals(ProxyMethod.anonymous.getValue())
                    || proxyAuthMethod.equals(ProxyMethod.basic.getValue())) {
                String proxyHost = EnginePropertiesManager
                        .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_HOST);
                String proxyPort = EnginePropertiesManager
                        .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_PORT);

                String npmProxy = proxyHost + ":" + proxyPort;

                if (proxyAuthMethod.equals(ProxyMethod.basic.getValue())) {
                    String proxyUser = EnginePropertiesManager
                            .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_USER);
                    String proxyPassword = EnginePropertiesManager
                            .getProperty(EnginePropertiesManager.PropertyName.PROXY_SETTINGS_PASSWORD);

                    npmProxy = proxyUser + ":" + proxyPassword + "@" + npmProxy;
                }

                pbEnv.put("http-proxy", "http://" + npmProxy);
                pbEnv.put("https-proxy", "http://" + npmProxy);
            }
        }
    }

    pb.redirectErrorStream(mergeError);

    Engine.logEngine.info("Executing command : " + parameters);

    process = pb.start();

    cmdOutput = "";
    // Logs the output
    new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                String line;
                processCanceled = false;

                BufferedReader bis = new BufferedReader(new InputStreamReader(process.getInputStream()));
                while ((line = bis.readLine()) != null) {
                    Engine.logEngine.info(line);
                    BuildLocally.this.cmdOutput += line;
                }
            } catch (IOException e) {
                Engine.logEngine.error("Error while executing command", e);
            }
        }
    }).start();

    if (!mergeError) {
        // Logs the error output
        new Thread(new Runnable() {
            @Override
            public void run() {
                try {
                    String line;
                    processCanceled = false;

                    BufferedReader bis = new BufferedReader(new InputStreamReader(process.getErrorStream()));
                    while ((line = bis.readLine()) != null) {
                        Engine.logEngine.error(line);
                        errorLines += line;
                    }
                } catch (IOException e) {
                    Engine.logEngine.error("Error while executing command", e);
                }
            }
        }).start();
    }

    int exitCode = process.waitFor();

    if (exitCode != 0 && exitCode != 127) {
        throw new Exception(
                "Exit code " + exitCode + " when running the command '" + command + "' with parameters : '"
                        + parameters + "'. The output of the command is : '" + cmdOutput + "'");
    }

    return cmdOutput;
}

From source file: org.apache.hadoop.hdfs.server.namenode.StandbyStorageRetentionManager.java

/**
 * Delete backups according to the retention policy.
 *
 * @param root root directory
 * @param backups backup directory names, sorted by timestamp from oldest to newest
 * @param daysToKeep maximum age of backups in days; 0 means keep only the most recent copies
 * @param copiesToKeep minimum number of backup copies to retain
 */
static void deleteOldBackups(File root, String[] backups, int daysToKeep, int copiesToKeep) {
    Date now = new Date(AvatarNode.now());

    // leave the copiesToKeep-1 at least (+1 will be the current backup)
    int maxIndex = Math.max(0, backups.length - copiesToKeep + 1);

    for (int i = 0; i < maxIndex; i++) {
        String backup = backups[i];
        Date backupDate = null;
        try {
            backupDate = dateForm.parse(backup.substring(backup.indexOf(File.pathSeparator) + 1));
        } catch (ParseException pex) {
            // This should not happen because of the 
            // way we construct the list
        }
        long backupAge = now.getTime() - backupDate.getTime();

        // if daysToKeep is set delete everything older providing that
        // we retain at least copiesToKeep copies
        boolean deleteOldBackup = (daysToKeep > 0 && backupAge > daysToKeep * 24 * 60 * 60 * 1000);

        // if daysToKeep is set to zero retain most recent copies
        boolean deleteExtraBackup = (daysToKeep == 0);

        if (deleteOldBackup || deleteExtraBackup) {
            // This backup is older than daysToKeep, delete it
            try {
                FileUtil.fullyDelete(new File(root, backup));
                LOG.info("Deleted backup " + new File(root, backup));
            } catch (IOException iex) {
                LOG.error("Error deleting backup " + new File(root, backup), iex);
            }
        } else {
            // done with deleting old backups
            break;
        }
    }
}

From source file: org.cloudifysource.shell.commands.TestRecipe.java

/**
 * Create a full classpath, including the existing classpath and additional paths to Jars and service files.
 *
 * @param serviceFolder
 *            The folder of the current service
 * @return A full classpath
 */
private String createClasspathString(final File serviceFolder) {

    // Start with current environment variable
    String currentClassPathEnv = System.getenv("CLASSPATH");
    if (currentClassPathEnv == null) {
        currentClassPathEnv = "";
    }
    currentClassPathEnv += File.pathSeparator;
    final StringBuilder sb = new StringBuilder(currentClassPathEnv);

    // Add the required jar dirs
    for (final String jarDir : JAR_DIRS) {
        final File dir = getDirIfExists(jarDir);
        sb.append(dir.getAbsolutePath()).append(File.separator).append("*").append(File.pathSeparator);
    }

    // finally, add the service folder to the recipe, so it finds the
    // META-INF files, and the lib dir
    sb.append(serviceFolder.getAbsolutePath()).append(File.separator).append(File.pathSeparator);
    sb.append(serviceFolder.getAbsolutePath()).append(File.separator).append("lib").append(File.separator)
            .append("*").append(File.pathSeparator);

    //sb.append(serviceFolder.getAbsolutePath() + "/ext/usmlib")
    // TODO - add local recipe jar files!
    return sb.toString();

}