Example usage for java.io File pathSeparator

List of usage examples for java.io File pathSeparator

Introduction

This page lists example usages of java.io.File.pathSeparator, collected from open-source projects.

Prototype

public static final String pathSeparator

Document

The system-dependent path-separator character, represented as a string for convenience. This string contains a single character, namely pathSeparatorChar: ':' on UNIX-like systems and ';' on Windows. It separates the entries of a path list such as the PATH environment variable or the Java class path.
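
Before the project-level examples below, here is a minimal, self-contained sketch (assuming only the standard JDK; the class name PathSeparatorDemo is ours) of the typical uses: printing the separator, splitting a search path into its entries, and joining entries back together.

import java.io.File;

public class PathSeparatorDemo {
    public static void main(String[] args) {
        // ':' on UNIX-like systems, ';' on Windows
        System.out.println("pathSeparator = \"" + File.pathSeparator + "\"");

        // Split a search path (e.g. PATH or the class path) into its entries.
        String path = System.getenv("PATH");
        if (path != null) {
            for (String entry : path.split(File.pathSeparator)) {
                System.out.println("  " + entry);
            }
        }

        // Join entries back into a single search-path string.
        String joined = String.join(File.pathSeparator, "lib/a.jar", "lib/b.jar");
        System.out.println("joined = " + joined);
    }
}

Note that File.pathSeparator (between the entries of a path list) is distinct from File.separator (between the directory names within a single path); several of the examples below rely on that distinction.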

Usage

From source file:org.codehaus.mojo.exec.ExecMojo.java

private List<String> getExecutablePaths(Map<String, String> enviro) {
    List<String> paths = new ArrayList<String>();
    paths.add("");

    String path = enviro.get("PATH");
    if (path != null) {
        paths.addAll(Arrays.asList(StringUtils.split(path, File.pathSeparator)));
    }

    return paths;
}

From source file:org.kepler.util.sql.HSQL.java

/** Start the HSQL Server */
private static int _launchDBServer(String dbNamePath, String dbAlias, String dbPort) {

    if (_forkServers) {
        System.out.println("spawning HSQL server for " + dbNamePath);

        // find the hsql jar
        String classpath = System.getProperty("java.class.path");
        String[] jars = classpath.split(File.pathSeparator);
        String hsqlJar = null;
        for (String jar : jars) {
            if (jar.matches(".*hsqldb-[\\d\\.]+\\.jar$")) {
                hsqlJar = jar;
                break;
            }
        }

        if (hsqlJar == null) {
            MessageHandler.error("Unable to find HSQL jar in class path.");
            return ServerConstants.SERVER_STATE_SHUTDOWN;
        }

        // NOTE: the database argument must include the file name of
        // the database. when using the Server API to start the server
        // (see below), the database argument does NOT include the file
        // name, but uses the alias as the file name.

        ProcessBuilder procBuilder = new ProcessBuilder("java", "-cp", hsqlJar, "org.hsqldb.Server", "-address",
                "localhost", "-port", dbPort, "-dbname.0", dbAlias, "-database.0",
                dbNamePath + File.separator + dbAlias);
        procBuilder.redirectErrorStream(true);

        //for(String str : procBuilder.command())
        //System.out.print(str + " ");
        //System.out.println();

        try {
            /*Process proc =*/ procBuilder.start();

            // sleep a few seconds so that it has time to start before we
            // try to connect
            // XXX this may not be long enough
            Thread.sleep(3000);
            return ServerConstants.SERVER_STATE_ONLINE;
        } catch (Exception e) {
            MessageHandler.error("Error starting HSQL server.", e);
            return ServerConstants.SERVER_STATE_SHUTDOWN;
        }
    } else {
        Server server = new Server();

        if (!_isDebugging) {
            server.setLogWriter(null);
            server.setErrWriter(null);
        } else {
            _log.debug("starting server for " + dbNamePath);
        }

        // the file name is full path and alias.
        String dbFileName = dbNamePath + File.separator + dbAlias;

        server.setDatabasePath(0, dbFileName);
        server.setDatabaseName(0, dbAlias);

        if (dbPort != null && dbPort.length() > 0) {
            try {
                int port = Integer.parseInt(dbPort);
                server.setPort(port);
            } catch (NumberFormatException e) {
                System.out.print("ERROR: bad port " + dbPort + ": " + e.getMessage());
            }
        }

        server.setSilent(true);
        server.setTrace(false);
        server.setNoSystemExit(true);
        server.start();

        _servers.add(server);

        return server.getState();
    }
}

From source file:net.rim.ejde.internal.packaging.PackagingManager.java

private void calculateRAPCCommand() throws CoreException {
    // calculate rapc commands
    SourcerootVisitor visitor = new SourcerootVisitor();
    _bbProject.getProject().accept(visitor);
    // get customized jad and rapc files
    String custjad = PackagingUtils.getCustomJadFile(_bbProject);
    if (custjad != null) {
        _otherFiles.add(custjad);
    }
    String custrapc = PackagingUtils.getCustomRapcFile(_bbProject);
    if (custrapc != null) {
        _otherFiles.add(custrapc);
    }
    // get imported jars (project level and workspace level)
    _imports = getCompileImports(_bbProject);
    // check if there is any exported midlet jar
    // TODO: Try invoking rapc.jar in Windows instead of rapc.exe
    if (OSUtils.isWindows()) {
        // add path of rapc.exe
        _rapcCommandsHead.add(getRAPCPath());
    } else {
        _rapcCommandsHead.add("java");
        _rapcCommandsHead.add("-jar");
        // add path of rapc.jar
        _rapcCommandsHead.add(getRAPCPath());
    }

    // get compile options
    _compileOptions = getCompileOptions();
    // add compile options
    if (_compileOptions.size() > 0) {
        _rapcCommandsHead.addAll(_compileOptions);
    }
    // get source roots
    _sourceRoots = visitor.getSourceRoots(_bbProject.getProject());
    // add source roots
    // TODO: Added for preverifier and do more investigation and
    // see whether we can eliminate this option or not.
    if (!OSUtils.isWindows()) {
        String binDir = getRAPCPath().replace("rapc.jar", "");
        String exepath = "-exepath=" + binDir;
        _rapcCommandsHead.add(exepath);
    }
    StringBuffer rapcComandBuffer = new StringBuffer();
    if (_sourceRoots.size() > 0) {
        rapcComandBuffer.append("-sourceroot=");
        rapcComandBuffer.append(composeString(_sourceRoots, File.pathSeparator));
        _rapcCommandsHead.add(rapcComandBuffer.toString());
    }
    // get protection options
    _protectionOptions = getProtectionOptions(false);
    // add imports
    rapcComandBuffer = new StringBuffer();
    writeToFile = (_imports.size() + _protectionOptions.size()) > MAX_COMMAND_ELEMENTS;
    if (_imports.size() > 0) {
        if (writeToFile) {
            for (int i = 0; i < _imports.size(); i++) {
                _rapcCommands.add("-import=" + _imports.get(i).toString());
            }
        } else {
            rapcComandBuffer.append("-import=");
            rapcComandBuffer.append(composeImportsString(_imports, File.pathSeparator, false));
            _rapcCommands.add(rapcComandBuffer.toString());
        }
    }
    // add protection options
    if (_protectionOptions.size() > 0) {
        _rapcCommands.addAll(_protectionOptions);
    }
    // add exported jar files
    for (int i = 0; i < _imports.size(); i++) {
        if (_imports.get(i).isExported()) {
            _rapcCommands.add(_imports.get(i).getPath());
        }
    }

    prepareDescriptor();

    // add other files
    _rapcCommands.addAll(_otherFiles);
    // get output folders
    getOutputFolder();
    // add output folders
    if (_outputFolders.size() > 0) {
        _rapcCommands.addAll(_outputFolders);
    }
}

From source file:com.alibaba.jstorm.blobstore.BlobStoreUtils.java

private static String resourcesJar() {
    String path = System.getProperty("java.class.path");
    if (path == null) {
        return null;
    }

    String[] paths = path.split(File.pathSeparator);

    List<String> jarPaths = new ArrayList<>();
    for (String s : paths) {
        if (s.endsWith(".jar")) {
            jarPaths.add(s);
        }
    }

    /**
     * FIXME, problematic??
     */
    List<String> rtn = new ArrayList<>();
    int size = jarPaths.size();
    for (int i = 0; i < size; i++) {
        if (JStormUtils.zipContainsDir(jarPaths.get(i), StormConfig.RESOURCES_SUBDIR)) {
            rtn.add(jarPaths.get(i));
        }
    }

    if (rtn.size() == 0)
        return null;

    return rtn.get(0);
}

From source file:org.hyperic.hq.product.ProductPluginManager.java

public Collection<PluginInfo> getAllPluginInfoDirectFromFileSystem(String path) {
    final Collection<PluginInfo> rtn = new ArrayList<PluginInfo>();
    final List<String> dirs = StringUtil.explode(path, File.pathSeparator);
    for (final String d : dirs) {
        final File dir = new File(d);
        if (!dir.exists() || !dir.isDirectory()) {
            continue;
        }
        File[] plugins = dir.listFiles();
        for (File plugin : plugins) {
            String name = plugin.getName();
            if (name.endsWith("-plugin.jar") || name.endsWith("-plugin.xml")) {
                rtn.add(new PluginInfo(plugin, "NONE"));
            }
        }
    }
    return rtn;
}

From source file:ca.weblite.jdeploy.JDeploy.java

public void bundleJetty() throws IOException {
    // Now we need to create the stub.
    File bin = new File(getBinDir());
    //File pkgPath = new File(bin, "ca"+File.separator+"weblite"+File.separator+"jdeploy");
    //pkgPath.mkdirs();
    //File stubFile = new File(bin, "WarRunner.jar");
    InputStream warRunnerInput = getClass().getResourceAsStream("WarRunner.jar");
    //FileUtils.copyInputStreamToFile(warRunnerInput, stubFile);

    //String stubFileSrc = FileUtils.readFileToString(stubFile, "UTF-8");

    //stubFileSrc = stubFileSrc.replace("{{PORT}}", String.valueOf(getPort(0)));
    //stubFileSrc = stubFileSrc.replace("{{WAR_PATH}}", new File(getWar(null)).getName());
    //FileUtils.writeStringToFile(stubFile, stubFileSrc, "UTF-8");

    InputStream jettyRunnerJarInput = getClass().getResourceAsStream("jetty-runner.jar");
    File libDir = new File(bin, "lib");
    libDir.mkdir();
    File jettyRunnerDest = new File(libDir, "jetty-runner.jar");
    File warRunnerDest = new File(libDir, "WarRunner.jar");
    FileUtils.copyInputStreamToFile(jettyRunnerJarInput, jettyRunnerDest);
    FileUtils.copyInputStreamToFile(warRunnerInput, warRunnerDest);
    /*
    ProcessBuilder javac = new ProcessBuilder();
    javac.inheritIO();
    javac.directory(bin);
    javac.command("javac", "-cp", "lib/jetty-runner.jar", "ca" + File.separator + "weblite" + File.separator + "jdeploy" + File.separator + "WarRunner.java");
    Process javacP = javac.start();
    int javacResult=0;
    try {
    javacResult = javacP.waitFor();
    } catch (InterruptedException ex) {
    Logger.getLogger(JDeploy.class.getName()).log(Level.SEVERE, null, ex);
    throw new IOException(ex);
    }
    if (javacResult != 0) {
    System.exit(javacResult);
    }
    */

    setMainClass("ca.weblite.jdeploy.WarRunner");
    setClassPath("." + File.pathSeparator + "lib/jetty-runner.jar" + File.pathSeparator + "lib/WarRunner.jar");

}

From source file:org.apache.zeppelin.spark.SparkInterpreter.java

@Override
public void open() {
    // set properties and do login before creating any spark stuff for secured cluster
    if (isYarnMode()) {
        System.setProperty("SPARK_YARN_MODE", "true");
    }
    if (getProperty().containsKey("spark.yarn.keytab") && getProperty().containsKey("spark.yarn.principal")) {
        try {
            String keytab = getProperty().getProperty("spark.yarn.keytab");
            String principal = getProperty().getProperty("spark.yarn.principal");
            UserGroupInformation.loginUserFromKeytab(principal, keytab);
        } catch (IOException e) {
            throw new RuntimeException("Can not pass kerberos authentication", e);
        }
    }

    conf = new SparkConf();
    URL[] urls = getClassloaderUrls();

    // Very nice discussion about how scala compiler handle classpath
    // https://groups.google.com/forum/#!topic/scala-user/MlVwo2xCCI0

    /*
     * > val env = new nsc.Settings(errLogger) > env.usejavacp.value = true > val p = new
     * Interpreter(env) > p.setContextClassLoader > Alternatively you can set the class path through
     * nsc.Settings.classpath.
     *
     * >> val settings = new Settings() >> settings.usejavacp.value = true >>
     * settings.classpath.value += File.pathSeparator + >> System.getProperty("java.class.path") >>
     * val in = new Interpreter(settings) { >> override protected def parentClassLoader =
     * getClass.getClassLoader >> } >> in.setContextClassLoader()
     */
    Settings settings = new Settings();

    // process args
    String args = getProperty("args");
    if (args == null) {
        args = "";
    }

    String[] argsArray = args.split(" ");
    LinkedList<String> argList = new LinkedList<>();
    for (String arg : argsArray) {
        argList.add(arg);
    }

    DepInterpreter depInterpreter = getDepInterpreter();
    String depInterpreterClasspath = "";
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFiles();
            if (files != null) {
                for (File f : files) {
                    if (depInterpreterClasspath.length() > 0) {
                        depInterpreterClasspath += File.pathSeparator;
                    }
                    depInterpreterClasspath += f.getAbsolutePath();
                }
            }
        }
    }

    if (Utils.isScala2_10()) {
        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        Object sparkCommandLine = Utils.instantiateClass("org.apache.spark.repl.SparkCommandLine",
                new Class[] { scala.collection.immutable.List.class }, new Object[] { list });

        settings = (Settings) Utils.invokeMethod(sparkCommandLine, "settings");
    } else {
        String sparkReplClassDir = getProperty("spark.repl.classdir");
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("spark.repl.classdir");
        }
        if (sparkReplClassDir == null) {
            sparkReplClassDir = System.getProperty("java.io.tmpdir");
        }

        synchronized (sharedInterpreterLock) {
            if (outputDir == null) {
                outputDir = createTempDir(sparkReplClassDir);
            }
        }
        argList.add("-Yrepl-class-based");
        argList.add("-Yrepl-outdir");
        argList.add(outputDir.getAbsolutePath());

        String classpath = "";
        if (conf.contains("spark.jars")) {
            // join the comma-separated jar list with the path separator to form a class path
            classpath = StringUtils.join(conf.get("spark.jars").split(","), File.pathSeparator);
        }

        if (!depInterpreterClasspath.isEmpty()) {
            if (!classpath.isEmpty()) {
                classpath += File.pathSeparator;
            }
            classpath += depInterpreterClasspath;
        }

        if (!classpath.isEmpty()) {
            argList.add("-classpath");
            argList.add(classpath);
        }

        scala.collection.immutable.List<String> list = JavaConversions.asScalaBuffer(argList).toList();

        settings.processArguments(list, true);
    }

    // set classpath for scala compiler
    PathSetting pathSettings = settings.classpath();
    String classpath = "";

    List<File> paths = currentClassPath();
    for (File f : paths) {
        if (classpath.length() > 0) {
            classpath += File.pathSeparator;
        }
        classpath += f.getAbsolutePath();
    }

    if (urls != null) {
        for (URL u : urls) {
            if (classpath.length() > 0) {
                classpath += File.pathSeparator;
            }
            classpath += u.getFile();
        }
    }

    // add dependency from DepInterpreter
    if (classpath.length() > 0) {
        classpath += File.pathSeparator;
    }
    classpath += depInterpreterClasspath;

    // add dependency from local repo
    String localRepo = getProperty("zeppelin.interpreter.localRepo");
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (classpath.length() > 0) {
                        classpath += File.pathSeparator;
                    }
                    classpath += f.getAbsolutePath();
                }
            }
        }
    }

    pathSettings.v_$eq(classpath);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$classpath_$eq(pathSettings);

    // set classloader for scala compiler
    settings.explicitParentLoader_$eq(new Some<>(Thread.currentThread().getContextClassLoader()));
    BooleanSetting b = (BooleanSetting) settings.usejavacp();
    b.v_$eq(true);
    settings.scala$tools$nsc$settings$StandardScalaSettings$_setter_$usejavacp_$eq(b);

    /* Required for scoped mode.
     * In scoped mode, multiple scala compilers (repls) generate classes in the same
     * directory. Class names are not randomly generated and look like
     * '$line12.$read$$iw$$iw', so a class generated by one repl can conflict with
     * (overwrite) a class generated by another.
     *
     * To prevent generated class name conflicts, change the prefix of the generated
     * class names for each scala compiler (repl) instance.
     *
     * In Spark 2.x, REPL-generated wrapper class names must match the pattern
     * ^(\$line(?:\d+)\.\$read)(?:\$\$iw)+$
     *
     * As hashCode() can return a negative integer and the minus character '-' is
     * invalid in a package name, we replace it with the numeric character '0',
     * which still conforms to the regexp.
     */
    System.setProperty("scala.repl.name.line", ("$line" + this.hashCode()).replace('-', '0'));

    // To prevent 'File name too long' errors on some file systems.
    MutableSettings.IntSetting numClassFileSetting = settings.maxClassfileName();
    numClassFileSetting.v_$eq(128);
    settings.scala$tools$nsc$settings$ScalaSettings$_setter_$maxClassfileName_$eq(numClassFileSetting);

    synchronized (sharedInterpreterLock) {
        /* create scala repl */
        if (printREPLOutput()) {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null, new PrintWriter(out));
        } else {
            this.interpreter = new SparkILoop((java.io.BufferedReader) null,
                    new PrintWriter(Console.out(), false));
        }

        interpreter.settings_$eq(settings);

        interpreter.createInterpreter();

        intp = Utils.invokeMethod(interpreter, "intp");
        Utils.invokeMethod(intp, "setContextClassLoader");
        Utils.invokeMethod(intp, "initializeSynchronous");

        if (Utils.isScala2_10()) {
            if (classOutputDir == null) {
                classOutputDir = settings.outputDirs().getSingleOutput().get();
            } else {
                // change SparkIMain class output dir
                settings.outputDirs().setSingleOutput(classOutputDir);
                ClassLoader cl = (ClassLoader) Utils.invokeMethod(intp, "classLoader");
                try {
                    Field rootField = cl.getClass().getSuperclass().getDeclaredField("root");
                    rootField.setAccessible(true);
                    rootField.set(cl, classOutputDir);
                } catch (NoSuchFieldException | IllegalAccessException e) {
                    logger.error(e.getMessage(), e);
                }
            }
        }

        if (Utils.findClass("org.apache.spark.repl.SparkJLineCompletion", true) != null) {
            completer = Utils.instantiateClass("org.apache.spark.repl.SparkJLineCompletion",
                    new Class[] { Utils.findClass("org.apache.spark.repl.SparkIMain") }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.PresentationCompilerCompleter",
                    new Class[] { IMain.class }, new Object[] { intp });
        } else if (Utils.findClass("scala.tools.nsc.interpreter.JLineCompletion", true) != null) {
            completer = Utils.instantiateClass("scala.tools.nsc.interpreter.JLineCompletion",
                    new Class[] { IMain.class }, new Object[] { intp });
        }

        if (Utils.isSpark2()) {
            sparkSession = getSparkSession();
        }
        sc = getSparkContext();
        if (sc.getPoolForName("fair").isEmpty()) {
            Value schedulingMode = org.apache.spark.scheduler.SchedulingMode.FAIR();
            int minimumShare = 0;
            int weight = 1;
            Pool pool = new Pool("fair", schedulingMode, minimumShare, weight);
            sc.taskScheduler().rootPool().addSchedulable(pool);
        }

        sparkVersion = SparkVersion.fromVersionString(sc.version());

        sqlc = getSQLContext();

        dep = getDependencyResolver();

        hooks = getInterpreterGroup().getInterpreterHookRegistry();

        z = new ZeppelinContext(sc, sqlc, null, dep, hooks,
                Integer.parseInt(getProperty("zeppelin.spark.maxResult")));

        interpret("@transient val _binder = new java.util.HashMap[String, Object]()");
        Map<String, Object> binder;
        if (Utils.isScala2_10()) {
            binder = (Map<String, Object>) getValue("_binder");
        } else {
            binder = (Map<String, Object>) getLastObject();
        }
        binder.put("sc", sc);
        binder.put("sqlc", sqlc);
        binder.put("z", z);

        if (Utils.isSpark2()) {
            binder.put("spark", sparkSession);
        }

        interpret("@transient val z = "
                + "_binder.get(\"z\").asInstanceOf[org.apache.zeppelin.spark.ZeppelinContext]");
        interpret("@transient val sc = " + "_binder.get(\"sc\").asInstanceOf[org.apache.spark.SparkContext]");
        interpret("@transient val sqlc = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");
        interpret("@transient val sqlContext = "
                + "_binder.get(\"sqlc\").asInstanceOf[org.apache.spark.sql.SQLContext]");

        if (Utils.isSpark2()) {
            interpret("@transient val spark = "
                    + "_binder.get(\"spark\").asInstanceOf[org.apache.spark.sql.SparkSession]");
        }

        interpret("import org.apache.spark.SparkContext._");

        if (importImplicit()) {
            if (Utils.isSpark2()) {
                interpret("import spark.implicits._");
                interpret("import spark.sql");
                interpret("import org.apache.spark.sql.functions._");
            } else {
                if (sparkVersion.oldSqlContextImplicits()) {
                    interpret("import sqlContext._");
                } else {
                    interpret("import sqlContext.implicits._");
                    interpret("import sqlContext.sql");
                    interpret("import org.apache.spark.sql.functions._");
                }
            }
        }
    }

    /* Temporary disabling DisplayUtils. see https://issues.apache.org/jira/browse/ZEPPELIN-127
     *
    // Utility functions for display
    intp.interpret("import org.apache.zeppelin.spark.utils.DisplayUtils._");
            
    // Scala implicit value for spark.maxResult
    intp.interpret("import org.apache.zeppelin.spark.utils.SparkMaxResult");
    intp.interpret("implicit val sparkMaxResult = new SparkMaxResult(" +
    Integer.parseInt(getProperty("zeppelin.spark.maxResult")) + ")");
     */

    if (Utils.isScala2_10()) {
        try {
            if (sparkVersion.oldLoadFilesMethodName()) {
                Method loadFiles = this.interpreter.getClass().getMethod("loadFiles", Settings.class);
                loadFiles.invoke(this.interpreter, settings);
            } else {
                Method loadFiles = this.interpreter.getClass()
                        .getMethod("org$apache$spark$repl$SparkILoop$$loadFiles", Settings.class);
                loadFiles.invoke(this.interpreter, settings);
            }
        } catch (NoSuchMethodException | SecurityException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException e) {
            throw new InterpreterException(e);
        }
    }

    // add jar from DepInterpreter
    if (depInterpreter != null) {
        SparkDependencyContext depc = depInterpreter.getDependencyContext();
        if (depc != null) {
            List<File> files = depc.getFilesDist();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    // add jar from local repo
    if (localRepo != null) {
        File localRepoDir = new File(localRepo);
        if (localRepoDir.exists()) {
            File[] files = localRepoDir.listFiles();
            if (files != null) {
                for (File f : files) {
                    if (f.getName().toLowerCase().endsWith(".jar")) {
                        sc.addJar(f.getAbsolutePath());
                        logger.info("sc.addJar(" + f.getAbsolutePath() + ")");
                    } else {
                        sc.addFile(f.getAbsolutePath());
                        logger.info("sc.addFile(" + f.getAbsolutePath() + ")");
                    }
                }
            }
        }
    }

    numReferenceOfSparkContext.incrementAndGet();
}

From source file:org.apache.geode.management.internal.cli.commands.GfshCommandJUnitTest.java

@Test
public void testToClassPathOrder() {
    String userClasspathOne = "/path/to/user/lib/a.jar:/path/to/user/classes";
    String userClasspathTwo = "/path/to/user/lib/x.jar:/path/to/user/lib/y.jar:/path/to/user/lib/z.jar";

    String expectedClasspath = StartMemberUtils.getGemFireJarPath().concat(File.pathSeparator)
            .concat(userClasspathOne).concat(File.pathSeparator).concat(userClasspathTwo)
            .concat(File.pathSeparator).concat(System.getProperty("java.class.path")).concat(File.pathSeparator)
            .concat(StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME).concat(File.pathSeparator)
            .concat(StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME);

    String actualClasspath = StartMemberUtils
            .toClasspath(true,
                    new String[] { StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME,
                            StartMemberUtils.CORE_DEPENDENCIES_JAR_PATHNAME },
                    userClasspathOne, userClasspathTwo);

    assertEquals(expectedClasspath, actualClasspath);
}

From source file:org.hyperic.hq.product.ProductPluginManager.java

public Collection<PluginInfo> registerPlugins(String path, Collection<PluginInfo> excludes) {
    Collection<PluginInfo> rtn = new ArrayList<PluginInfo>();
    List<String> dirs = StringUtil.explode(path, File.pathSeparator);
    for (int i = 0; i < dirs.size(); i++) {
        File dir = new File(dirs.get(i));
        if (!dir.exists()) {
            log.warn("register plugins: " + dir + " does not exist");
            continue;
        }
        if (!dir.isDirectory()) {
            log.warn("register plugins: " + dir + " not a directory");
            continue;
        }
        File[] plugins = listPlugins(dir);
        Collection<PluginInfo> pluginInfo = register(Arrays.asList(plugins), excludes);
        rtn.addAll(pluginInfo);
    }
    return rtn;
}

From source file:org.apache.maven.cli.MavenCli.java

private List<File> parseExtClasspath(CliRequest cliRequest) {
    String extClassPath = cliRequest.userProperties.getProperty(EXT_CLASS_PATH);
    if (extClassPath == null) {
        extClassPath = cliRequest.systemProperties.getProperty(EXT_CLASS_PATH);
    }

    List<File> jars = new ArrayList<>();

    if (StringUtils.isNotEmpty(extClassPath)) {
        for (String jar : StringUtils.split(extClassPath, File.pathSeparator)) {
            File file = resolveFile(new File(jar), cliRequest.workingDirectory);

            slf4jLogger.debug("  Included " + file);

            jars.add(file);
        }
    }

    return jars;
}