Example usage for java.io.File.mkdirs()

A list of usage examples for java.io.File.mkdirs()

Introduction

On this page you can find example usages of java.io.File.mkdirs().

Prototype

public boolean mkdirs() 

Document

Creates the directory named by this abstract pathname, including any necessary but nonexistent parent directories. Returns true if and only if the directory was created, along with all necessary parent directories; a false return can mean either that creation failed or that the directory already existed.
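
Before the project code below, here is a minimal self-contained sketch of the typical call pattern (the path and class name are illustrative only): create a directory tree if it is missing and verify the result, since a false return may simply mean the directory already exists.

import java.io.File;
import java.io.IOException;

public class MkdirsExample {
    public static void main(String[] args) throws IOException {
        // Illustrative path: mkdirs() also creates any missing parent directories ("build" and "reports").
        File outputDir = new File("build/reports/daily");

        // mkdirs() returns false both when creation fails and when the directory already
        // exists, so check isDirectory() before treating a false result as an error.
        if (!outputDir.mkdirs() && !outputDir.isDirectory()) {
            throw new IOException("Could not create directory: " + outputDir.getAbsolutePath());
        }

        System.out.println("Ready: " + outputDir.getAbsolutePath());
    }
}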

Usage

From source file:com.mmounirou.spotirss.SpotiRss.java

/**
 * @param args
 * @throws IOException 
 * @throws ClassNotFoundException 
 * @throws IllegalAccessException 
 * @throws InstantiationException 
 * @throws SpotifyClientException 
 * @throws ChartRssException 
 * @throws SpotifyException 
 */
public static void main(String[] args) throws IOException, InstantiationException, IllegalAccessException,
        ClassNotFoundException, SpotifyClientException {
    if (args.length == 0) {
        System.err.println("usage : java -jar spotiboard.jar <charts-folder>");
        return;
    }

    Properties connProperties = new Properties();
    InputStream inStream = SpotiRss.class.getResourceAsStream("/spotify-server.properties");
    try {
        connProperties.load(inStream);
    } finally {
        IOUtils.closeQuietly(inStream);
    }

    String host = connProperties.getProperty("host");
    int port = Integer.parseInt(connProperties.getProperty("port"));
    String user = connProperties.getProperty("user");

    final SpotifyClient spotifyClient = new SpotifyClient(host, port, user);
    final Map<String, Playlist> playlistsByTitle = getPlaylistsByTitle(spotifyClient);

    final File outputDir = new File(args[0]);
    outputDir.mkdirs();
    TrackCache cache = new TrackCache();
    try {

        for (String strProvider : PROVIDERS) {
            String providerClassName = EntryToTrackConverter.class.getPackage().getName() + "."
                    + StringUtils.capitalize(strProvider);
            final EntryToTrackConverter converter = (EntryToTrackConverter) SpotiRss.class.getClassLoader()
                    .loadClass(providerClassName).newInstance();
            Iterable<String> chartsRss = getCharts(strProvider);
            final File resultDir = new File(outputDir, strProvider);
            resultDir.mkdir();

            final SpotifyHrefQuery hrefQuery = new SpotifyHrefQuery(cache);
            Iterable<String> results = FluentIterable.from(chartsRss).transform(new Function<String, String>() {

                @Override
                @Nullable
                public String apply(@Nullable String chartRss) {

                    try {

                        long begin = System.currentTimeMillis();
                        ChartRss bilboardChartRss = ChartRss.getInstance(chartRss, converter);
                        Map<Track, String> trackHrefs = hrefQuery.getTrackHrefs(bilboardChartRss.getSongs());

                        String strTitle = bilboardChartRss.getTitle();
                        File resultFile = new File(resultDir, strTitle);
                        List<String> lines = Lists.newLinkedList(FluentIterable.from(trackHrefs.keySet())
                                .transform(Functions.toStringFunction()));
                        lines.addAll(trackHrefs.values());
                        FileUtils.writeLines(resultFile, Charsets.UTF_8.displayName(), lines);

                        Playlist playlist = playlistsByTitle.get(strTitle);
                        if (playlist != null) {
                            playlist.getTracks().clear();
                            playlist.getTracks().addAll(trackHrefs.values());
                            spotifyClient.patch(playlist);
                            LOGGER.info(String.format("%s chart exported patched", strTitle));
                        }

                        LOGGER.info(String.format("%s chart exported in %s in %d s", strTitle,
                                resultFile.getAbsolutePath(),
                                (int) TimeUnit.MILLISECONDS.toSeconds(System.currentTimeMillis() - begin)));

                    } catch (Exception e) {
                        LOGGER.error(String.format("fail to export %s charts", chartRss), e);
                    }

                    return "";
                }
            });

            // consume the lazy iterable so the transformation (and chart export) actually runs
            Iterables.size(results);

        }

    } finally {
        cache.close();
    }

}

From source file:com.ctriposs.rest4j.tools.data.FilterSchemaGenerator.java

public static void main(String[] args) {
    final CommandLineParser parser = new GnuParser();
    CommandLine cl = null;
    try {
        cl = parser.parse(_options, args);
    } catch (ParseException e) {
        _log.error("Invalid arguments: " + e.getMessage());
        reportInvalidArguments();
    }

    final String[] directoryArgs = cl.getArgs();
    if (directoryArgs.length != 2) {
        reportInvalidArguments();
    }

    final File sourceDirectory = new File(directoryArgs[0]);
    if (!sourceDirectory.exists()) {
        _log.error(sourceDirectory.getPath() + " does not exist");
        System.exit(1);
    }
    if (!sourceDirectory.isDirectory()) {
        _log.error(sourceDirectory.getPath() + " is not a directory");
        System.exit(1);
    }
    final URI sourceDirectoryURI = sourceDirectory.toURI();

    final File outputDirectory = new File(directoryArgs[1]);
    if (outputDirectory.exists() && !outputDirectory.isDirectory()) {
        _log.error(outputDirectory.getPath() + " is not a directory");
        System.exit(1);
    }

    final boolean isAvroMode = cl.hasOption('a');
    final String predicateExpression = cl.getOptionValue('e');
    final Predicate predicate = PredicateExpressionParser.parse(predicateExpression);

    final Collection<File> sourceFiles = FileUtil.listFiles(sourceDirectory, null);
    int exitCode = 0;
    for (File sourceFile : sourceFiles) {
        try {
            final ValidationOptions val = new ValidationOptions();
            val.setAvroUnionMode(isAvroMode);

            final SchemaParser schemaParser = new SchemaParser();
            schemaParser.setValidationOptions(val);

            schemaParser.parse(new FileInputStream(sourceFile));
            if (schemaParser.hasError()) {
                _log.error("Error parsing " + sourceFile.getPath() + ": "
                        + schemaParser.errorMessageBuilder().toString());
                exitCode = 1;
                continue;
            }

            final DataSchema originalSchema = schemaParser.topLevelDataSchemas().get(0);
            if (!(originalSchema instanceof NamedDataSchema)) {
                _log.error(sourceFile.getPath() + " does not contain valid NamedDataSchema");
                exitCode = 1;
                continue;
            }

            final SchemaParser filterParser = new SchemaParser();
            filterParser.setValidationOptions(val);

            final NamedDataSchema filteredSchema = Filters.removeByPredicate((NamedDataSchema) originalSchema,
                    predicate, filterParser);
            if (filterParser.hasError()) {
                _log.error("Error applying predicate: " + filterParser.errorMessageBuilder().toString());
                exitCode = 1;
                continue;
            }

            final String relativePath = sourceDirectoryURI.relativize(sourceFile.toURI()).getPath();
            final String outputFilePath = outputDirectory.getPath() + File.separator + relativePath;
            final File outputFile = new File(outputFilePath);
            final File outputFileParent = outputFile.getParentFile();
            outputFileParent.mkdirs();
            if (!outputFileParent.exists()) {
                _log.error("Unable to write filtered schema to " + outputFileParent.getPath());
                exitCode = 1;
                continue;
            }

            FileOutputStream fout = new FileOutputStream(outputFile);
            fout.write(filteredSchema.toString().getBytes(RestConstants.DEFAULT_CHARSET));
            fout.close();
        } catch (IOException e) {
            _log.error(e.getMessage());
            exitCode = 1;
        }
    }

    System.exit(exitCode);
}

From source file:jvmoptions.OptionAnalyzer.java

public static void main(String[] args) throws Exception {
    File f = new File("result");
    if (f.exists() == false && f.mkdirs() == false) {
        System.exit(1);
    }
    Path json = toJson("java6", "java7", "java8");
    toCSV(json);
}

From source file:com.linkedin.restli.tools.data.FilterSchemaGenerator.java

public static void main(String[] args) {
    CommandLine cl = null;
    try {
        final CommandLineParser parser = new GnuParser();
        cl = parser.parse(_options, args);
    } catch (ParseException e) {
        _log.error("Invalid arguments: " + e.getMessage());
        reportInvalidArguments();
    }

    final String[] directoryArgs = cl.getArgs();
    if (directoryArgs.length != 2) {
        reportInvalidArguments();
    }

    final File sourceDirectory = new File(directoryArgs[0]);
    if (!sourceDirectory.exists()) {
        _log.error(sourceDirectory.getPath() + " does not exist");
        System.exit(1);
    }
    if (!sourceDirectory.isDirectory()) {
        _log.error(sourceDirectory.getPath() + " is not a directory");
        System.exit(1);
    }
    final URI sourceDirectoryURI = sourceDirectory.toURI();

    final File outputDirectory = new File(directoryArgs[1]);
    if (outputDirectory.exists() && !outputDirectory.isDirectory()) {
        _log.error(outputDirectory.getPath() + " is not a directory");
        System.exit(1);
    }

    final boolean isAvroMode = cl.hasOption('a');
    final String predicateExpression = cl.getOptionValue('e');
    final Predicate predicate = PredicateExpressionParser.parse(predicateExpression);

    final Collection<File> sourceFiles = FileUtil.listFiles(sourceDirectory, null);
    int exitCode = 0;
    for (File sourceFile : sourceFiles) {
        try {
            final ValidationOptions val = new ValidationOptions();
            val.setAvroUnionMode(isAvroMode);

            final SchemaParser schemaParser = new SchemaParser();
            schemaParser.setValidationOptions(val);

            schemaParser.parse(new FileInputStream(sourceFile));
            if (schemaParser.hasError()) {
                _log.error("Error parsing " + sourceFile.getPath() + ": " + schemaParser.errorMessageBuilder());
                exitCode = 1;
                continue;
            }

            final DataSchema originalSchema = schemaParser.topLevelDataSchemas().get(0);
            if (!(originalSchema instanceof NamedDataSchema)) {
                _log.error(sourceFile.getPath() + " does not contain valid NamedDataSchema");
                exitCode = 1;
                continue;
            }

            final SchemaParser filterParser = new SchemaParser();
            filterParser.setValidationOptions(val);

            final NamedDataSchema filteredSchema = Filters.removeByPredicate((NamedDataSchema) originalSchema,
                    predicate, filterParser);
            if (filterParser.hasError()) {
                _log.error("Error applying predicate: " + filterParser.errorMessageBuilder());
                exitCode = 1;
                continue;
            }

            final String relativePath = sourceDirectoryURI.relativize(sourceFile.toURI()).getPath();
            final String outputFilePath = outputDirectory.getPath() + File.separator + relativePath;
            final File outputFile = new File(outputFilePath);
            final File outputFileParent = outputFile.getParentFile();
            outputFileParent.mkdirs();
            if (!outputFileParent.exists()) {
                _log.error("Unable to write filtered schema to " + outputFileParent.getPath());
                exitCode = 1;
                continue;
            }

            FileOutputStream fout = new FileOutputStream(outputFile);
            String schemaJson = SchemaToJsonEncoder.schemaToJson(filteredSchema, JsonBuilder.Pretty.INDENTED);
            fout.write(schemaJson.getBytes(RestConstants.DEFAULT_CHARSET));
            fout.close();
        } catch (IOException e) {
            _log.error(e.getMessage());
            exitCode = 1;
        }
    }

    System.exit(exitCode);
}

From source file:PinotResponseTime.java

public static void main(String[] args) throws Exception {
    try (CloseableHttpClient client = HttpClients.createDefault()) {
        HttpPost post = new HttpPost("http://localhost:8099/query");
        CloseableHttpResponse res;

        if (STORE_RESULT) {
            File dir = new File(RESULT_DIR);
            if (!dir.exists()) {
                dir.mkdirs();
            }
        }

        int length;

        // Make sure all segments are online
        System.out.println("Test if number of records is " + RECORD_NUMBER);
        post.setEntity(new StringEntity("{\"pql\":\"select count(*) from tpch_lineitem\"}"));
        while (true) {
            System.out.print('*');
            res = client.execute(post);
            boolean valid;
            try (BufferedInputStream in = new BufferedInputStream(res.getEntity().getContent())) {
                length = in.read(BUFFER);
                valid = new String(BUFFER, 0, length, "UTF-8").contains("\"value\":\"" + RECORD_NUMBER + "\"");
            }
            res.close();
            if (valid) {
                break;
            } else {
                Thread.sleep(5000);
            }
        }
        System.out.println("Number of Records Test Passed");

        // Start Benchmark
        for (int i = 0; i < QUERIES.length; i++) {
            System.out.println(
                    "--------------------------------------------------------------------------------");
            System.out.println("Start running query: " + QUERIES[i]);
            post.setEntity(new StringEntity("{\"pql\":\"" + QUERIES[i] + "\"}"));

            // Warm-up Rounds
            System.out.println("Run " + WARMUP_ROUND + " times to warm up cache...");
            for (int j = 0; j < WARMUP_ROUND; j++) {
                res = client.execute(post);
                if (!isValid(res, null)) {
                    System.out.println("\nInvalid Response, Sleep 20 Seconds...");
                    Thread.sleep(20000);
                }
                res.close();
                System.out.print('*');
            }
            System.out.println();

            // Test Rounds
            int[] time = new int[TEST_ROUND];
            int totalTime = 0;
            int validIdx = 0;
            System.out.println("Run " + TEST_ROUND + " times to get average time...");
            while (validIdx < TEST_ROUND) {
                long startTime = System.currentTimeMillis();
                res = client.execute(post);
                long endTime = System.currentTimeMillis();
                boolean valid;
                if (STORE_RESULT && validIdx == 0) {
                    valid = isValid(res, RESULT_DIR + File.separator + i + ".json");
                } else {
                    valid = isValid(res, null);
                }
                if (!valid) {
                    System.out.println("\nInvalid Response, Sleep 20 Seconds...");
                    Thread.sleep(20000);
                    res.close();
                    continue;
                }
                res.close();
                time[validIdx] = (int) (endTime - startTime);
                totalTime += time[validIdx];
                System.out.print(time[validIdx] + "ms ");
                validIdx++;
            }
            System.out.println();

            // Process Results
            double avgTime = (double) totalTime / TEST_ROUND;
            double stdDev = 0;
            for (int temp : time) {
                stdDev += (temp - avgTime) * (temp - avgTime) / TEST_ROUND;
            }
            stdDev = Math.sqrt(stdDev);
            System.out.println("The average response time for the query is: " + avgTime + "ms");
            System.out.println("The standard deviation is: " + stdDev);
        }
    }
}

From source file:com.xiaoxiaomo.flink.batch.distcp.DistCp.java

public static void main(String[] args) throws Exception {

    // set up the execution environment
    final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    ParameterTool params = ParameterTool.fromArgs(args);
    if (!params.has("input") || !params.has("output")) {
        System.err.println("Usage: --input <path> --output <path> [--parallelism <n>]");
        return;
    }

    final Path sourcePath = new Path(params.get("input"));
    final Path targetPath = new Path(params.get("output"));
    if (!isLocal(env) && !(isOnDistributedFS(sourcePath) && isOnDistributedFS(targetPath))) {
        System.out.println("In a distributed mode only HDFS input/output paths are supported");
        return;
    }

    final int parallelism = params.getInt("parallelism", 10);
    if (parallelism <= 0) {
        System.err.println("Parallelism should be greater than 0");
        return;
    }

    // make parameters available in the web interface
    env.getConfig().setGlobalJobParameters(params);

    env.setParallelism(parallelism);

    long startTime = System.currentTimeMillis();
    LOGGER.info("Initializing copy tasks");
    List<FileCopyTask> tasks = getCopyTasks(sourcePath);
    LOGGER.info("Copy task initialization took " + (System.currentTimeMillis() - startTime) + "ms");

    DataSet<FileCopyTask> inputTasks = new DataSource<FileCopyTask>(env, new FileCopyTaskInputFormat(tasks),
            new GenericTypeInfo<FileCopyTask>(FileCopyTask.class), "fileCopyTasks");

    FlatMapOperator<FileCopyTask, Object> res = inputTasks
            .flatMap(new RichFlatMapFunction<FileCopyTask, Object>() {

                private static final long serialVersionUID = 1109254230243989929L;
                private LongCounter fileCounter;
                private LongCounter bytesCounter;

                @Override
                public void open(Configuration parameters) throws Exception {
                    bytesCounter = getRuntimeContext().getLongCounter(BYTES_COPIED_CNT_NAME);
                    fileCounter = getRuntimeContext().getLongCounter(FILES_COPIED_CNT_NAME);
                }

                @Override
                public void flatMap(FileCopyTask task, Collector<Object> out) throws Exception {
                    LOGGER.info("Processing task: " + task);
                    Path outPath = new Path(targetPath, task.getRelativePath());

                    FileSystem targetFs = targetPath.getFileSystem();
                    // creating parent folders in case of a local FS
                    if (!targetFs.isDistributedFS()) {
                        //dealing with cases like file:///tmp or just /tmp
                        File outFile = outPath.toUri().isAbsolute() ? new File(outPath.toUri())
                                : new File(outPath.toString());
                        File parentFile = outFile.getParentFile();
                        if (!parentFile.mkdirs() && !parentFile.exists()) {
                            throw new RuntimeException(
                                    "Cannot create local file system directories: " + parentFile);
                        }
                    }
                    FSDataOutputStream outputStream = null;
                    FSDataInputStream inputStream = null;
                    try {
                        outputStream = targetFs.create(outPath, true);
                        inputStream = task.getPath().getFileSystem().open(task.getPath());
                        int bytes = IOUtils.copy(inputStream, outputStream);
                        bytesCounter.add(bytes);
                    } finally {
                        IOUtils.closeQuietly(inputStream);
                        IOUtils.closeQuietly(outputStream);
                    }
                    fileCounter.add(1L);
                }
            });

    // no data sinks are needed, therefore just printing an empty result
    res.print();

    Map<String, Object> accumulators = env.getLastJobExecutionResult().getAllAccumulatorResults();
    LOGGER.info("== COUNTERS ==");
    for (Map.Entry<String, Object> e : accumulators.entrySet()) {
        LOGGER.info(e.getKey() + ": " + e.getValue());
    }
}

From source file:de.tudarmstadt.ukp.argumentation.data.roomfordebate.DataFetcher.java

public static void main(String[] args) throws Exception {
    File crawledPagesFolder = new File(args[0]);
    if (!crawledPagesFolder.exists()) {
        crawledPagesFolder.mkdirs();
    }

    File outputFolder = new File(args[1]);
    if (!outputFolder.exists()) {
        outputFolder.mkdirs();
    }

    // read links from text file
    final String urlsResourceName = "roomfordebate-urls.txt";

    InputStream urlsStream = DataFetcher.class.getClassLoader().getResourceAsStream(urlsResourceName);

    if (urlsStream == null) {
        throw new IOException("Cannot find resource " + urlsResourceName + " on the classpath");
    }

    // read list of urls
    List<String> urls = new ArrayList<>();
    LineIterator iterator = IOUtils.lineIterator(urlsStream, "utf-8");
    while (iterator.hasNext()) {
        // ignore commented-out URLs (lines starting with #) and blank lines
        String line = iterator.nextLine();
        if (!line.startsWith("#") && !line.trim().isEmpty()) {
            urls.add(line.trim());
        }
    }

    // download all
    crawlPages(urls, crawledPagesFolder);

    List<File> files = new ArrayList<>(FileUtils.listFiles(crawledPagesFolder, null, false));
    Collections.sort(files, new Comparator<File>() {
        @Override
        public int compare(File o1, File o2) {
            return o1.getName().compareTo(o2.getName());
        }
    });

    int idCounter = 0;

    for (File file : files) {
        NYTimesCommentsScraper commentsScraper = new NYTimesCommentsScraper();
        NYTimesArticleExtractor extractor = new NYTimesArticleExtractor();

        String html = FileUtils.readFileToString(file, "utf-8");

        idCounter++;
        File outputFileArticle = new File(outputFolder, String.format("Cx%03d.txt", idCounter));
        File outputFileComments = new File(outputFolder, String.format("Dx%03d.txt", idCounter));

        try {
            List<Comment> comments = commentsScraper.extractComments(html);
            Article article = extractor.extractArticle(html);

            saveArticleToText(article, outputFileArticle);
            System.out.println("Saved to " + outputFileArticle);

            saveCommentsToText(comments, outputFileComments, article);
            System.out.println("Saved to " + outputFileComments);
        } catch (IOException ex) {
            System.err.println(file.getName() + "\n" + ex.getMessage());
        }
    }
}

From source file:de.tudarmstadt.ukp.experiments.dip.wp1.documents.Step5LinguisticPreprocessing.java

public static void main(String[] args) throws Exception {
    // input dir - list of xml query containers
    // step4-boiler-plate/
    File inputDir = new File(args[0]);

    // output dir
    File outputDir = new File(args[1]);
    if (!outputDir.exists()) {
        outputDir.mkdirs();
    }

    // iterate over query containers
    for (File f : FileUtils.listFiles(inputDir, new String[] { "xml" }, false)) {
        QueryResultContainer queryResultContainer = QueryResultContainer
                .fromXML(FileUtils.readFileToString(f, "utf-8"));

        for (QueryResultContainer.SingleRankedResult rankedResults : queryResultContainer.rankedResults) {
            //                System.out.println(rankedResults.plainText);

            if (rankedResults.plainText != null) {
                String[] lines = StringUtils.split(rankedResults.plainText, "\n");

                // collecting all cleaned lines
                List<String> cleanLines = new ArrayList<>(lines.length);
                // collecting line tags
                List<String> lineTags = new ArrayList<>(lines.length);

                for (String line : lines) {
                    // get the tag
                    String tag = null;
                    Matcher m = OPENING_TAG_PATTERN.matcher(line);

                    if (m.find()) {
                        tag = m.group(1);
                    }

                    if (tag == null) {
                        throw new IllegalArgumentException("No html tag found for line:\n" + line);
                    }

                    // replace the tag at the beginning and the end
                    String noTagText = line.replaceAll("^<\\S+>", "").replaceAll("</\\S+>$", "");

                    // do some html cleaning
                    noTagText = noTagText.replaceAll("&nbsp;", " ");

                    noTagText = noTagText.trim();

                    // add to the output
                    if (!noTagText.isEmpty()) {
                        cleanLines.add(noTagText);
                        lineTags.add(tag);
                    }
                }

                if (cleanLines.isEmpty()) {
                    // the document is empty
                    System.err.println("Document " + rankedResults.clueWebID + " in query "
                            + queryResultContainer.qID + " is empty");
                } else {
                    // now join them back to paragraphs
                    String text = StringUtils.join(cleanLines, "\n");

                    // create JCas
                    JCas jCas = JCasFactory.createJCas();
                    jCas.setDocumentText(text);
                    jCas.setDocumentLanguage("en");

                    // annotate WebParagraph
                    SimplePipeline.runPipeline(jCas,
                            AnalysisEngineFactory.createEngineDescription(WebParagraphAnnotator.class));

                    // fill the original tag information
                    List<WebParagraph> webParagraphs = new ArrayList<>(
                            JCasUtil.select(jCas, WebParagraph.class));

                    // they must be the same size as original ones
                    if (webParagraphs.size() != lineTags.size()) {
                        throw new IllegalStateException(
                                "Different size of annotated paragraphs and original lines");
                    }

                    for (int i = 0; i < webParagraphs.size(); i++) {
                        WebParagraph p = webParagraphs.get(i);
                        // get tag
                        String tag = lineTags.get(i);

                        p.setOriginalHtmlTag(tag);
                    }

                    SimplePipeline.runPipeline(jCas,
                            AnalysisEngineFactory.createEngineDescription(StanfordSegmenter.class,
                                    // only on existing WebParagraph annotations
                                    StanfordSegmenter.PARAM_ZONE_TYPES, WebParagraph.class.getCanonicalName()));

                    // now convert to XMI
                    ByteArrayOutputStream byteOutputStream = new ByteArrayOutputStream();
                    XmiCasSerializer.serialize(jCas.getCas(), byteOutputStream);

                    // encode to base64
                    String encoded = new BASE64Encoder().encode(byteOutputStream.toByteArray());

                    rankedResults.originalXmi = encoded;
                }
            }
        }

        // and save the query to output dir
        File outputFile = new File(outputDir, queryResultContainer.qID + ".xml");
        FileUtils.writeStringToFile(outputFile, queryResultContainer.toXML(), "utf-8");
        System.out.println("Finished " + outputFile);
    }

}

From source file:com.jivesoftware.os.routing.bird.deployable.config.extractor.ConfigExtractor.java

public static void main(String[] args) {
    String configHost = args[0];
    String configPort = args[1];
    String instanceKey = args[2];
    String instanceVersion = args[3];
    String setPath = args[4];
    String getPath = args[5];

    HttpRequestHelper buildRequestHelper = buildRequestHelper(null, configHost, Integer.parseInt(configPort));

    try {
        Set<URL> packages = new HashSet<>();
        for (int i = 5; i < args.length; i++) {
            packages.addAll(ClasspathHelper.forPackage(args[i]));
        }

        Reflections reflections = new Reflections(new ConfigurationBuilder().setUrls(packages)
                .setScanners(new SubTypesScanner(), new TypesScanner()));

        Set<Class<? extends Config>> subTypesOf = reflections.getSubTypesOf(Config.class);

        File configDir = new File("./config");
        configDir.mkdirs();

        Set<Class<? extends Config>> serviceConfig = new HashSet<>();
        Set<Class<? extends Config>> healthConfig = new HashSet<>();
        for (Class<? extends Config> type : subTypesOf) {
            if (HealthCheckConfig.class.isAssignableFrom(type)) {
                healthConfig.add(type);
            } else {
                serviceConfig.add(type);
            }
        }

        Map<String, String> defaultServiceConfig = extractAndPublish(serviceConfig,
                new File(configDir, "default-service-config.properties"), "default", instanceKey,
                instanceVersion, buildRequestHelper, setPath);

        DeployableConfig getServiceOverrides = new DeployableConfig("override", instanceKey, instanceVersion,
                defaultServiceConfig);
        DeployableConfig gotSerivceConfig = buildRequestHelper.executeRequest(getServiceOverrides, getPath,
                DeployableConfig.class, null);
        if (gotSerivceConfig == null) {
            System.out.println("Failed to publish default service config for " + Arrays.deepToString(args));
        } else {
            Properties override = createKeySortedProperties();
            override.putAll(gotSerivceConfig.properties);
            override.store(new FileOutputStream("config/override-service-config.properties"), "");
        }

        Map<String, String> defaultHealthConfig = extractAndPublish(healthConfig,
                new File(configDir, "default-health-config.properties"), "default-health", instanceKey,
                instanceVersion, buildRequestHelper, setPath);

        DeployableConfig getHealthOverrides = new DeployableConfig("override-health", instanceKey,
                instanceVersion, defaultHealthConfig);
        DeployableConfig gotHealthConfig = buildRequestHelper.executeRequest(getHealthOverrides, getPath,
                DeployableConfig.class, null);
        if (gotHealthConfig == null) {
            System.out.println("Failed to publish default health config for " + Arrays.deepToString(args));
        } else {
            Properties override = createKeySortedProperties();
            override.putAll(gotHealthConfig.properties);
            override.store(new FileOutputStream("config/override-health-config.properties"), "");
        }

        Properties instanceProperties = createKeySortedProperties();
        File configFile = new File("config/instance.properties");
        if (configFile.exists()) {
            instanceProperties.load(new FileInputStream(configFile));
        }

        Properties serviceOverrideProperties = createKeySortedProperties();
        configFile = new File("config/override-service-config.properties");
        if (configFile.exists()) {
            serviceOverrideProperties.load(new FileInputStream(configFile));
        }

        Properties healthOverrideProperties = createKeySortedProperties();
        configFile = new File("config/override-health-config.properties");
        if (configFile.exists()) {
            healthOverrideProperties.load(new FileInputStream(configFile));
        }

        Properties properties = createKeySortedProperties();
        properties.putAll(defaultServiceConfig);
        properties.putAll(defaultHealthConfig);
        properties.putAll(serviceOverrideProperties);
        properties.putAll(healthOverrideProperties);
        properties.putAll(instanceProperties);
        properties.store(new FileOutputStream("config/config.properties"), "");

        System.exit(0);
    } catch (Exception x) {
        x.printStackTrace();
        System.exit(1);
    }

}

From source file:io.apiman.tools.jdbc.ApimanJdbcServer.java

public static void main(String[] args) {
    try {
        File dataDir = new File("target/h2");
        String url = "jdbc:h2:tcp://localhost:9092/apiman";

        if (dataDir.exists()) {
            FileUtils.deleteDirectory(dataDir);
        }
        dataDir.mkdirs();

        Server.createTcpServer("-tcpPassword", "sa", "-baseDir", dataDir.getAbsolutePath(), "-tcpPort", "9092",
                "-tcpAllowOthers").start();
        Class.forName("org.h2.Driver");

        try (Connection connection = DriverManager.getConnection(url, "sa", "")) {
            System.out.println("Connection Established: " + connection.getMetaData().getDatabaseProductName()
                    + "/" + connection.getCatalog());
            executeUpdate(connection,
                    "CREATE TABLE users ( username varchar(255) NOT NULL, password varchar(255) NOT NULL, PRIMARY KEY (username))");
            executeUpdate(connection,
                    "INSERT INTO users (username, password) VALUES ('bwayne', 'ae2efd698aefdf366736a4eda1bc5241f9fbfec7')");
            executeUpdate(connection,
                    "INSERT INTO users (username, password) VALUES ('ckent', 'ea59f7ca52a2087c99374caba0ff29be1b2dcdbf')");
            executeUpdate(connection,
                    "INSERT INTO users (username, password) VALUES ('ballen', 'ea59f7ca52a2087c99374caba0ff29be1b2dcdbf')");
            executeUpdate(connection,
                    "CREATE TABLE roles (rolename varchar(255) NOT NULL, username varchar(255) NOT NULL)");
            executeUpdate(connection, "INSERT INTO roles (rolename, username) VALUES ('user', 'bwayne')");
            executeUpdate(connection, "INSERT INTO roles (rolename, username) VALUES ('admin', 'bwayne')");
            executeUpdate(connection, "INSERT INTO roles (rolename, username) VALUES ('ckent', 'user')");
            executeUpdate(connection, "INSERT INTO roles (rolename, username) VALUES ('ballen', 'user')");
        }

        System.out.println("======================================================");
        System.out.println("JDBC (H2) server started successfully.");
        System.out.println("");
        System.out.println("  Data: " + dataDir.getAbsolutePath());
        System.out.println("  JDBC URL: " + url);
        System.out.println("  JDBC User: sa");
        System.out.println("  JDBC Password: ");
        System.out.println(
                "  Authentication Query:   SELECT * FROM users u WHERE u.username = ? AND u.password = ?");
        System.out.println("  Authorization Query:    SELECT r.rolename FROM roles r WHERE r.username = ?");
        System.out.println("======================================================");
        System.out.println("");
        System.out.println("");
        System.out.println("Press Enter to stop the JDBC server.");
        new BufferedReader(new InputStreamReader(System.in)).readLine();

        System.out.println("Shutting down the JDBC server...");

        Server.shutdownTcpServer("tcp://localhost:9092", "", true, true);

        System.out.println("Done!");
    } catch (Exception e) {
        e.printStackTrace();
    }
}