Example usage for java.lang.String.format

List of usage examples for java.lang.String.format

Introduction

This page collects usage examples of java.lang.String.format drawn from open-source projects.

Prototype

public static String format(String format, Object... args) 

Document

Returns a formatted string using the specified format string and arguments.
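
For orientation, here is a minimal, self-contained sketch of the method in action (the values are illustrative, not taken from the examples below):

public class FormatDemo {
    public static void main(String[] args) {
        // %s formats a string, %d a decimal integer, %.2f a float with two decimals
        String s = String.format("user=%s id=%d score=%.2f", "alice", 42, 3.14159);
        System.out.println(s); // prints: user=alice id=42 score=3.14
    }
}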

Usage

From source file: com.act.lcms.db.io.LoadTSVIntoDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("t").argName("type")
            .desc("The type of TSV data to read, options are: " + StringUtils.join(TSV_TYPE.values(), ", "))
            .hasArg().required().longOpt("table-type").build());
    opts.addOption(Option.builder("i").argName("path").desc("The TSV file to read").hasArg().required()
            .longOpt("input-file").build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(
            Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    File inputFile = new File(cl.getOptionValue("input-file"));
    if (!inputFile.exists()) {
        System.err.format("Unable to find input file at %s\n", cl.getOptionValue("input-file"));
        new HelpFormatter().printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    TSV_TYPE contentType = null;
    try {
        contentType = TSV_TYPE.valueOf(cl.getOptionValue("table-type"));
    } catch (IllegalArgumentException e) {
        System.err.format("Unrecognized TSV type '%s'\n", cl.getOptionValue("table-type"));
        new HelpFormatter().printHelp(LoadTSVIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    DB db;

    if (cl.hasOption("db-url")) {
        db = new DB().connectToDB(cl.getOptionValue("db-url"));
    } else {
        Integer port = null;
        if (cl.getOptionValue("P") != null) {
            port = Integer.parseInt(cl.getOptionValue("P"));
        }
        db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"), cl.getOptionValue("u"),
                cl.getOptionValue("p"));
    }

    try {
        db.getConn().setAutoCommit(false);

        TSVParser parser = new TSVParser();
        parser.parse(inputFile);

        List<Pair<Integer, DB.OPERATION_PERFORMED>> results = null;
        switch (contentType) {
        case CURATED_CHEMICAL:
            results = CuratedChemical.insertOrUpdateCuratedChemicalsFromTSV(db, parser);
            break;
        case CONSTRUCT:
            results = ConstructEntry.insertOrUpdateCompositionMapEntriesFromTSV(db, parser);
            break;
        case CHEMICAL_OF_INTEREST:
            results = ChemicalOfInterest.insertOrUpdateChemicalOfInterestsFromTSV(db, parser);
            break;
        default:
            throw new RuntimeException(String.format("Unsupported TSV type: %s", contentType));
        }
        if (results != null) {
            for (Pair<Integer, DB.OPERATION_PERFORMED> r : results) {
                System.out.format("%d: %s\n", r.getLeft(), r.getRight());
            }
        }
        // If we didn't encounter an exception, commit the transaction.
        db.getConn().commit();
    } catch (Exception e) {
        System.err.format("Caught exception when trying to load plate composition, rolling back. %s\n",
                e.getMessage());
        db.getConn().rollback();
        throw (e);
    } finally {
        db.getConn().close();
    }
}
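
The example above mixes String.format, used to build option descriptions, with PrintStream.format on System.err and System.out, which formats and writes in one step instead of returning a String. A minimal sketch of the distinction (the port value and message are illustrative):

public class PrintVsFormat {
    public static void main(String[] args) {
        // String.format returns the formatted text for later use
        String desc = String.format("The LCMS DB port (default = %d)", 5432);
        // PrintStream.format writes straight to the stream
        System.err.format("Argument parsing failed: %s%n", "missing --input-file");
        System.out.println(desc);
    }
}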

From source file: de.zazaz.iot.bosch.indego.util.IndegoIftttAdapter.java

public static void main(String[] args) {
    System.setProperty("log4j.configurationFile", "log4j2-indegoIftttAdapter-normal.xml");

    Options options = new Options();

    StringBuilder commandList = new StringBuilder();
    for (DeviceCommand cmd : DeviceCommand.values()) {
        if (commandList.length() > 0) {
            commandList.append(", ");
        }
        commandList.append(cmd.toString());
    }

    options.addOption(Option //
            .builder("c") //
            .longOpt("config") //
            .desc("The configuration file to use") //
            .required() //
            .hasArg() //
            .build());
    options.addOption(Option //
            .builder("d") //
            .longOpt("debug") //
            .desc("Logs more details") //
            .build());
    options.addOption(Option //
            .builder("?") //
            .longOpt("help") //
            .desc("Prints this help") //
            .build());

    CommandLineParser parser = new DefaultParser();
    CommandLine cmds = null;
    try {
        cmds = parser.parse(options, args);
    } catch (ParseException ex) {
        System.err.println(ex.getMessage());
        System.err.println();
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndegoIftttAdapter.class.getName(), options);
        System.exit(1);
        return;
    }

    if (cmds.hasOption("?")) {
        HelpFormatter formatter = new HelpFormatter();
        formatter.printHelp(IndegoIftttAdapter.class.getName(), options);
        return;
    }

    if (cmds.hasOption("d")) {
        System.setProperty("log4j.configurationFile", "log4j2-indegoIftttAdapter-debug.xml");
    }

    String configFileName = cmds.getOptionValue('c');
    File configFile = new File(configFileName);

    if (!configFile.exists()) {
        System.err.println(String.format("The specified config file (%s) does not exist", configFileName));
        System.err.println();
        System.exit(2);
        return;
    }

    Properties properties = new Properties();
    try (InputStream in = new FileInputStream(configFile)) {
        properties.load(in);
    } catch (IOException ex) {
        System.err.println(ex.getMessage());
        System.err.println(String.format("Was not able to load the properties file (%s)", configFileName));
        System.err.println();
    }

    IftttIndegoAdapterConfiguration config = new IftttIndegoAdapterConfiguration();
    config.setIftttMakerKey(properties.getProperty("indego.ifttt.maker.key"));
    config.setIftttIgnoreServerCertificate(
            Integer.parseInt(properties.getProperty("indego.ifttt.maker.ignore-server-certificate")) != 0);
    config.setIftttReceiverPort(Integer.parseInt(properties.getProperty("indego.ifttt.maker.receiver-port")));
    config.setIftttReceiverSecret(properties.getProperty("indego.ifttt.maker.receiver-secret"));
    config.setIftttOfflineEventName(properties.getProperty("indego.ifttt.maker.eventname-offline"));
    config.setIftttOnlineEventName(properties.getProperty("indego.ifttt.maker.eventname-online"));
    config.setIftttErrorEventName(properties.getProperty("indego.ifttt.maker.eventname-error"));
    config.setIftttErrorClearedEventName(properties.getProperty("indego.ifttt.maker.eventname-error-cleared"));
    config.setIftttStateChangeEventName(properties.getProperty("indego.ifttt.maker.eventname-state-change"));
    config.setIndegoBaseUrl(properties.getProperty("indego.ifttt.device.base-url"));
    config.setIndegoUsername(properties.getProperty("indego.ifttt.device.username"));
    config.setIndegoPassword(properties.getProperty("indego.ifttt.device.password"));
    config.setPollingIntervalMs(Integer.parseInt(properties.getProperty("indego.ifttt.polling-interval-ms")));

    IftttIndegoAdapter adapter = new IftttIndegoAdapter(config);
    adapter.startup();
}

From source file: alluxio.master.backcompat.BackwardsCompatibilityJournalGenerator.java

/**
 * Generates journal files to be used by the backwards compatibility test. The files are named
 * based on the current version defined in ProjectConstants.VERSION. Run this with each release,
 * and commit the created journal and snapshot into the git repository.
 *
 * @param args no args expected
 */
public static void main(String[] args) throws Exception {
    BackwardsCompatibilityJournalGenerator generator = new BackwardsCompatibilityJournalGenerator();
    new JCommander(generator, args);
    if (!LoginUser.get().getName().equals("root")) {
        System.err.printf("Journals must be generated as root so that they can be replayed by root%n");
        System.exit(-1);
    }
    File journalDst = new File(generator.getOutputDirectory(),
            String.format("journal-%s", ProjectConstants.VERSION));
    if (journalDst.exists()) {
        System.err.printf("%s already exists, delete it first%n", journalDst.getAbsolutePath());
        System.exit(-1);
    }
    File backupDst = new File(generator.getOutputDirectory(),
            String.format("backup-%s", ProjectConstants.VERSION));
    if (backupDst.exists()) {
        System.err.printf("%s already exists, delete it first%n", backupDst.getAbsolutePath());
        System.exit(-1);
    }
    MultiProcessCluster cluster = MultiProcessCluster.newBuilder(PortCoordination.BACKWARDS_COMPATIBILITY)
            .setClusterName("BackwardsCompatibility").setNumMasters(1).setNumWorkers(1).build();
    try {
        cluster.start();
        cluster.notifySuccess();
        cluster.waitForAllNodesRegistered(10 * Constants.SECOND_MS);
        for (TestOp op : OPS) {
            op.apply(cluster.getClients());
        }
        AlluxioURI backup = cluster.getMetaMasterClient()
                .backup(new File(generator.getOutputDirectory()).getAbsolutePath(), true).getBackupUri();
        FileUtils.moveFile(new File(backup.getPath()), backupDst);
        cluster.stopMasters();
        FileUtils.copyDirectory(new File(cluster.getJournalDir()), journalDst);
    } catch (Throwable t) {
        t.printStackTrace();
    } finally {
        cluster.destroy();
    }
    System.out.printf("Artifacts successfully generated at %s and %s%n", journalDst.getAbsolutePath(),
            backupDst.getAbsolutePath());
}
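
Unlike the earlier examples, which end their format strings with \n, this one uses %n, which the formatter expands to the platform line separator. A small sketch (the path is illustrative):

public class NewlineDemo {
    public static void main(String[] args) {
        // "\n" is always a bare linefeed; "%n" becomes System.lineSeparator()
        System.err.printf("%s already exists, delete it first%n", "/tmp/journal-2.9.0");
        System.out.println(String.format("done%n").equals("done" + System.lineSeparator())); // true
    }
}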

From source file: com.mapr.synth.Synth.java

public static void main(String[] args)
        throws IOException, CmdLineException, InterruptedException, ExecutionException {
    final Options opts = new Options();
    CmdLineParser parser = new CmdLineParser(opts);
    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println("Usage: " + "[ -count <number>G|M|K ] " + "-schema schema-file "
                + "[-quote DOUBLE_QUOTE|BACK_SLASH|OPTIMISTIC] " + "[-format JSON|TSV|CSV|XML ] "
                + "[-threads n] " + "[-output output-directory-name] ");
        throw e;
    }
    Preconditions.checkArgument(opts.threads > 0 && opts.threads <= 2000,
            "Must have at least one thread and no more than 2000");

    if (opts.threads > 1) {
        Preconditions.checkArgument(!"-".equals(opts.output),
                "If more than on thread is used, you have to use -output to set the output directory");
    }

    File outputDir = new File(opts.output);
    if (!"-".equals(opts.output)) {
        if (!outputDir.exists()) {
            Preconditions.checkState(outputDir.mkdirs(),
                    String.format("Couldn't create output directory %s", opts.output));
        }
        Preconditions.checkArgument(outputDir.exists() && outputDir.isDirectory(),
                String.format("Couldn't create directory %s", opts.output));
    }

    if (opts.schema == null) {
        throw new IllegalArgumentException("Must specify schema file using [-schema filename] option");
    }
    final SchemaSampler sampler = new SchemaSampler(opts.schema);
    final AtomicLong rowCount = new AtomicLong();

    final List<ReportingWorker> tasks = Lists.newArrayList();
    int limit = (opts.count + opts.threads - 1) / opts.threads;
    int remaining = opts.count;
    for (int i = 0; i < opts.threads; i++) {

        final int count = Math.min(limit, remaining);
        remaining -= count;

        tasks.add(new ReportingWorker(opts, sampler, rowCount, count, i));
    }

    final double t0 = System.nanoTime() * 1e-9;
    ExecutorService pool = Executors.newFixedThreadPool(opts.threads);
    ScheduledExecutorService blinker = Executors.newScheduledThreadPool(1);
    final AtomicBoolean finalRun = new AtomicBoolean(false);

    final PrintStream sideLog = new PrintStream(new FileOutputStream("side-log"));
    Runnable blink = new Runnable() {
        public double oldT;
        private long oldN;

        @Override
        public void run() {
            double t = System.nanoTime() * 1e-9;
            long n = rowCount.get();
            System.err.printf("%s\t%d\t%.1f\t%d\t%.1f\t%.3f\n", finalRun.get() ? "F" : "R", opts.threads,
                    t - t0, n, n / (t - t0), (n - oldN) / (t - oldT));
            for (ReportingWorker task : tasks) {
                ReportingWorker.ThreadReport r = task.report();
                sideLog.printf("\t%d\t%.2f\t%.2f\t%.2f\t%.1f\t%.1f\n", r.fileNumber, r.threadTime, r.userTime,
                        r.wallTime, r.rows / r.threadTime, r.rows / r.wallTime);
            }
            oldN = n;
            oldT = t;
        }
    };
    if (!"-".equals(opts.output)) {
        blinker.scheduleAtFixedRate(blink, 0, 10, TimeUnit.SECONDS);
    }
    List<Future<Integer>> results = pool.invokeAll(tasks);

    int total = 0;
    for (Future<Integer> result : results) {
        total += result.get();
    }
    Preconditions.checkState(total == opts.count,
            String.format("Expected to generate %d lines of output, but actually generated %d", opts.count, total));
    pool.shutdownNow();
    blinker.shutdownNow();
    finalRun.set(true);
    sideLog.close();
    blink.run();
}
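
One caveat the example above glosses over: String.format(String, Object...) uses the JVM's default locale, so %.1f and %.3f may print a comma as the decimal separator on some systems. The overload that takes a java.util.Locale pins the output; a minimal sketch:

import java.util.Locale;

public class LocaleDemo {
    public static void main(String[] args) {
        double rate = 1234.5678;
        // Default locale: may print "1234,57" (e.g. under Locale.GERMANY)
        System.out.println(String.format("%.2f", rate));
        // Locale.ROOT always prints "1234.57", regardless of system settings
        System.out.println(String.format(Locale.ROOT, "%.2f", rate));
    }
}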

From source file: com.arpnetworking.tsdaggregator.TsdAggregator.java

/**
 * Entry point for Time Series Data (TSD) Aggregator.
 *
 * @param args the command line arguments
 */
public static void main(final String[] args) {
    LOGGER.info("Launching tsd-aggregator");

    // Global initialization
    Thread.setDefaultUncaughtExceptionHandler(new Thread.UncaughtExceptionHandler() {
        @Override
        public void uncaughtException(final Thread thread, final Throwable throwable) {
            LOGGER.error("Unhandled exception!", throwable);
        }
    });

    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
        @Override
        public void run() {
            final LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
            context.stop();
        }
    }));

    System.setProperty("org.vertx.logger-delegate-factory-class-name",
            "org.vertx.java.core.logging.impl.SLF4JLogDelegateFactory");

    // Run the tsd aggregator
    if (args.length != 1) {
        throw new RuntimeException("No configuration file specified");
    }
    LOGGER.debug(String.format("Loading configuration from file; file=%s", args[0]));

    final File configurationFile = new File(args[0]);
    final Configurator<TsdAggregator, TsdAggregatorConfiguration> configurator = new Configurator<>(
            TsdAggregator.class, TsdAggregatorConfiguration.class);
    final ObjectMapper objectMapper = TsdAggregatorConfiguration.createObjectMapper();
    final DynamicConfiguration configuration = new DynamicConfiguration.Builder().setObjectMapper(objectMapper)
            .addSourceBuilder(
                    new JsonNodeFileSource.Builder().setObjectMapper(objectMapper).setFile(configurationFile))
            .addTrigger(new FileTrigger.Builder().setFile(configurationFile).build()).addListener(configurator)
            .build();

    configuration.launch();

    final AtomicBoolean isRunning = new AtomicBoolean(true);
    Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
        @Override
        public void run() {
            LOGGER.info("Stopping tsd-aggregator");
            configuration.shutdown();
            configurator.shutdown();
            isRunning.set(false);
        }
    }));

    while (isRunning.get()) {
        try {
            Thread.sleep(30000);
        } catch (final InterruptedException e) {
            break;
        }
    }

    LOGGER.info("Exiting tsd-aggregator");
}

From source file: com.act.lcms.db.io.LoadPlateCompositionIntoDB.java

public static void main(String[] args) throws Exception {
    Options opts = new Options();
    opts.addOption(Option.builder("t").argName("type")
            .desc("The type of plate composition in this file, valid options are: "
                    + StringUtils.join(Arrays.asList(Plate.CONTENT_TYPE.values()), ", "))
            .hasArg().longOpt("plate-type").required().build());
    opts.addOption(Option.builder("i").argName("path").desc("The plate composition file to read").hasArg()
            .longOpt("input-file").required().build());

    // DB connection options.
    opts.addOption(Option.builder().argName("database url")
            .desc("The url to use when connecting to the LCMS db").hasArg().longOpt("db-url").build());
    opts.addOption(Option.builder("u").argName("database user").desc("The LCMS DB user").hasArg()
            .longOpt("db-user").build());
    opts.addOption(Option.builder("p").argName("database password").desc("The LCMS DB password").hasArg()
            .longOpt("db-pass").build());
    opts.addOption(Option.builder("H").argName("database host")
            .desc(String.format("The LCMS DB host (default = %s)", DB.DEFAULT_HOST)).hasArg().longOpt("db-host")
            .build());
    opts.addOption(Option.builder("P").argName("database port")
            .desc(String.format("The LCMS DB port (default = %d)", DB.DEFAULT_PORT)).hasArg().longOpt("db-port")
            .build());
    opts.addOption(Option.builder("N").argName("database name")
            .desc(String.format("The LCMS DB name (default = %s)", DB.DEFAULT_DB_NAME)).hasArg()
            .longOpt("db-name").build());

    // Everybody needs a little help from their friends.
    opts.addOption(
            Option.builder("h").argName("help").desc("Prints this help message").longOpt("help").build());

    CommandLine cl = null;
    try {
        CommandLineParser parser = new DefaultParser();
        cl = parser.parse(opts, args);
    } catch (ParseException e) {
        System.err.format("Argument parsing failed: %s\n", e.getMessage());
        HelpFormatter fmt = new HelpFormatter();
        fmt.printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    if (cl.hasOption("help")) {
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        return;
    }

    File inputFile = new File(cl.getOptionValue("input-file"));
    if (!inputFile.exists()) {
        System.err.format("Unable to find input file at %s\n", cl.getOptionValue("input-file"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    PlateCompositionParser parser = new PlateCompositionParser();
    parser.processFile(inputFile);

    Plate.CONTENT_TYPE contentType = null;
    try {
        contentType = Plate.CONTENT_TYPE.valueOf(cl.getOptionValue("plate-type"));
    } catch (IllegalArgumentException e) {
        System.err.format("Unrecognized plate type '%s'\n", cl.getOptionValue("plate-type"));
        new HelpFormatter().printHelp(LoadPlateCompositionIntoDB.class.getCanonicalName(), opts, true);
        System.exit(1);
    }

    DB db;
    if (cl.hasOption("db-url")) {
        db = new DB().connectToDB(cl.getOptionValue("db-url"));
    } else {
        Integer port = null;
        if (cl.getOptionValue("P") != null) {
            port = Integer.parseInt(cl.getOptionValue("P"));
        }
        db = new DB().connectToDB(cl.getOptionValue("H"), port, cl.getOptionValue("N"), cl.getOptionValue("u"),
                cl.getOptionValue("p"));
    }

    try {
        db.getConn().setAutoCommit(false);

        Plate p = Plate.getOrInsertFromPlateComposition(db, parser, contentType);

        switch (contentType) {
        case LCMS:
            List<LCMSWell> LCMSWells = LCMSWell.getInstance().insertFromPlateComposition(db, parser, p);
            for (LCMSWell LCMSWell : LCMSWells) {
                System.out.format("%d: %d x %d  %s  %s\n", LCMSWell.getId(), LCMSWell.getPlateColumn(),
                        LCMSWell.getPlateRow(), LCMSWell.getMsid(), LCMSWell.getComposition());
            }
            break;
        case STANDARD:
            List<StandardWell> standardWells = StandardWell.getInstance().insertFromPlateComposition(db, parser,
                    p);
            for (StandardWell standardWell : standardWells) {
                System.out.format("%d: %d x %d  %s\n", standardWell.getId(), standardWell.getPlateColumn(),
                        standardWell.getPlateRow(), standardWell.getChemical());
            }
            break;
        case DELIVERED_STRAIN:
            List<DeliveredStrainWell> deliveredStrainWells = DeliveredStrainWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (DeliveredStrainWell deliveredStrainWell : deliveredStrainWells) {
                System.out.format("%d: %d x %d (%s) %s %s \n", deliveredStrainWell.getId(),
                        deliveredStrainWell.getPlateColumn(), deliveredStrainWell.getPlateRow(),
                        deliveredStrainWell.getWell(), deliveredStrainWell.getMsid(),
                        deliveredStrainWell.getComposition());
            }
            break;
        case INDUCTION:
            List<InductionWell> inductionWells = InductionWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (InductionWell inductionWell : inductionWells) {
                System.out.format("%d: %d x %d %s %s %s %d\n", inductionWell.getId(),
                        inductionWell.getPlateColumn(), inductionWell.getPlateRow(), inductionWell.getMsid(),
                        inductionWell.getComposition(), inductionWell.getChemical(), inductionWell.getGrowth());
            }
            break;
        case PREGROWTH:
            List<PregrowthWell> pregrowthWells = PregrowthWell.getInstance().insertFromPlateComposition(db,
                    parser, p);
            for (PregrowthWell pregrowthWell : pregrowthWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %d\n", pregrowthWell.getId(),
                        pregrowthWell.getPlateColumn(), pregrowthWell.getPlateRow(),
                        pregrowthWell.getSourcePlate(), pregrowthWell.getSourceWell(), pregrowthWell.getMsid(),
                        pregrowthWell.getComposition(), pregrowthWell.getGrowth());
            }
            break;
        case FEEDING_LCMS:
            List<FeedingLCMSWell> feedingLCMSWells = FeedingLCMSWell.getInstance()
                    .insertFromPlateComposition(db, parser, p);
            for (FeedingLCMSWell feedingLCMSWell : feedingLCMSWells) {
                System.out.format("%d: %d x %d (%s @ %s) %s %s %f\n", feedingLCMSWell.getId(),
                        feedingLCMSWell.getPlateColumn(), feedingLCMSWell.getPlateRow(),
                        feedingLCMSWell.getMsid(), feedingLCMSWell.getComposition(),
                        feedingLCMSWell.getExtract(), feedingLCMSWell.getChemical(),
                        feedingLCMSWell.getConcentration());
            }
            break;
        default:
            System.err.format("Unrecognized/unimplemented data type '%s'\n", contentType);
            break;
        }
        // If we didn't encounter an exception, commit the transaction.
        db.getConn().commit();
    } catch (Exception e) {
        System.err.format("Caught exception when trying to load plate composition, rolling back. %s\n",
                e.getMessage());
        db.getConn().rollback();
        throw e;
    } finally {
        db.getConn().close();
    }

}

From source file: com.kinesis.datavis.writer.BidRequestStreamWriter.java

/**
 * Start a number of threads and send randomly generated {@link }s to a Kinesis Stream until the
 * program is terminated.
 *
 * @param args Expecting 3 arguments: the number of threads to use to send data to Kinesis, the name
 *             of the stream to send records to, and the AWS region in which these resources exist or
 *             should be created.
 * @throws InterruptedException If this application is interrupted while sending records to Kinesis.
 */
public static void main(String[] args) throws InterruptedException {
    int numberOfThreads = Integer.parseInt(args[0]);

    AppProperties appProps = new AppProperties("bidrq", args[1]);

    String streamName = appProps.streamName();

    Region region = AppUtils.parseRegion(appProps.getRegion());

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();

    ClientConfiguration clientConfig = AppUtils.configureUserAgentForSample(new ClientConfiguration());

    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);

    // The more resources we declare the higher write IOPS we need on our DynamoDB table.
    // We write a record for each resource every interval.
    // If interval = 500ms, resource count = 7 we need: (1000/500 * 7) = 14 write IOPS minimum.
    List<String> resources = new ArrayList<>();
    resources.add("300x200");
    resources.add("500x200");
    resources.add("400x600");
    resources.add("800x600");

    List<String> bidRequestIds = new ArrayList<>();
    //        bidRequestIds.add(UUID.randomUUID().toString());
    //        bidRequestIds.add(UUID.randomUUID().toString());
    //        bidRequestIds.add(UUID.randomUUID().toString());
    //        bidRequestIds.add("11111111111");
    //        bidRequestIds.add("22222222222");
    //        bannerIds.add("33333333333");
    //        bidRequestIds.add("44444444444");
    bidRequestIds.add("92b9b9d9-2d6d-454a-b80a-d6a318aca9ec");

    BidRequestFactory bdFactory = new BidRequestFactory(bidRequestIds, resources);

    // Creates a stream to write to with 2 shards if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);

    LOG.info(String.format("%s stream is ready for use", streamName));

    final BidRequestPutter putter = new BidRequestPutter(bdFactory, kinesis, streamName);

    GeneralStreamWriter streamWriter = new GeneralStreamWriter(numberOfThreads, putter);

    streamWriter.doWrite();

}
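
A side note on the LOG.info(String.format(...)) pattern used here: the message is built eagerly even when INFO logging is disabled. A parameterized logging API such as SLF4J (shown as an alternative, not what this project uses) defers the formatting; a sketch with an illustrative stream name:

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggingDemo {
    private static final Logger LOG = LoggerFactory.getLogger(LoggingDemo.class);

    public static void main(String[] args) {
        String streamName = "bidrq-stream";
        // Eager: String.format runs even if INFO is disabled
        LOG.info(String.format("%s stream is ready for use", streamName));
        // Deferred: the message is assembled only if INFO is enabled
        LOG.info("{} stream is ready for use", streamName);
    }
}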

From source file: de.tudarmstadt.ukp.dkpro.argumentation.io.XmiFilePatternJsonStreamDumper.java

public static void main(final String[] args) throws ParseException, UIMAException, IOException {
    // TODO: It would be nice to be able to read from standard input but it
    // seems that ResourceCollectionReaderBase doesn't facilitate this
    final CommandLineParser parser = new DefaultParser();
    final Options opts = Parameter.createOptions();
    try {
        final CommandLine commandLine = parser.parse(opts, args);
        final String sourceLocation = commandLine.getOptionValue(Parameter.SOURCE_LOCATION.paramName);
        LOG.info(String.format("Source location is \"%s\".", sourceLocation));
        final String targetLocation = commandLine.getOptionValue(Parameter.TARGET_LOCATION.paramName);
        LOG.info(String.format("Target location is \"%s\".", targetLocation));
        new XmiFilePatternJsonStreamDumper(JsonStreamDumpWriter.class, sourceLocation, targetLocation).call();
    } catch (final ParseException e) {
        printHelp(opts);
        throw e;
    }
}

From source file: com.alertlogic.aws.kinesis.test1.StreamProcessor.java

/**
 * Start the Kinesis Client application.
 *
 * @param args Expecting 4 arguments: Application name to use for the Kinesis Client Application, Stream name to
 *        read from, DynamoDB table name to persist counts into, and the AWS region in which these resources
 *        exist or should be created.
 */
public static void main(String[] args) throws UnknownHostException {
    if (args.length != 4) {
        System.err.println("Usage: " + StreamProcessor.class.getSimpleName()
                + " <application name> <stream name> <DynamoDB table name> <region>");
        System.exit(1);
    }

    String applicationName = args[0];
    String streamName = args[1];
    String countsTableName = args[2];
    Region region = SampleUtils.parseRegion(args[3]);

    AWSCredentialsProvider credentialsProvider = new DefaultAWSCredentialsProviderChain();
    ClientConfiguration clientConfig = SampleUtils.configureUserAgentForSample(new ClientConfiguration());
    AmazonKinesis kinesis = new AmazonKinesisClient(credentialsProvider, clientConfig);
    kinesis.setRegion(region);
    AmazonDynamoDB dynamoDB = new AmazonDynamoDBClient(credentialsProvider, clientConfig);
    dynamoDB.setRegion(region);

    // Creates a stream to write to, if it doesn't exist
    StreamUtils streamUtils = new StreamUtils(kinesis);
    streamUtils.createStreamIfNotExists(streamName, 2);
    LOG.info(String.format("%s stream is ready for use", streamName));

    DynamoDBUtils dynamoDBUtils = new DynamoDBUtils(dynamoDB);
    dynamoDBUtils.createCountTableIfNotExists(countsTableName);
    LOG.info(String.format("%s DynamoDB table is ready for use", countsTableName));

    String workerId = String.valueOf(UUID.randomUUID());
    LOG.info(String.format("Using working id: %s", workerId));
    KinesisClientLibConfiguration kclConfig = new KinesisClientLibConfiguration(applicationName, streamName,
            credentialsProvider, workerId);
    kclConfig.withCommonClientConfig(clientConfig);
    kclConfig.withRegionName(region.getName());
    kclConfig.withInitialPositionInStream(InitialPositionInStream.LATEST);

    // Persist counts to DynamoDB
    DynamoDBCountPersister persister = new DynamoDBCountPersister(
            dynamoDBUtils.createMapperForTable(countsTableName));

    IRecordProcessorFactory recordProcessor = new CountingRecordProcessorFactory<HttpReferrerPair>(
            HttpReferrerPair.class, persister, COMPUTE_RANGE_FOR_COUNTS_IN_MILLIS, COMPUTE_INTERVAL_IN_MILLIS);

    Worker worker = new Worker(recordProcessor, kclConfig);

    int exitCode = 0;
    try {
        worker.run();
    } catch (Throwable t) {
        LOG.error("Caught throwable while processing data.", t);
        exitCode = 1;
    }
    System.exit(exitCode);
}

From source file: com.bt.aloha.batchtest.WeekendBatchTest.java

public static void main(String[] args) throws Exception {
    ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("batchTestApplicationContext.xml");
    BatchTest batchTest = (BatchTest) ctx.getBean("batchTestBean");
    batchTest.setApplicationContext(ctx);
    batchTest.init();
    batchTest.assignNewCollectionsToBeans();

    while (true) {
        configure(batchTest);
        if (stop)
            break;
        batchTest.run();
        logStatistics(log, batchTest);
        batchTest.reset();
        if (sleepTime > 0) {
            log.info(String.format("sleeping for %d minutes", sleepTime / 60 / 1000));
            Thread.sleep(sleepTime);
        }
    }
    // wait until everything in the collections should be ready for housekeeping
    Properties batchProps = new Properties();
    InputStream is = batchTest.getClass().getResourceAsStream("/batchrun.sip.properties");
    batchProps.load(is);
    is.close();
    Thread.sleep(Long.parseLong(batchProps.getProperty("dialog.max.time.to.live", "900000"))
            + Long.parseLong(batchProps.getProperty("housekeeping.interval", "300000")));
    // housekeeping should have happened
    // log everything still left
    logCollections(batchTest);

    batchTest.destroy();
}