Example usage for java.lang Integer valueOf

List of usage examples for java.lang Integer valueOf

Introduction

On this page you can find example usages of java.lang.Integer.valueOf.

Prototype

@HotSpotIntrinsicCandidate
public static Integer valueOf(int i) 

Document

Returns an Integer instance representing the specified int value.
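
A minimal, self-contained sketch (not taken from any of the projects listed below; the ValueOfDemo class name is ours) illustrating the documented behavior: valueOf is guaranteed to cache values in the range -128 to 127, so it is generally preferred over constructing a new Integer.

public class ValueOfDemo {
    public static void main(String[] args) {
        // Values in -128..127 always come from a cache, so repeated
        // calls can return the same instance.
        Integer a = Integer.valueOf(100);
        Integer b = Integer.valueOf(100);
        System.out.println(a == b);      // true (cached instance)
        System.out.println(a.equals(b)); // true

        // Outside the cached range distinct instances may be returned,
        // so boxed values should be compared with equals(), not ==.
        Integer c = Integer.valueOf(1000);
        Integer d = Integer.valueOf(1000);
        System.out.println(c.equals(d)); // true

        // The Integer.valueOf(String) overload, which several of the
        // examples below use, parses decimal text and throws
        // NumberFormatException on malformed input.
        int port = Integer.valueOf("8080");
        System.out.println(port);        // 8080
    }
}

Many of the usage examples below call the String overload on command-line option values, wrapping it in a try/catch for NumberFormatException where invalid input is possible.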

Usage

From source file:org.datagator.tools.importer.Main.java

public static void main(String[] args) throws IOException {

    int columnHeaders = 0; // cli input
    int rowHeaders = 0; // cli input

    try {
        CommandLine cmds = parser.parse(opts, args);
        if (cmds.hasOption("filter")) {
            throw new UnsupportedOperationException("Not supported yet.");
        }
        if (cmds.hasOption("layout")) {
            String[] layout = cmds.getOptionValues("layout");
            if ((layout == null) || (layout.length != 2)) {
                throw new IllegalArgumentException("Bad layout.");
            }
            try {
                columnHeaders = Integer.valueOf(layout[0]);
                rowHeaders = Integer.valueOf(layout[1]);
                if ((columnHeaders < 0) || (rowHeaders < 0)) {
                    throw new IllegalArgumentException("Bad layout.");
                }
            } catch (NumberFormatException ex) {
                throw new IllegalArgumentException(ex);
            }
        }
        if (cmds.hasOption("help")) {
            help.printHelp("importer", opts, true);
            System.exit(EX_OK);
        }
        // positional arguments, i.e., input file name(s)
        args = cmds.getArgs();
    } catch (ParseException ex) {
        throw new IllegalArgumentException(ex);
    } catch (IllegalArgumentException ex) {
        help.printHelp("importer", opts, true);
        throw ex;
    }

    JsonGenerator jg = json.createGenerator(System.out, JsonEncoding.UTF8);
    jg.setPrettyPrinter(new StandardPrinter());

    final Extractor extractor;

    if (args.length == 1) {
        extractor = new FileExtractor(new File(args[0]));
    } else if (args.length > 1) {
        throw new UnsupportedOperationException("Not supported yet.");
    } else {
        throw new IllegalArgumentException("Missing input.");
    }

    int columnsCount = -1;
    int matrixCount = 0;

    ArrayDeque<String> stack = new ArrayDeque<String>();

    AtomType token = extractor.nextAtom();
    while (token != null) {
        switch (token) {
        case FLOAT:
        case INTEGER:
        case STRING:
        case NULL:
            jg.writeObject(extractor.getCurrentAtomData());
            break;
        case START_RECORD:
            jg.writeStartArray();
            break;
        case END_RECORD:
            int _numFields = (Integer) extractor.getCurrentAtomData();
            if (columnsCount < 0) {
                columnsCount = _numFields;
            } else if (columnsCount != _numFields) {
                throw new RuntimeException(String.format("row %s has different length than previous rows",
                        String.valueOf(_numFields - 1)));
            }
            jg.writeEndArray();
            break;
        case START_GROUP:
            stack.push(String.valueOf(extractor.getCurrentAtomData()));
            jg.writeStartObject();
            jg.writeStringField("kind", "datagator#Matrix");
            jg.writeNumberField("columnHeaders", columnHeaders);
            jg.writeNumberField("rowHeaders", rowHeaders);
            jg.writeFieldName("rows");
            jg.writeStartArray();
            break;
        case END_GROUP:
            int rowsCount = (Integer) extractor.getCurrentAtomData();
            if (rowsCount == 0) {
                jg.writeStartArray();
                jg.writeEndArray();
                rowsCount = 1;
                columnsCount = 0;
            }
            jg.writeEndArray();
            jg.writeNumberField("rowsCount", rowsCount);
            jg.writeNumberField("columnsCount", columnsCount);
            jg.writeEndObject();
            matrixCount += 1;
            stack.pop();
            break;
        default:
            break;
        }
        token = extractor.nextAtom();
    }

    jg.close();

    System.exit(EX_OK);
}

From source file:com.eternitywall.ots.OtsCli.java

public static void main(String[] args) {
    // Create the Options
    Options options = new Options();
    options.addOption("c", "calendar", true,
            "Create timestamp with the aid of a remote calendar. May be specified multiple times.");
    options.addOption("k", "key", true, "Signature key file of private remote calendars.");
    options.addOption("d", "digest", true, "Verify a (hex-encoded) digest rather than a file.");
    options.addOption("a", "algorithm", true,
            "Pass the hashing algorithm of the document to timestamp: SHA256(default), SHA1, RIPEMD160.");
    options.addOption("m", "", true,
            "Commitments are sent to remote calendars in the event of timeout the timestamp is considered done if at least M calendars replied.");
    options.addOption("s", "shrink", false, "Shrink upgraded timestamp.");
    options.addOption("V", "version", false, "Print " + title + " version.");
    options.addOption("v", "verbose", false, "Be more verbose..");
    options.addOption("f", "file", true,
            "Specify target file explicitly (default: original file present in the same directory without .ots)");
    options.addOption("h", "help", false, "print this help.");

    // Parse the args to retrieve options & command
    CommandLineParser parser = new BasicParser();
    try {
        CommandLine line = parser.parse(options, args);
        // args are the arguments passed to the application via the main method

        if (line.hasOption("c")) {
            String[] cals = line.getOptionValues("c");
            calendarsUrl.addAll(Arrays.asList(cals));
        }

        if (line.hasOption("m")) {
            m = Integer.valueOf(line.getOptionValue("m"));
        }

        if (line.hasOption("k")) {
            signatureFile = line.getOptionValue("k");
            calendarsUrl.clear();
        }

        if (line.hasOption("s")) {
            shrink = true;
        }

        if (line.hasOption("v")) {
            verbose = true;
        }

        if (line.hasOption("V")) {
            System.out.println("Version: " + title + " v." + version + '\n');
            return;
        }

        if (line.hasOption("h")) {
            showHelp();
            return;
        }

        if (line.hasOption("d")) {
            shasum = Utils.hexToBytes(line.getOptionValue("d"));
        }

        if (line.hasOption("f")) {
            verifyFile = line.getOptionValue("f");
        }

        if (line.hasOption("a")) {
            algorithm = line.getOptionValue("a");
            if (!Arrays.asList(algorithms).contains(algorithm.toUpperCase())) {
                System.out.println("Algorithm: " + algorithm + " not supported\n");
                return;
            }
        }

        if (line.getArgList().isEmpty()) {
            showHelp();
            return;
        }

        cmd = line.getArgList().get(0);
        files = line.getArgList().subList(1, line.getArgList().size());
    } catch (Exception e) {
        System.out.println(title + ": invalid parameters ");
        return;
    }

    // Parse the command
    switch (cmd) {
    case "info":
    case "i":
        if (files.isEmpty()) {
            System.out.println("Show information on a timestamp given as argument.\n");
            System.out.println(title + " info: bad options ");
            break;
        }

        info(files.get(0), verbose);

        break;
    case "stamp":
    case "s":
        if (!files.isEmpty()) {
            multistamp(files, calendarsUrl, m, signatureFile, algorithm);
        } else if (shasum != null) {
            Hash hash = new Hash(shasum, algorithm);
            stamp(hash, calendarsUrl, m, signatureFile);
        } else {
            System.out.println("Create timestamp with the aid of a remote calendar.\n");
            System.out.println(title + ": bad options number ");
        }

        break;
    case "verify":
    case "v":
        if (!files.isEmpty()) {
            Hash hash = null;

            if (shasum != null) {
                hash = new Hash(shasum, algorithm);
            }

            if (verifyFile == null) {
                verifyFile = files.get(0).replace(".ots", "");
            }

            verify(files.get(0), hash, verifyFile);
        } else {
            System.out.println("Verify the timestamp attestations given as argument.\n");
            System.out.println(title + ": bad options number ");
        }

        break;
    case "upgrade":
    case "u":
        if (files.isEmpty()) {
            System.out.println("Upgrade remote calendar timestamps to be locally verifiable.\n");
            System.out.println(title + ": bad options number ");
            break;
        }

        upgrade(files.get(0), shrink);

        break;
    default:
        System.out.println(title + ": bad option: " + cmd);
    }
}

From source file:net.iridiant.hdfs.webdav.Main.java

public static void main(String[] args) {

    HDFSWebDAVServlet servlet = HDFSWebDAVServlet.getServlet();
    Configuration conf = servlet.getConfiguration();

    // Process command line 

    Options options = new Options();
    options.addOption("d", "debug", false, "Enable debug logging");
    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
    options.addOption("b", "bind-address", true, "Address or hostname to bind to [default: 0.0.0.0]");
    options.addOption("g", "ganglia", true, "Send Ganglia metrics to host:port [default: none]");

    CommandLine cmd = null;
    try {
        cmd = new PosixParser().parse(options, args);
    } catch (ParseException e) {
        printUsageAndExit(options, -1);
    }

    if (cmd.hasOption('d')) {
        Logger rootLogger = Logger.getLogger("net.iridiant");
        rootLogger.setLevel(Level.DEBUG);
    }

    if (cmd.hasOption('b')) {
        conf.set("hadoop.webdav.bind.address", cmd.getOptionValue('b'));
    }

    if (cmd.hasOption('p')) {
        conf.setInt("hadoop.webdav.port", Integer.valueOf(cmd.getOptionValue('p')));
    }

    String gangliaHost = null;
    int gangliaPort = 8649;
    if (cmd.hasOption('g')) {
        String val = cmd.getOptionValue('g');
        if (val.indexOf(':') != -1) {
            String[] split = val.split(":");
            gangliaHost = split[0];
            gangliaPort = Integer.valueOf(split[1]);
        } else {
            gangliaHost = val;
        }
    }

    InetSocketAddress addr = getAddress(conf);

    // Log in the server principal from keytab

    UserGroupInformation.setConfiguration(conf);
    if (UserGroupInformation.isSecurityEnabled())
        try {
            SecurityUtil.login(conf, "hadoop.webdav.server.kerberos.keytab",
                    "hadoop.webdav.server.kerberos.principal", addr.getHostName());
        } catch (IOException e) {
            LOG.fatal("Could not log in", e);
            System.err.println("Could not log in");
            System.exit(-1);
        }

    // Set up embedded Jetty

    Server server = new Server();

    server.setSendServerVersion(false);
    server.setSendDateHeader(false);
    server.setStopAtShutdown(true);

    // Set up connector
    Connector connector = new SelectChannelConnector();
    connector.setPort(addr.getPort());
    connector.setHost(addr.getHostName());
    server.addConnector(connector);
    LOG.info("Listening on " + addr);

    // Set up context
    Context context = new Context(server, "/", Context.SESSIONS);
    // WebDAV servlet
    ServletHolder servletHolder = new ServletHolder(servlet);
    servletHolder.setInitParameter("authenticate-header", "Basic realm=\"Hadoop WebDAV Server\"");
    context.addServlet(servletHolder, "/*");
    // metrics instrumentation filter
    context.addFilter(new FilterHolder(new DefaultWebappMetricsFilter()), "/*", 0);
    // auth filter
    context.addFilter(new FilterHolder(new AuthFilter(conf)), "/*", 0);
    server.setHandler(context);

    // Set up Ganglia metrics reporting
    if (gangliaHost != null) {
        GangliaReporter.enable(1, TimeUnit.MINUTES, gangliaHost, gangliaPort);
    }

    // Start and join the server thread    
    try {
        server.start();
        server.join();
    } catch (Exception e) {
        LOG.fatal("Failed to start Jetty", e);
        System.err.println("Failed to start Jetty");
        System.exit(-1);
    }
}

From source file:com.trendmicro.hdfs.webdav.Main.java

public static void main(String[] args) {

    HDFSWebDAVServlet servlet = HDFSWebDAVServlet.getServlet();
    Configuration conf = servlet.getConfiguration();

    // Process command line 

    Options options = new Options();
    options.addOption("d", "debug", false, "Enable debug logging");
    options.addOption("p", "port", true, "Port to bind to [default: 8080]");
    options.addOption("b", "bind-address", true, "Address or hostname to bind to [default: 0.0.0.0]");
    options.addOption("g", "ganglia", true, "Send Ganglia metrics to host:port [default: none]");

    CommandLine cmd = null;
    try {
        cmd = new PosixParser().parse(options, args);
    } catch (ParseException e) {
        printUsageAndExit(options, -1);
    }

    if (cmd.hasOption('d')) {
        Logger rootLogger = Logger.getLogger("com.trendmicro");
        rootLogger.setLevel(Level.DEBUG);
    }

    if (cmd.hasOption('b')) {
        conf.set("hadoop.webdav.bind.address", cmd.getOptionValue('b'));
    }

    if (cmd.hasOption('p')) {
        conf.setInt("hadoop.webdav.port", Integer.valueOf(cmd.getOptionValue('p')));
    }

    String gangliaHost = null;
    int gangliaPort = 8649;
    if (cmd.hasOption('g')) {
        String val = cmd.getOptionValue('g');
        if (val.indexOf(':') != -1) {
            String[] split = val.split(":");
            gangliaHost = split[0];
            gangliaPort = Integer.valueOf(split[1]);
        } else {
            gangliaHost = val;
        }
    }

    InetSocketAddress addr = getAddress(conf);

    // Log in the server principal from keytab

    UserGroupInformation.setConfiguration(conf);
    if (UserGroupInformation.isSecurityEnabled())
        try {
            SecurityUtil.login(conf, "hadoop.webdav.server.kerberos.keytab",
                    "hadoop.webdav.server.kerberos.principal", addr.getHostName());
        } catch (IOException e) {
            LOG.fatal("Could not log in", e);
            System.err.println("Could not log in");
            System.exit(-1);
        }

    // Set up embedded Jetty

    Server server = new Server();

    server.setSendServerVersion(false);
    server.setSendDateHeader(false);
    server.setStopAtShutdown(true);

    // Set up connector
    Connector connector = new SelectChannelConnector();
    connector.setPort(addr.getPort());
    connector.setHost(addr.getHostName());
    server.addConnector(connector);
    LOG.info("Listening on " + addr);

    // Set up context
    Context context = new Context(server, "/", Context.SESSIONS);
    // WebDAV servlet
    ServletHolder servletHolder = new ServletHolder(servlet);
    servletHolder.setInitParameter("authenticate-header", "Basic realm=\"Hadoop WebDAV Server\"");
    context.addServlet(servletHolder, "/*");
    // metrics instrumentation filter
    context.addFilter(new FilterHolder(new DefaultWebappMetricsFilter()), "/*", 0);
    // auth filter
    context.addFilter(new FilterHolder(new AuthFilter(conf)), "/*", 0);
    server.setHandler(context);

    // Set up Ganglia metrics reporting
    if (gangliaHost != null) {
        GangliaReporter.enable(1, TimeUnit.MINUTES, gangliaHost, gangliaPort);
    }

    // Start and join the server thread    
    try {
        server.start();
        server.join();
    } catch (Exception e) {
        LOG.fatal("Failed to start Jetty", e);
        System.err.println("Failed to start Jetty");
        System.exit(-1);
    }
}

From source file:edu.uci.ics.crawler4j.weatherCrawler.BasicCrawlController.java

public static void main(String[] args) {
    String folder = ConfigUtils.getFolder();
    String crawlerCount = ConfigUtils.getCrawlerCount();
    args = new String[2];
    if (StringUtils.isBlank(folder) || StringUtils.isBlank(crawlerCount)) {
        args[0] = "weather";
        args[1] = "10";
        System.out.println("No parameters in config.properties .......");
        System.out.println("[weather] will be used as rootFolder (it will contain intermediate crawl data)");
        System.out.println("[10] will be used as numberOfCralwers (number of concurrent threads)");
    } else {

        args[0] = folder;
        args[1] = crawlerCount;
    }

    /*
     * crawlStorageFolder is a folder where intermediate crawl data is
     * stored.
     */
    String crawlStorageFolder = args[0];

    /*
     * numberOfCrawlers shows the number of concurrent threads that should
     * be initiated for crawling.
     */
    int numberOfCrawlers = Integer.parseInt(args[1]);

    CrawlConfig config = new CrawlConfig();

    if (crawlStorageFolder != null && IO.deleteFolderContents(new File(crawlStorageFolder)))
        System.out.println("");
    config.setCrawlStorageFolder(crawlStorageFolder + "/d" + System.currentTimeMillis());

    /*
     * Be polite: Make sure that we don't send more than 1 request per
     * second (1000 milliseconds between requests).
     */
    config.setPolitenessDelay(1000);

    config.setConnectionTimeout(1000 * 60);
    // config1.setPolitenessDelay(1000);

    /*
     * You can set the maximum crawl depth here. The default value is -1 for
     * unlimited depth
     */
    config.setMaxDepthOfCrawling(StringUtils.isBlank(ConfigUtils.getCrawlerDepth()) ? 40
            : Integer.valueOf(ConfigUtils.getCrawlerDepth()));
    // config1.setMaxDepthOfCrawling(0);

    /*
     * You can set the maximum number of pages to crawl. The default value
     * is -1 for unlimited number of pages
     */
    config.setMaxPagesToFetch(100000);
    // config1.setMaxPagesToFetch(10000);

    /*
     * Do you need to set a proxy? If so, you can use:
     * config.setProxyHost("proxyserver.example.com");
     * config.setProxyPort(8080);
     * 
     * If your proxy also needs authentication:
     * config.setProxyUsername(username); config.setProxyPassword(password);
     */

    if (ConfigUtils.getValue("useProxy", "false").equalsIgnoreCase("true")) {

        System.out.println("?============");
        List<ProxySetting> proxys = ConfigUtils.getProxyList();

        ProxySetting proxy = proxys.get(RandomUtils.nextInt(proxys.size() - 1));

        /* test whether the proxy is available or not */
        while (!TestProxy.testProxyAvailable(proxy)) {
            proxy = proxys.get(RandomUtils.nextInt(proxys.size() - 1));
        }
        System.out.println("??" + proxy.getIp() + ":" + proxy.getPort());
        config.setProxyHost(proxy.getIp());
        config.setProxyPort(proxy.getPort());
        //      config.setProxyHost("127.0.0.1");
        //      config.setProxyPort(8087);
    } else {
        System.out.println("??============");
    }

    /*
     * This config parameter can be used to set your crawl to be resumable
     * (meaning that you can resume the crawl from a previously
     * interrupted/crashed crawl). Note: if you enable resuming feature and
     * want to start a fresh crawl, you need to delete the contents of
     * rootFolder manually.
     */
    config.setResumableCrawling(false);
    // config1.setResumableCrawling(false);
    /*
     * Instantiate the controller for this crawl.
     */
    PageFetcher pageFetcher = new PageFetcher(config);
    // PageFetcher pageFetcher1 = new PageFetcher(config1);

    RobotstxtConfig robotstxtConfig = new RobotstxtConfig();
    RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher);

    try {

        /*
         * For each crawl, you need to add some seed urls. These are the
         * first URLs that are fetched and then the crawler starts following
         * links which are found in these pages
         */
        CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer);

        controller.addSeed(StringUtils.isBlank(ConfigUtils.getSeed()) ? "http://www.tianqi.com/chinacity.html"
                : ConfigUtils.getSeed());

        // controller.addSeed("http://www.ics.uci.edu/~lopes/");
        // controller.addSeed("http://www.ics.uci.edu/~welling/");

        /*
         * Start the crawl. This is a blocking operation, meaning that your
         * code will reach the line after this only when crawling is
         * finished.
         */

        String isDaily = null;

        isDaily = ConfigUtils.getValue("isDaily", "true");

        System.out
                .println("?=======" + ConfigUtils.getValue("table", "weather_data") + "=======");

        if (isDaily.equalsIgnoreCase("true")) {
            System.out.println("???==============");
            controller.start(BasicDailyCrawler.class, numberOfCrawlers);
        } else {
            System.out.println("???==============");
            controller.start(BasicCrawler.class, numberOfCrawlers);
        }

    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }

}

From source file:org.memex.GunAdsParser.java

public static void main(String[] args) throws SolrServerException, IOException {
    GunAdsParser parser = new GunAdsParser(args[0]);
    parser.getAds(args[1], Integer.valueOf(args[2]));
}

From source file:com.maxpowered.amazon.advertising.api.app.App.java

public static void main(final String... args)
        throws FileNotFoundException, IOException, JAXBException, XMLStreamException, InterruptedException {
    try (ClassPathXmlApplicationContext ctx = new ClassPathXmlApplicationContext("application-context.xml")) {
        /*
         * Get default options based on spring configs
         */
        final String inputDefault = getOptionDefaultBasedOnSpringProperty(ctx, PROPERTY_APP_INPUT, STD_IN_STR);
        final String processedDefault = inputDefault.equals(STD_IN_STR) ? DEFAULT_PROCESSED_FILE_BASE
                : inputDefault + PROCESSED_EXT;
        final String outputDefault = getOptionDefaultBasedOnSpringProperty(ctx, PROPERTY_APP_OUTPUT,
                STD_OUT_STR);
        int throttleDefault = Integer.valueOf(getOptionDefaultBasedOnSpringProperty(ctx, PROPERTY_APP_THROTTLE,
                String.valueOf(DEFAULT_APP_THROTTLE)));
        // Maximum of 25000 requests per hour
        throttleDefault = Math.min(throttleDefault, MAX_APP_THROTTLE);

        /*
         * Get options from the CLI args
         */
        final Options options = new Options();

        options.addOption("h", false, "Display this help.");
        options.addOption("i", true, "Set the file to read ASINs from. " + DEFAULT_STR + inputDefault);
        options.addOption("p", true, "Set the file to store processed ASINs in. " + DEFAULT_STR
                + processedDefault + " or '" + PROCESSED_EXT + "' appended to the input file name.");
        // Add a note that the output depends on the configured processors. If none are configured, it defaults to a
        // std.out processor
        options.addOption("o", true,
                "Set the file to write fetched info xml to via FileProcessor. " + DEFAULT_STR + outputDefault);
        options.addOption("1", false, "Override output file and always output fetched info xml to std.out.");
        options.addOption("t", true, "Set the requests per hour throttle (max of " + MAX_APP_THROTTLE + "). "
                + DEFAULT_STR + throttleDefault);

        final CommandLineParser parser = new DefaultParser();
        CommandLine cmd = null;
        boolean needsHelp = false;

        try {
            cmd = parser.parse(options, args);
        } catch (final ParseException e) {
            needsHelp = true;
        }

        if (cmd.hasOption("h") || needsHelp) {
            final HelpFormatter formatter = new HelpFormatter();
            formatter.printHelp("App", options);
            return;
        }

        // Get throttle rate
        final int throttle = Math.min(
                cmd.hasOption("t") ? Integer.valueOf(cmd.getOptionValue("t")) : throttleDefault,
                MAX_APP_THROTTLE);
        LOG.debug("Throttle (default {}) is {} requests per hour", throttleDefault, throttle);
        // We don't want to hit our limit, so use just under an hour's worth of milliseconds
        final int requestWait = 3540000 / throttle;

        // Get input stream
        String input;
        if (cmd.hasOption("i")) {
            input = cmd.getOptionValue("i");
        } else {
            input = inputDefault;
        }
        LOG.debug("Input name (default {}) is {}", inputDefault, input);

        // Get processed file
        String processed;
        if (cmd.hasOption("p")) {
            processed = cmd.getOptionValue("p");
        } else {
            processed = input + PROCESSED_EXT;
        }
        LOG.debug("Processed file name (default {}) is {}", processedDefault, processed);
        final File processedFile = new File(processed);
        processedFile.createNewFile();

        try (final InputStream inputStream = getInputStream(input)) {

            // Get output stream
            String output;
            if (cmd.hasOption("o")) {
                output = cmd.getOptionValue("o");
            } else {
                output = outputDefault;
            }
            if (cmd.hasOption("1")) {
                output = STD_OUT_STR;
            }
            LOG.debug("Output (default {}) name is {}", outputDefault, output);
            // Special logic to set the FileProcessor output
            if (output.equals(STD_OUT_STR)) {
                final FileProcessor fileProcessor = ctx.getBeanFactory().getBean(FileProcessor.class);
                fileProcessor.setOutputStream(System.out);
            } else if (!output.equals(outputDefault)) {
                final FileProcessor fileProcessor = ctx.getBeanFactory().getBean(FileProcessor.class);
                fileProcessor.setOutputFile(output);
            }

            // This could be easily configured through CLI or properties
            final List<String> responseGroups = Lists.newArrayList();
            for (final ResponseGroup responseGroup : new ResponseGroup[] { ResponseGroup.IMAGES,
                    ResponseGroup.ITEM_ATTRIBUTES }) {
                responseGroups.add(responseGroup.getResponseGroupName());
            }
            final String responseGroupString = Joiner.on(",").join(responseGroups);

            // Search the list of remaining ASINs
            final ProductFetcher fetcher = ctx.getBeanFactory().getBean(ProductFetcher.class);
            fetcher.setProcessedFile(processedFile);
            fetcher.setRequestWait(requestWait);
            fetcher.setInputStream(inputStream);
            fetcher.setResponseGroups(responseGroupString);

            // This ensures that statistics of processed ASINs almost always get printed at the end
            Runtime.getRuntime().addShutdownHook(new Thread() {
                @Override
                public void run() {
                    fetcher.logStatistics();
                }
            });

            fetcher.fetchProductInformation();
        }
    }
}

From source file:org.ala.harvester.LocalCSVHarvester.java

/**
 * Main method for testing this particular Harvester
 *
 * @param args
 */
public static void main(String[] args) throws Exception {
    String[] locations = { "classpath*:spring.xml" };
    ApplicationContext context = new ClassPathXmlApplicationContext(locations);
    LocalCSVHarvester h = new LocalCSVHarvester();
    Repository r = (Repository) context.getBean("repository");
    h.setRepository(r);
    int infosourceId = 0;
    String csvMapper = null;

    if (args.length == 2) {
        try {
            infosourceId = Integer.valueOf(args[0]);
        } catch (NumberFormatException nfe) {
            System.out.println("Invalid infosource ID!");
            System.exit(1);
        }

        csvMapper = args[1];
    } else {
        System.out.println("Usage: LocalCSVHarvester INFOSOURCE_ID CSV_MAPPER_NAME");
        System.exit(1);
    }

    CsvMapper csvMapperClass = (CsvMapper) Class.forName(csvMapper).newInstance();
    //        displayMapContent(csvMapperClass.getParams());

    h.setConnectionParams(csvMapperClass.getParams());

    // start harvesting for the given infosource
    h.start(infosourceId);

}

From source file:cz.cuni.mff.ufal.dspace.app.MoveItems.java

/**
 * main method to run the MoveItems action
 *
 * @param argv
 *            the command line arguments given
 * @throws SQLException
 */
public static void main(String[] argv) throws SQLException {
    // Create an options object and populate it
    CommandLineParser parser = new PosixParser();

    Options options = new Options();

    options.addOption("l", "list", false, "list collections");
    options.addOption("s", "source", true, "source collection ID");
    options.addOption("t", "target", true, "target collection ID");
    options.addOption("i", "inherit", false, "inherit target collection privileges (default false)");
    options.addOption("h", "help", false, "help");

    // Parse the command line arguments
    CommandLine line;
    try {
        line = parser.parse(options, argv);
    } catch (ParseException pe) {
        System.err.println("Error parsing command line arguments: " + pe.getMessage());
        System.exit(1);
        return;
    }

    if (line.hasOption('h') || line.getOptions().length == 0) {
        printHelp(options, 0);
    }

    // Create a context
    Context context;
    try {
        context = new Context();
        context.turnOffAuthorisationSystem();
    } catch (Exception e) {
        System.err.println("Unable to create a new DSpace Context: " + e.getMessage());
        System.exit(1);
        return;
    }

    if (line.hasOption('l')) {
        listCollections(context);
        System.exit(0);
    }

    // Check a source collection ID is given
    if (!line.hasOption('s')) {
        System.err.println("Required parameter -s missing!");
        printHelp(options, 1);
    }

    Boolean inherit;

    // Check whether to inherit target collection privileges
    if (line.hasOption('i')) {
        inherit = true;
    } else {
        inherit = false;
    }

    String sourceCollectionIdParam = line.getOptionValue('s');
    Integer sourceCollectionId = null;

    if (sourceCollectionIdParam.matches("\\d+")) {
        sourceCollectionId = Integer.valueOf(sourceCollectionIdParam);
    } else {
        System.err.println("Invalid argument for parameter -s: " + sourceCollectionIdParam);
        printHelp(options, 1);
    }

    // Check a target collection ID is given
    if (!line.hasOption('t')) {
        System.err.println("Required parameter -t missing!");
        printHelp(options, 1);
    }
    String targetCollectionIdParam = line.getOptionValue('t');
    Integer targetCollectionId = null;

    if (targetCollectionIdParam.matches("\\d+")) {
        targetCollectionId = Integer.valueOf(targetCollectionIdParam);
    } else {
        System.err.println("Invalid argument for parameter -t: " + targetCollectionIdParam);
        printHelp(options, 1);
    }

    // Check that source and target collection IDs differ
    if (targetCollectionIdParam.equals(sourceCollectionIdParam)) {
        System.err.println("Source collection id and target collection id must differ");
        printHelp(options, 1);
    }

    try {

        moveItems(context, sourceCollectionId, targetCollectionId, inherit);

        // Finish off and tidy up
        context.restoreAuthSystemState();
        context.complete();
    } catch (Exception e) {
        context.abort();
        System.err.println("Error committing changes to database: " + e.getMessage());
        System.err.println("Aborting most recent changes.");
        System.exit(1);
    }
}

From source file:com.boulmier.machinelearning.jobexecutor.JobExecutor.java

public static void main(String[] args) throws ParseException, IOException, InterruptedException {
    Options options = defineOptions();
    sysMon = new JavaSysMon();
    InetAddress vmscheduler_ip, mongodb_ip = null;
    Integer vmscheduler_port = null, mongodb_port = null;
    CommandLineParser parser = new BasicParser();

    try {
        CommandLine cmd = parser.parse(options, args);
        if (cmd.hasOption(JobExecutorConfig.OPTIONS.CMD.LONGPORTFIELD)) {
            vmscheduler_port = Integer.valueOf(cmd.getOptionValue(JobExecutorConfig.OPTIONS.CMD.LONGPORTFIELD));
        }
        mongodb_port = cmd.hasOption(JobExecutorConfig.OPTIONS.CMD.LONGMONGOPORTFIELD)
                ? Integer.valueOf(cmd.getOptionValue(JobExecutorConfig.OPTIONS.CMD.LONGMONGOPORTFIELD))
                : JobExecutorConfig.OPTIONS.LOGGING.MONGO_DEFAULT_PORT;
        mongodb_ip = InetAddress.getByName(cmd.getOptionValue(JobExecutorConfig.OPTIONS.CMD.LONGMONGOIPFIELD));

        vmscheduler_ip = InetAddress.getByName(cmd.getOptionValue(JobExecutorConfig.OPTIONS.CMD.LONGIPFIELD));

        decryptKey = cmd.getOptionValue("decrypt-key");

        debugState = cmd.hasOption(JobExecutorConfig.OPTIONS.CMD.LONGDEBUGFIELD);

        logger = LoggerFactory.getLogger();
        logger.info("Attempt to connect on master @" + vmscheduler_ip + ":" + vmscheduler_port);

        new RequestConsumer().start();

    } catch (MissingOptionException moe) {
        logger.error(moe.getMissingOptions() + " are missing");
        HelpFormatter help = new HelpFormatter();
        help.printHelp(JobExecutor.class.getSimpleName(), options);

    } catch (UnknownHostException ex) {
        logger.error(ex.getMessage());
    } finally {
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                logger.info("JobExeutor is shutting down");
            }
        });
    }
}