Example usage for java.lang Throwable printStackTrace

List of usage examples for java.lang Throwable printStackTrace

Introduction

On this page you can find usage examples for java.lang Throwable printStackTrace.

Prototype

public void printStackTrace(PrintWriter s) 

Document

Prints this throwable and its backtrace to the specified print writer.
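
The PrintWriter overload is most often used to capture a backtrace in memory (for logging or error reports) rather than writing it straight to a console stream. Below is a minimal, self-contained sketch of that pattern; the class name and exception message are illustrative, not taken from the examples. Note that Throwable also provides a printStackTrace(PrintStream) overload, which several of the examples on this page use when they pass System.err.

import java.io.PrintWriter;
import java.io.StringWriter;

public class StackTraceCapture {
    public static void main(String[] args) {
        try {
            throw new IllegalStateException("demo failure");
        } catch (Throwable t) {
            // A PrintWriter over a StringWriter captures the backtrace
            // as a String instead of printing it to a console stream.
            StringWriter buffer = new StringWriter();
            t.printStackTrace(new PrintWriter(buffer));
            String trace = buffer.toString();
            System.out.println(trace);
        }
    }
}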

Usage

From source file:bixo.tools.RunUrlNormalizerTool.java

/**
 * @param args
 */
public static void main(String[] args) {
    String curUrl = null;

    try {
        List<String> lines = FileUtils.readLines(new File(args[0]));

        BaseUrlNormalizer urlNormalizer = new SimpleUrlNormalizer();
        for (String url : lines) {
            curUrl = url;
            String normalized = urlNormalizer.normalize(curUrl);
            if (!normalized.equalsIgnoreCase(curUrl)) {
                System.out.println(curUrl + " ==> " + normalized);
            }
        }
    } catch (Throwable t) {
        System.err.println("Exception while processing URLs: " + t.getMessage());
        System.err.println("Current url: " + curUrl);
        t.printStackTrace(System.err);
        System.exit(-1);
    }
}

From source file:com.twitter.distributedlog.basic.ConsoleProxyMultiWriter.java

public static void main(String[] args) throws Exception {
    if (2 != args.length) {
        System.out.println(HELP);
        return;
    }

    String finagleNameStr = args[0];
    final String streamList = args[1];

    DistributedLogClient client = DistributedLogClientBuilder.newBuilder()
            .clientId(ClientId.apply("console-proxy-writer")).name("console-proxy-writer").thriftmux(true)
            .finagleNameStr(finagleNameStr).build();
    String[] streamNameList = StringUtils.split(streamList, ',');
    DistributedLogMultiStreamWriter multiStreamWriter = DistributedLogMultiStreamWriter.newBuilder()
            .streams(Lists.newArrayList(streamNameList)).bufferSize(0).client(client).flushIntervalMs(0)
            .firstSpeculativeTimeoutMs(10000).maxSpeculativeTimeoutMs(20000).requestTimeoutMs(50000).build();

    // Setup Terminal
    Terminal terminal = Terminal.setupTerminal();
    ConsoleReader reader = new ConsoleReader();
    String line;
    while ((line = reader.readLine(PROMPT_MESSAGE)) != null) {
        multiStreamWriter.write(ByteBuffer.wrap(line.getBytes(UTF_8)))
                .addEventListener(new FutureEventListener<DLSN>() {
                    @Override
                    public void onFailure(Throwable cause) {
                        System.out.println("Encountered error on writing data");
                        cause.printStackTrace(System.err);
                        Runtime.getRuntime().exit(0);
                    }

                    @Override
                    public void onSuccess(DLSN value) {
                        // done
                    }
                });
    }

    multiStreamWriter.close();
    client.close();
}

From source file:com.twitter.distributedlog.messaging.ConsoleProxyPartitionedMultiWriter.java

public static void main(String[] args) throws Exception {
    if (2 != args.length) {
        System.out.println(HELP);
        return;
    }

    String finagleNameStr = args[0];
    final String streamList = args[1];

    DistributedLogClient client = DistributedLogClientBuilder.newBuilder()
            .clientId(ClientId.apply("console-proxy-writer")).name("console-proxy-writer").thriftmux(true)
            .finagleNameStr(finagleNameStr).build();
    String[] streamNameList = StringUtils.split(streamList, ',');
    PartitionedWriter<Integer, String> partitionedWriter = new PartitionedWriter<Integer, String>(
            streamNameList, new IntPartitioner(), client);

    // Setup Terminal
    Terminal terminal = Terminal.setupTerminal();
    ConsoleReader reader = new ConsoleReader();
    String line;
    while ((line = reader.readLine(PROMPT_MESSAGE)) != null) {
        String[] parts = StringUtils.split(line, ':');
        if (parts.length != 2) {
            System.out.println("Invalid input. Needs 'KEY:VALUE'");
            continue;
        }
        int key;
        try {
            key = Integer.parseInt(parts[0]);
        } catch (NumberFormatException nfe) {
            System.out.println("Invalid input. Needs 'KEY:VALUE'");
            continue;
        }
        String value = parts[1];

        partitionedWriter.write(key, value).addEventListener(new FutureEventListener<DLSN>() {
            @Override
            public void onFailure(Throwable cause) {
                System.out.println("Encountered error on writing data");
                cause.printStackTrace(System.err);
                Runtime.getRuntime().exit(0);
            }

            @Override
            public void onSuccess(DLSN value) {
                // done
            }
        });
    }

    client.close();
}

From source file:com.jsuper.compiler.cli.SuperCommand.java

/**
 * The compiler main function.
 *
 * @param args List of command line arguments invoking to the compiler
 */
public static void main(String[] args) {
    SuperCommand compiler = new SuperCommand();

    try {
        ProcessResult result = compiler.run(args);

        System.exit(result.getValue());
    } catch (Throwable cause) {
        cause.printStackTrace(System.err);

        System.exit(ProcessResult.FAILURE.getValue());
    }
}

From source file:embedding.events.ExampleEvents.java

/**
 * Main method.
 * @param args command-line arguments
 */
public static void main(String[] args) {
    try {
        System.out.println("FOP ExampleEvents\n");
        System.out.println("Preparing...");

        //Setup directories
        File baseDir = new File(".");
        File outDir = new File(baseDir, "out");
        outDir.mkdirs();

        //Setup input and output files
        URL fo = ExampleEvents.class.getResource("missing-image.fo");
        File pdffile = new File(outDir, "out.pdf");

        System.out.println("Input: XSL-FO (" + fo.toExternalForm() + ")");
        System.out.println("Output: PDF (" + pdffile + ")");
        System.out.println();
        System.out.println("Transforming...");

        ExampleEvents app = new ExampleEvents();

        try {
            app.convertFO2PDF(fo, pdffile);
        } catch (TransformerException te) {
            //Note: We don't get the original exception here!
            //FOP needs to embed the exception in a SAXException and the TraX transformer
            //again wraps the SAXException in a TransformerException. Even our own
            //RuntimeException just wraps the original FileNotFoundException.
            //So we need to unpack to get the original exception (about three layers deep).
            Throwable originalThrowable = getOriginalThrowable(te);
            originalThrowable.printStackTrace(System.err);
            System.out.println("Aborted!");
            System.exit(-1);
        }

        System.out.println("Success!");
    } catch (Exception e) {
        //Some other error (shouldn't happen in this example)
        e.printStackTrace(System.err);
        System.exit(-1);
    }
}
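
The getOriginalThrowable helper called above is not part of this excerpt. A plausible reconstruction, assuming each wrapper exposes the next layer via getCause(), simply walks the cause chain to its innermost element:

// Hypothetical reconstruction of the helper used above: unwrap the
// TransformerException -> SAXException -> RuntimeException layering by
// following getCause() until the innermost Throwable is reached.
private static Throwable getOriginalThrowable(Throwable t) {
    Throwable current = t;
    while (current.getCause() != null && current.getCause() != current) {
        current = current.getCause();
    }
    return current;
}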

From source file:com.github.rvesse.github.pr.stats.PullRequestStats.java

public static void main(String[] args) throws MalformedURLException, IOException {
    SingleCommand<PullRequestStats> parser = SingleCommand.singleCommand(PullRequestStats.class);
    try {
        ParseResult<PullRequestStats> results = parser.parseWithResult(args);
        if (results.wasSuccessful()) {
            // Run the command
            results.getCommand().run();
        } else {
            // Display errors
            int errNum = 1;
            for (ParseException e : results.getErrors()) {
                System.err.format("Error #%d: %s\n", errNum, e.getMessage());
                errNum++;
            }
            System.err.println();

            // Show help
            Help.help(parser.getCommandMetadata(), System.out);
        }
        System.exit(0);
    } catch (ParseException e) {
        System.err.println(e.getMessage());
        System.exit(1);
    } catch (Throwable t) {
        System.err.println(t.getMessage());
        t.printStackTrace(System.err);
        System.exit(2);
    }
}

From source file:io.tempra.AppServer.java

public static void main(String[] args) throws Exception {

    try {
        // String dir = getProperty("storageLocal");//
        // System.getProperty("user.home");
        String arch = "Win" + System.getProperty("sun.arch.data.model");
        System.out.println("sun.arch.data.model " + arch);

        // Sample to force java load dll or so on a filesystem
        // InputStream in = AppServer.class.getResourceAsStream("/dll/" +
        // arch + "/RechargeRPC.dll");
        //
        // File jCliSiTefI = new File(dir + "/" + "RechargeRPC.dll");
        // System.out.println("Writing dll to: " +
        // jCliSiTefI.getAbsolutePath());
        // OutputStream os = new FileOutputStream(jCliSiTefI);
        // byte[] buffer = new byte[4096];
        // int length;
        // while ((length = in.read(buffer)) > 0) {
        // os.write(buffer, 0, length);
        // }
        // os.close();
        // in.close();
        // System.load(jCliSiTefI.toString());
        // addLibraryPath(dir);

    } catch (Exception e) {
        e.printStackTrace();
    }

    ServletContextHandler context = new ServletContextHandler(ServletContextHandler.SESSIONS);
    context.setContextPath("/");

    Server jettyServer = new Server(Integer.parseInt(getProperty("localport")));
    // security configuration
    // FilterHolder holder = new FilterHolder(CrossOriginFilter.class);
    // holder.setInitParameter(CrossOriginFilter.ALLOWED_ORIGINS_PARAM,
    // "*");
    // holder.setInitParameter("allowCredentials", "true");
    // holder.setInitParameter(
    // CrossOriginFilter.ACCESS_CONTROL_ALLOW_ORIGIN_HEADER, "*");
    // holder.setInitParameter(CrossOriginFilter.ALLOWED_METHODS_PARAM,
    // "GET,POST,HEAD");
    // holder.setInitParameter(CrossOriginFilter.ALLOWED_HEADERS_PARAM,
    // "X-Requested-With,Content-Type,Accept,Origin");
    // holder.setName("cross-origin");

    // context.addFilter(holder, fm);

    ResourceHandler resource_handler = new ResourceHandler();

    // add application on embedded server
    boolean servApp = true;
    boolean quioskMode = false;

    // if (System.getProperty("noServApp") != null )
    // if (System.getProperty("noServApp").equalsIgnoreCase("true"))
    // servApp = false;
    if (servApp) {
        //   ProtectionDomain domain = AppServer.class.getProtectionDomain();
        String webDir = AppServer.class.getResource("/webapp").toExternalForm();
        System.out.println("Jetty WEB DIR >>>>" + webDir);
        resource_handler.setDirectoriesListed(true);
        resource_handler.setWelcomeFiles(new String[] { "index.html" });
        resource_handler.setResourceBase(webDir); //
        // "C:/git/tsAgenciaVirtual/www/");
        // resource_handler.setResourceBase("C:/git/tsAgenciaVirtual/www/");

        // copyJarResourceToFolder((JarURLConnection) AppServer.class
        // .getResource("/app").openConnection(), createTempDir("app"));

        // resource_handler.setResourceBase(System
        // .getProperty("java.io.tmpdir") + "/app");
    }
    // sample to add rest services on container
    context.addServlet(FileUploadServlet.class, "/upload");
    ServletHolder jerseyServlet = context.addServlet(ServletContainer.class, "/services/*");
    jerseyServlet.setInitOrder(0);

    jerseyServlet.setInitParameter("jersey.config.server.provider.classnames",
            RestServices.class.getCanonicalName() + "," + RestServices.class.getCanonicalName());

    HandlerList handlers = new HandlerList();
    // context.addFilter(holder, "/*", EnumSet.of(DispatcherType.REQUEST));

    handlers.setHandlers(new Handler[] { resource_handler, context,

            // wscontext,
            new DefaultHandler() });
    jettyServer.setHandler(handlers);
    try {
        // Initialize javax.websocket layer
        ServerContainer wscontainer = WebSocketServerContainerInitializer.configureContext(context);
        wscontainer.setDefaultMaxSessionIdleTimeout(TimeUnit.HOURS.toMillis(1000));

        // Add WebSocket endpoint to javax.websocket layer
        wscontainer.addEndpoint(FileUpload.class);

    } catch (Throwable t) {
        t.printStackTrace(System.err);
    }

    // try {
    jettyServer.start();
    jettyServer.dump(System.err);

    if (servApp && quioskMode) {
        try {
            Process process = new ProcessBuilder(
                    "C:\\Program Files (x86)\\Google\\Chrome\\Application\\chrome.exe", "--kiosk",
                    "--kiosk-printing", "--auto", "--disable-pinch", "--incognito",
                    "--disable-session-crashed-bubble", "--overscroll-history-navigation=0",
                    "http://localhost:" + getProperty("localport")).start();
            // Process process =
            // Runtime.getRuntime().exec(" -kiosk http://localhost:8080");
            InputStream is = process.getInputStream();
            InputStreamReader isr = new InputStreamReader(is);
            BufferedReader br = new BufferedReader(isr);
            String line;

            System.out.printf("Output of running %s is:", Arrays.toString(args));
            while ((line = br.readLine()) != null) {
                System.out.println(line);
            }
        }

        catch (Exception e) {
            // TODO: handle exception
            e.printStackTrace();
        }
    }
    // } finally {
    // jettyServer.destroy();
    // }//}

    jettyServer.join();
}

From source file:com.finderbots.miner2.pinterest.PinterestCrawlAndMinerTool.java

public static void main(String[] args) {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);

    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println(e.getMessage());
        printUsageAndExit(parser);
    }

    // Before we get too far along, see if the domain looks valid.
    String domain = options.getDomain();
    String urlsFile = options.getUrlsFile();
    if (domain != null) {
        validateDomain(domain, parser);
    } else {
        if (urlsFile == null) {
            System.err.println(
                    "Either a target domain should be specified or a file with a list of urls needs to be provided");
            printUsageAndExit(parser);
        }
    }

    if (domain != null && urlsFile != null) {
        System.out.println("Warning: Both domain and urls file list provided - using domain");
    }

    String outputDirName = options.getOutputDir();
    if (options.isDebugLogging()) {
        System.setProperty("bixo.root.level", "DEBUG");
    } else {
        System.setProperty("bixo.root.level", "INFO");
    }

    if (options.getLoggingAppender() != null) {
        // Set console vs. DRFA vs. something else
        System.setProperty("bixo.appender", options.getLoggingAppender());
    }

    String logsDir = options.getLogsDir();
    if (!logsDir.endsWith("/")) {
        logsDir = logsDir + "/";
    }

    try {
        JobConf conf = new JobConf();
        Path outputPath = new Path(outputDirName);
        FileSystem fs = outputPath.getFileSystem(conf);

        // First check if the user wants to clean
        if (options.isCleanOutputDir()) {
            if (fs.exists(outputPath)) {
                fs.delete(outputPath, true);
            }
        }

        // If the user is starting from scratch, set up the
        // output directory and create an initial urls subdir.
        if (!fs.exists(outputPath)) {
            fs.mkdirs(outputPath);

            // Create a "0-<timestamp>" sub-directory with just a /crawldb subdir
            // In the /crawldb dir the input file will have a single URL for the target domain.

            Path curLoopDir = CrawlDirUtils.makeLoopDir(fs, outputPath, 0);
            String curLoopDirName = curLoopDir.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, 0);

            Path crawlDbPath = new Path(curLoopDir, CrawlConfig.CRAWLDB_SUBDIR_NAME);

            if (domain != null) {
                importOneDomain(domain, crawlDbPath, conf);
            } else {
                importUrls(urlsFile, crawlDbPath);
            }
        }

        Path latestDirPath = CrawlDirUtils.findLatestLoopDir(fs, outputPath);

        if (latestDirPath == null) {
            System.err.println("No previous cycle output dirs exist in " + outputDirName);
            printUsageAndExit(parser);
        }

        Path crawlDbPath = new Path(latestDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);

        // Set up the start and end loop counts.
        int startLoop = CrawlDirUtils.extractLoopNumber(latestDirPath);
        int endLoop = startLoop + options.getNumLoops();

        // Set up the UserAgent for the fetcher.
        UserAgent userAgent = new UserAgent(options.getAgentName(), CrawlConfig.EMAIL_ADDRESS,
                CrawlConfig.WEB_ADDRESS);

        // You also get to customize the FetcherPolicy
        FetcherPolicy defaultPolicy;
        if (options.getCrawlDuration() != 0) {
            defaultPolicy = new AdaptiveFetcherPolicy(options.getEndCrawlTime(), options.getCrawlDelay());
        } else {
            defaultPolicy = new FetcherPolicy();
        }
        defaultPolicy.setMaxContentSize(CrawlConfig.MAX_CONTENT_SIZE);
        defaultPolicy.setRequestTimeout(10L * 1000L);//10 seconds

        // COMPLETE for crawling a single site, EFFICIENT for many sites
        if (options.getCrawlPolicy().equals(Options.IMPOLITE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.IMPOLITE);
        } else if (options.getCrawlPolicy().equals(Options.EFFICIENT_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.EFFICIENT);
        } else if (options.getCrawlPolicy().equals(Options.COMPLETE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.COMPLETE);
        }

        // It is a good idea to set up a crawl duration when running long crawls as you may
        // end up in situations where the fetch slows down due to a 'long tail' and by
        // specifying a crawl duration you know exactly when the crawl will end.
        int crawlDurationInMinutes = options.getCrawlDuration();
        boolean hasEndTime = crawlDurationInMinutes != Options.NO_CRAWL_DURATION;
        long targetEndTime = hasEndTime
                ? System.currentTimeMillis() + (crawlDurationInMinutes * CrawlConfig.MILLISECONDS_PER_MINUTE)
                : FetcherPolicy.NO_CRAWL_END_TIME;

        // By setting up a url filter we only deal with urls that we want to
        // instead of all the urls that we extract.
        BaseUrlFilter urlFilter = null;
        List<String> patterns = null;
        String regexUrlFiltersFile = options.getRegexUrlFiltersFile();
        if (regexUrlFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlFiltersFile);
        } else {
            patterns = RegexUrlDatumFilter.getDefaultUrlFilterPatterns();
            if (domain != null) {
                String domainPatterStr = "+(?i)^(http|https)://([a-z0-9]*\\.)*" + domain;
                patterns.add(domainPatterStr);
            } else {
                String protocolPatterStr = "+(?i)^(http|https)://*";
                patterns.add(protocolPatterStr);
                //Log.warn("Defaulting to basic url regex filtering (just suffix and protocol");
            }
        }
        urlFilter = new RegexUrlDatumFilter(patterns.toArray(new String[patterns.size()]));

        // get a list of patterns which tell the miner which URLs to include or exclude.
        patterns.clear();
        RegexUrlStringFilter urlsToMineFilter = null;
        String regexUrlsToMineFiltersFile = options.getRegexUrlToMineFile();
        AnalyzeHtml analyzer = null;
        if (regexUrlsToMineFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlsToMineFiltersFile);
            urlsToMineFilter = new RegexUrlStringFilter(patterns.toArray(new String[patterns.size()]));
            analyzer = new AnalyzeHtml(urlsToMineFilter);
        }

        // OK, now we're ready to start looping, since we've got our current
        // settings
        for (int curLoop = startLoop + 1; curLoop <= endLoop; curLoop++) {

            // Adjust target end time, if appropriate.
            if (hasEndTime) {
                int remainingLoops = (endLoop - curLoop) + 1;
                long now = System.currentTimeMillis();
                long perLoopTime = (targetEndTime - now) / remainingLoops;
                defaultPolicy.setCrawlEndTime(now + perLoopTime);
            }

            Path curLoopDirPath = CrawlDirUtils.makeLoopDir(fs, outputPath, curLoop);
            String curLoopDirName = curLoopDirPath.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, curLoop);

            Flow flow = PinterestCrawlAndMinerWorkflow.createFlow(curLoopDirPath, crawlDbPath, defaultPolicy,
                    userAgent, urlFilter, analyzer, options);
            flow.complete();

            // Writing out .dot files is a good way to verify your flows.
            flow.writeDOT("valid-flow.dot");

            // Update crawlDbPath to point to the latest crawl db
            crawlDbPath = new Path(curLoopDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);
        }
    } catch (PlannerException e) {
        e.writeDOT("failed-flow.dot");
        System.err.println("PlannerException: " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(-1);
    } catch (Throwable t) {
        System.err.println("Exception running tool: " + t.getMessage());
        t.printStackTrace(System.err);
        System.exit(-1);
    }
}

From source file:com.finderbots.miner2.tomatoes.RTCriticsCrawlAndMinerTool.java

public static void main(String[] args) {
    Options options = new Options();
    CmdLineParser parser = new CmdLineParser(options);

    try {
        parser.parseArgument(args);
    } catch (CmdLineException e) {
        System.err.println(e.getMessage());
        printUsageAndExit(parser);
    }

    // Before we get too far along, see if the domain looks valid.
    String domain = options.getDomain();
    String urlsFile = options.getUrlsFile();
    if (domain != null) {
        validateDomain(domain, parser);
    } else {
        if (urlsFile == null) {
            System.err.println(
                    "Either a target domain should be specified or a file with a list of urls needs to be provided");
            printUsageAndExit(parser);
        }
    }

    if (domain != null && urlsFile != null) {
        System.out.println("Warning: Both domain and urls file list provided - using domain");
    }

    String outputDirName = options.getOutputDir();
    if (options.isDebugLogging()) {
        System.setProperty("bixo.root.level", "DEBUG");
    } else {
        System.setProperty("bixo.root.level", "INFO");
    }

    if (options.getLoggingAppender() != null) {
        // Set console vs. DRFA vs. something else
        System.setProperty("bixo.appender", options.getLoggingAppender());
    }

    String logsDir = options.getLogsDir();
    if (!logsDir.endsWith("/")) {
        logsDir = logsDir + "/";
    }

    try {
        JobConf conf = new JobConf();
        Path outputPath = new Path(outputDirName);
        FileSystem fs = outputPath.getFileSystem(conf);

        // First check if the user wants to clean
        if (options.isCleanOutputDir()) {
            if (fs.exists(outputPath)) {
                fs.delete(outputPath, true);
            }
        }

        // If the user is starting from scratch, set up the
        // output directory and create an initial urls subdir.
        if (!fs.exists(outputPath)) {
            fs.mkdirs(outputPath);

            // Create a "0-<timestamp>" sub-directory with just a /crawldb subdir
            // In the /crawldb dir the input file will have a single URL for the target domain.

            Path curLoopDir = CrawlDirUtils.makeLoopDir(fs, outputPath, 0);
            String curLoopDirName = curLoopDir.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, 0);

            Path crawlDbPath = new Path(curLoopDir, CrawlConfig.CRAWLDB_SUBDIR_NAME);

            if (domain != null) {
                importOneDomain(domain, crawlDbPath, conf);
            } else {
                importUrls(urlsFile, crawlDbPath);
            }
        }

        Path latestDirPath = CrawlDirUtils.findLatestLoopDir(fs, outputPath);

        if (latestDirPath == null) {
            System.err.println("No previous cycle output dirs exist in " + outputDirName);
            printUsageAndExit(parser);
        }

        Path crawlDbPath = new Path(latestDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);

        // Set up the start and end loop counts.
        int startLoop = CrawlDirUtils.extractLoopNumber(latestDirPath);
        int endLoop = startLoop + options.getNumLoops();

        // Set up the UserAgent for the fetcher.
        UserAgent userAgent = new UserAgent(options.getAgentName(), CrawlConfig.EMAIL_ADDRESS,
                CrawlConfig.WEB_ADDRESS);

        // You also get to customize the FetcherPolicy
        FetcherPolicy defaultPolicy;
        if (options.getCrawlDuration() != 0) {
            defaultPolicy = new AdaptiveFetcherPolicy(options.getEndCrawlTime(), options.getCrawlDelay());
        } else {
            defaultPolicy = new FetcherPolicy();
        }
        defaultPolicy.setMaxContentSize(CrawlConfig.MAX_CONTENT_SIZE);
        defaultPolicy.setRequestTimeout(10L * 1000L);//10 seconds

        // COMPLETE for crawling a single site, EFFICIENT for many sites
        if (options.getCrawlPolicy().equals(Options.IMPOLITE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.IMPOLITE);
        } else if (options.getCrawlPolicy().equals(Options.EFFICIENT_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.EFFICIENT);
        } else if (options.getCrawlPolicy().equals(Options.COMPLETE_CRAWL_POLICY)) {
            defaultPolicy.setFetcherMode(FetcherPolicy.FetcherMode.COMPLETE);
        }

        // It is a good idea to set up a crawl duration when running long crawls as you may
        // end up in situations where the fetch slows down due to a 'long tail' and by
        // specifying a crawl duration you know exactly when the crawl will end.
        int crawlDurationInMinutes = options.getCrawlDuration();
        boolean hasEndTime = crawlDurationInMinutes != Options.NO_CRAWL_DURATION;
        long targetEndTime = hasEndTime
                ? System.currentTimeMillis() + (crawlDurationInMinutes * CrawlConfig.MILLISECONDS_PER_MINUTE)
                : FetcherPolicy.NO_CRAWL_END_TIME;

        // By setting up a url filter we only deal with urls that we want to
        // instead of all the urls that we extract.
        BaseUrlFilter urlFilter = null;
        List<String> patterns = null;
        String regexUrlFiltersFile = options.getRegexUrlFiltersFile();
        if (regexUrlFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlFiltersFile);
        } else {
            patterns = RegexUrlDatumFilter.getDefaultUrlFilterPatterns();
            if (domain != null) {
                String domainPatterStr = "+(?i)^(http|https)://([a-z0-9]*\\.)*" + domain;
                patterns.add(domainPatterStr);
            } else {
                String protocolPatterStr = "+(?i)^(http|https)://*";
                patterns.add(protocolPatterStr);
                //Log.warn("Defaulting to basic url regex filtering (just suffix and protocol");
            }
        }
        urlFilter = new RegexUrlDatumFilter(patterns.toArray(new String[patterns.size()]));

        // get a list of patterns which tell the miner which URLs to include or exclude.
        patterns.clear();
        RegexUrlStringFilter urlsToMineFilter = null;
        String regexUrlsToMineFiltersFile = options.getRegexUrlToMineFile();
        MineRTCriticsPreferences prefsAnalyzer = null;
        if (regexUrlsToMineFiltersFile != null) {
            patterns = RegexUrlDatumFilter.getUrlFilterPatterns(regexUrlsToMineFiltersFile);
            urlsToMineFilter = new RegexUrlStringFilter(patterns.toArray(new String[patterns.size()]));
            prefsAnalyzer = new MineRTCriticsPreferences(urlsToMineFilter);
        }

        // OK, now we're ready to start looping, since we've got our current
        // settings
        for (int curLoop = startLoop + 1; curLoop <= endLoop; curLoop++) {

            // Adjust target end time, if appropriate.
            if (hasEndTime) {
                int remainingLoops = (endLoop - curLoop) + 1;
                long now = System.currentTimeMillis();
                long perLoopTime = (targetEndTime - now) / remainingLoops;
                defaultPolicy.setCrawlEndTime(now + perLoopTime);
            }

            Path curLoopDirPath = CrawlDirUtils.makeLoopDir(fs, outputPath, curLoop);
            String curLoopDirName = curLoopDirPath.getName();
            setLoopLoggerFile(logsDir + curLoopDirName, curLoop);

            Flow flow = RTCriticsCrawlAndMinerWorkflow.createFlow(curLoopDirPath, crawlDbPath, defaultPolicy,
                    userAgent, urlFilter, prefsAnalyzer, options);
            flow.complete();

            // Writing out .dot files is a good way to verify your flows.
            flow.writeDOT("valid-flow.dot");

            // Update crawlDbPath to point to the latest crawl db
            crawlDbPath = new Path(curLoopDirPath, CrawlConfig.CRAWLDB_SUBDIR_NAME);
        }
    } catch (PlannerException e) {
        e.writeDOT("failed-flow.dot");
        System.err.println("PlannerException: " + e.getMessage());
        e.printStackTrace(System.err);
        System.exit(-1);
    } catch (Throwable t) {
        System.err.println("Exception running tool: " + t.getMessage());
        t.printStackTrace(System.err);
        System.exit(-1);
    }
}

From source file:fr.inria.atlanmod.dag.instantiator.Launcher.java

public static void main(String[] args) throws GenerationException, IOException {

    ResourceSetImpl resourceSet = new ResourceSetImpl();
    { // initializing the registry

        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap().put(EcorePackage.eNS_PREFIX,
                new EcoreResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getExtensionToFactoryMap()
                .put(Resource.Factory.Registry.DEFAULT_EXTENSION, new XMIResourceFactoryImpl());
        resourceSet.getResourceFactoryRegistry().getProtocolToFactoryMap().put(NeoEMFURI.NEOEMF_HBASE_SCHEME,
                NeoEMFResourceFactory.eINSTANCE);

    }

    Options options = new Options();

    configureOptions(options);

    CommandLineParser parser = new GnuParser();

    try {
        CommandLine commandLine = parser.parse(options, args);

        //         String epackage_class = commandLine.getOptionValue(E_PACKAGE_CLASS);
        //         
        //         LOGGER.info("Start loading the package");
        //         Class<?> inClazz = Launcher.class.getClassLoader().loadClass(epackage_class);
        //         EPackage _package = (EPackage) inClazz.getMethod("init").invoke(null);
        //         
        //         Resource metamodelResource = new XMIResourceImpl(URI.createFileURI("dummy"));
        //          metamodelResource.getContents().add(_package);
        //          LOGGER.info("Finish loading the package");

        int size = Launcher.DEFAULT_AVERAGE_MODEL_SIZE;
        if (commandLine.hasOption(SIZE)) {
            Number number = (Number) commandLine.getParsedOptionValue(SIZE);
            size = (int) Math.min(Integer.MAX_VALUE, number.longValue());
        }

        float variation = Launcher.DEFAULT_DEVIATION;
        if (commandLine.hasOption(VARIATION)) {
            Number number = (Number) commandLine.getParsedOptionValue(VARIATION);
            if (number.floatValue() < 0.0f || number.floatValue() > 1.0f) {
                throw new ParseException(MessageFormat.format("Invalid value for option -{0}: {1}", VARIATION,
                        number.floatValue()));
            }
            variation = number.floatValue();
        }

        float propVariation = Launcher.DEFAULT_DEVIATION;
        if (commandLine.hasOption(PROP_VARIATION)) {
            Number number = (Number) commandLine.getParsedOptionValue(PROP_VARIATION);
            if (number.floatValue() < 0.0f || number.floatValue() > 1.0f) {
                throw new ParseException(MessageFormat.format("Invalid value for option -{0}: {1}",
                        PROP_VARIATION, number.floatValue()));
            }
            propVariation = number.floatValue();
        }

        long seed = System.currentTimeMillis();
        if (commandLine.hasOption(SEED)) {
            seed = ((Number) commandLine.getParsedOptionValue(SEED)).longValue();
        }

        Range<Integer> range = Range.between(Math.round(size * (1 - variation)),
                Math.round(size * (1 + variation)));

        ISpecimenConfiguration config = new DagMetamodelConfig(range, seed);
        IGenerator generator = new DagGenerator(config, config.getSeed());

        GenericMetamodelGenerator modelGen = new GenericMetamodelGenerator(generator);

        if (commandLine.hasOption(OUTPUT_PATH)) {
            String outDir = commandLine.getOptionValue(OUTPUT_PATH);
            //java.net.URI intermediateURI = java.net.URI.create(outDir);
            modelGen.setSamplesPath(outDir);
        }

        int numberOfModels = 1;
        if (commandLine.hasOption(N_MODELS)) {
            numberOfModels = ((Number) commandLine.getParsedOptionValue(N_MODELS)).intValue();
        }

        int valuesSize = GenericMetamodelConfig.DEFAULT_AVERAGE_VALUES_LENGTH;
        if (commandLine.hasOption(VALUES_SIZE)) {
            Number number = (Number) commandLine.getParsedOptionValue(VALUES_SIZE);
            valuesSize = (int) Math.min(Integer.MAX_VALUE, number.longValue());
        }

        int referencesSize = GenericMetamodelConfig.DEFAULT_AVERAGE_REFERENCES_SIZE;
        if (commandLine.hasOption(DEGREE)) {
            Number number = (Number) commandLine.getParsedOptionValue(DEGREE);
            referencesSize = (int) Math.min(Integer.MAX_VALUE, number.longValue());
        }

        config.setValuesRange(Math.round(valuesSize * (1 - propVariation)),
                Math.round(valuesSize * (1 + propVariation)));

        config.setReferencesRange(Math.round(referencesSize * (1 - propVariation)),
                Math.round(referencesSize * (1 + propVariation)));

        config.setPropertiesRange(Math.round(referencesSize * (1 - propVariation)),
                Math.round(referencesSize * (1 + propVariation)));

        long start = System.currentTimeMillis();
        modelGen.runGeneration(resourceSet, numberOfModels, size, variation);
        long end = System.currentTimeMillis();
        LOGGER.info(
                MessageFormat.format("Generation finished after {0} s", Long.toString((end - start) / 1000)));

        //         for (Resource rsc : resourceSet.getResources()) {
        //            if (rsc.getContents().get(0) instanceof DAG) {
        //               
        //            }
        //               
        //         }

        if (commandLine.hasOption(DIAGNOSE)) {
            for (Resource resource : resourceSet.getResources()) {
                LOGGER.info(
                        MessageFormat.format("Requested validation for resource ''{0}''", resource.getURI()));
                BasicDiagnostic diagnosticChain = diagnoseResource(resource);
                if (!isFailed(diagnosticChain)) {
                    LOGGER.info(MessageFormat.format("Result of the diagnosis of resurce ''{0}'' is ''OK''",
                            resource.getURI()));
                } else {
                    LOGGER.severe(MessageFormat.format("Found ''{0}'' error(s) in the resource ''{1}''",
                            diagnosticChain.getChildren().size(), resource.getURI()));
                    for (Diagnostic diagnostic : diagnosticChain.getChildren()) {
                        LOGGER.fine(diagnostic.getMessage());
                    }
                }
            }
            LOGGER.info("Validation finished");
        }

    } catch (ParseException e) {
        System.err.println(e.getLocalizedMessage());
        HelpFormatter formatter = new HelpFormatter();
        formatter.setOptionComparator(new OptionComarator<Option>());
        try {
            formatter.setWidth(Math.max(Terminal.getTerminal().getTerminalWidth(), 80));
        } catch (Throwable t) {
            LOGGER.warning("Unable to get console information");
        }
        formatter.printHelp("java -jar <this-file.jar>", options, true);
        System.exit(ERROR);
    } catch (Throwable t) {
        System.err.println("ERROR: " + t.getLocalizedMessage());
        StringWriter stringWriter = new StringWriter();
        t.printStackTrace(new PrintWriter(stringWriter));
        System.err.println(t);
        LOGGER.severe(stringWriter.toString());
        System.exit(ERROR);
    }
}